Compare commits

..

No commits in common. "main" and "feat/valheim" have entirely different histories.

113 changed files with 1510 additions and 2184 deletions

View file

@ -7,11 +7,11 @@ jobs:
pre-commit:
runs-on: ubuntu-22.04
steps:
- uses: https://code.forgejo.org/actions/checkout@v6
- uses: https://code.forgejo.org/actions/setup-python@v6
- uses: https://code.forgejo.org/actions/checkout@v4
- uses: https://code.forgejo.org/actions/setup-python@v5
with:
python-version: '3.10'
- uses: opentofu/setup-opentofu@v2
- uses: opentofu/setup-opentofu@v1
with:
tofu_version: 1.7.0
- uses: pre-commit/action@v3.0.1
@ -19,7 +19,7 @@ jobs:
k8s:
runs-on: ubuntu-22.04
steps:
- uses: https://code.forgejo.org/actions/checkout@v6
- uses: https://code.forgejo.org/actions/checkout@v4
- name: Set up Kubeconform
uses: bmuschko/setup-kubeconform@v1
@ -30,8 +30,8 @@ jobs:
tflint:
runs-on: ubuntu-22.04
steps:
- uses: https://code.forgejo.org/actions/checkout@v6
- uses: terraform-linters/setup-tflint@v6
- uses: https://code.forgejo.org/actions/checkout@v4
- uses: terraform-linters/setup-tflint@v4
name: Setup TFLint
with:
tflint_version: v0.50.3

View file

@ -10,8 +10,8 @@ jobs:
authentik:
runs-on: ubuntu-22.04
steps:
- uses: https://code.forgejo.org/actions/checkout@v6
- uses: opentofu/setup-opentofu@v2
- uses: https://code.forgejo.org/actions/checkout@v4
- uses: opentofu/setup-opentofu@v1
with:
tofu_version: 1.8.1
- name: Deploy
@ -40,8 +40,8 @@ jobs:
adguard:
runs-on: ubuntu-22.04
steps:
- uses: https://code.forgejo.org/actions/checkout@v6
- uses: opentofu/setup-opentofu@v2
- uses: https://code.forgejo.org/actions/checkout@v4
- uses: opentofu/setup-opentofu@v1
with:
tofu_version: 1.7.0
- name: Deploy

View file

@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@ -15,18 +15,18 @@ repos:
- id: trailing-whitespace
- repo: https://github.com/antonbabenko/pre-commit-terraform
rev: v1.105.0
rev: v1.92.1
hooks:
- id: terraform_fmt
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.38.0
rev: v1.35.1
hooks:
- id: yamllint
args: [--format, parsable, --strict]
- repo: https://github.com/shellcheck-py/shellcheck-py
rev: v0.11.0.1
rev: v0.10.0.1
hooks:
- id: shellcheck
files: \.sh

3
ansible/ansible.cfg Normal file
View file

@ -0,0 +1,3 @@
[defaults]
inventory = inventory
host_key_checking = False

26
ansible/inventory Normal file
View file

@ -0,0 +1,26 @@
[nextclouds]
cloud.fuku
[nextclouds:vars]
ansible_user=root
[k3s_masters]
master1.ramiel.fuku
master2.ramiel.fuku
master3.ramiel.fuku
[k3s_agents]
agent1.zeruel.fuku
sandalphon.fuku
[k3s_masters:vars]
ansible_user=ci
[k3s_agents:vars]
ansible_user=ci
[giteas]
gitea.fuku
[giteas:vars]
ansible_user=root

View file

@ -0,0 +1,9 @@
---
- name: Apply base configuration
hosts:
- k3s_agents
- k3s_masters
roles:
- role: ../roles/base
become: true

View file

@ -0,0 +1,13 @@
---
- name: Install k3s nodes
hosts:
- k3s_masters
- k3s_agents
roles:
- role: ../roles/k3s
become: true
vars:
first_master_hostname: "{{ groups['k3s_masters'][0] }}"
is_first_master: "{{ inventory_hostname in groups['k3s_masters'][0] }}"
short_hostname: "{{ inventory_hostname.split('.')[0] }}"

View file

@ -0,0 +1,36 @@
---
- name: Set same timezone on every Server
community.general.timezone:
name: '{{ system_timezone }}'
when: (system_timezone is defined) and (system_timezone != "Europe/Madrid")
- name: Enable IPv4 forwarding
ansible.posix.sysctl:
name: net.ipv4.ip_forward
value: '1'
state: present
reload: true
- name: Enable IPv6 forwarding
ansible.posix.sysctl:
name: net.ipv6.conf.all.forwarding
value: '1'
state: present
reload: true
- name: Enable IPv6 router advertisements
ansible.posix.sysctl:
name: net.ipv6.conf.all.accept_ra
value: '2'
state: present
reload: true
- import_tasks: packages.yml
name: Install base packages
tags:
- packages
- import_tasks: mounts.yml
name: Mount NFS shares
tags:
- nfs

View file

@ -0,0 +1,19 @@
---
- name: Create mountpoint directory
file:
path: /nfs/nas1
state: directory
owner: 10000
group: 10000
- name: Mount nas1 share
mount:
fstype: nfs
src: zeruel.fuku:/mnt/pool1/nas1
path: /nfs/nas1
state: mounted
fstab: /etc/fstab
opts: _netdev,nofail,tcp,bg,retrans=2,timeo=150,rsize=32768,wsize=32768,noresvport
backup: true
become: true
become_user: root

View file

@ -0,0 +1,18 @@
---
- name: Install base packages
apt:
name: '{{ item }}'
state: present
update_cache: true
loop:
- qemu-guest-agent
- git
- tmux
- vim
- curl
- nfs-common
- name: Update all packages
apt:
upgrade: dist
update_cache: true

View file

@ -0,0 +1,17 @@
---
- name: Create rancher folder
file:
state: directory
path: /etc/rancher/k3s
owner: root
group: root
mode: 755
- name: Copy k3s config file
template:
src: agent.config.yaml.j2
dest: /etc/rancher/k3s/config.yaml
mode: 600
- name: Install k3s agent
shell: bash /tmp/k3s.install.sh agent

View file

@ -0,0 +1,19 @@
---
- name: Create .kube directory
become: true
file:
path: /home/ci/.kube
state: directory
mode: '0755'
owner: ci
group: ci
- name: Copy kubeconfig
copy:
remote_src: true
src: /etc/rancher/k3s/k3s.yaml
dest: /home/ci/.kube/config
mode: 0644
owner: ci
group: ci
become: true

View file

@ -0,0 +1,5 @@
---
- name: Download k3s script
get_url:
url: https://get.k3s.io
dest: /tmp/k3s.install.sh

View file

@ -0,0 +1,15 @@
---
- import_tasks: download.yml
name: Download install script
- import_tasks: master.yml
name: Install master node
when: inventory_hostname in groups["k3s_masters"]
- import_tasks: agent.yml
name: Install agent node
when: inventory_hostname in groups["k3s_agents"]
- import_tasks: copy-kubeconfig.yml
name: Copy kubeconfig
when: inventory_hostname in groups["k3s_masters"] and is_first_master

View file

@ -0,0 +1,19 @@
---
- name: Create rancher folder
file:
state: directory
path: /etc/rancher/k3s
owner: root
group: root
mode: 755
- name: Copy k3s config file
template:
src: master.config.yaml.j2
dest: /etc/rancher/k3s/config.yaml
mode: 600
vars:
etcd_snapshot_dir: /nfs/nas1/backups/{{ short_hostname }}
- name: Install k3s master
command: bash /tmp/k3s.install.sh

View file

@ -0,0 +1,2 @@
token: {{ cluster_token }}
server: https://{{ tls_san }}:6443

View file

@ -0,0 +1,12 @@
tls-san:
- {{ inventory_hostname }}
- {{ tls_san }}
node-label:
- name={{ inventory_hostname }}
token: "{{ cluster_token }}"
etcd-snapshot-dir: {{ etcd_snapshot_dir }}
{% if is_first_master %}
cluster-init: "{{ is_first_master }}"
{% else %}
server: https://{{ first_master_hostname }}:6443
{% endif %}

View file

@ -0,0 +1,4 @@
---
k3s_version: v1.27.4+k3s1
tls_san: "{{ lookup('env', 'ANSIBLE_TLS_SAN') | mandatory }}"
cluster_token: "{{ lookup('env', 'ANSIBLE_CLUSTER_TOKEN') | mandatory }}"

2
ansible/k3s/sample.env Normal file
View file

@ -0,0 +1,2 @@
ANSIBLE_K3S_CLUSTER_TOKEN=
ANSIBLE_K3S_TLS_SAN=

View file

@ -0,0 +1,27 @@
---
- name: Install promtail
hosts:
- nextclouds
roles:
- role: patrickjahns.promtail
vars:
promtail_version: 2.9.4
promtail_config_clients:
- url: https://loki.fuku/loki/api/v1/push
basic_auth:
username: cloud
password: "{{ lookup('env', 'NEXTCLOUD_PROMTAIL_PASSWORD') | mandatory }}"
tls_config:
insecure_skip_verify: true
promtail_config_scrape_configs:
- job_name: system
static_configs:
- targets:
- localhost
labels:
nextcloud: cloud.fukurokuju.dev
__path__: /mnt/share/data/cloud/data/{nextcloud,audit}.log
promtail_config_limits_config:
readline_rate_enabled: true
readline_rate_drop: true

View file

@ -0,0 +1 @@
NEXTCLOUD_PROMTAIL_PASSWORD=superdupersecure

3
ansible/requirements.yml Normal file
View file

@ -0,0 +1,3 @@
---
- name: patrickjahns.promtail
version: 1.31.0

View file

@ -1,13 +0,0 @@
---
services:
wiki:
image: dokuwiki/dokuwiki:2024-02-06b
restart: unless-stopped
ports:
- "44344:8080"
volumes:
- /mnt/nas1/shared/dokuwiki/dokuwiki:/storage
environment:
PHP_TIMEZONE: Europe/Madrid
PHP_MEMORYLIMIT: 512M
PHP_UPLOADLIMIT: 128M

View file

@ -0,0 +1,41 @@
---
x-runner-common: &runner-common
image: code.forgejo.org/forgejo/runner:6.3.1
links:
- docker-in-docker
depends_on:
docker-in-docker:
condition: service_started
user: 1001:1001
restart: unless-stopped
command: '/bin/sh -c "sleep 5; forgejo-runner daemon"'
environment:
DOCKER_HOST: tcp://docker-in-docker:2375
networks:
forgejo:
external: false
services:
docker-in-docker:
image: docker:dind
container_name: 'docker_dind'
privileged: true
command: ['dockerd', '-H', 'tcp://0.0.0.0:2375', '--tls=false']
restart: 'unless-stopped'
runner:
<<: *runner-common
container_name: 'runner'
volumes:
- ${FORGEJO_RUNNER_DATA:-/mnt/nas1/shared/forgejo-runner/data}:/data
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
runner-2:
<<: *runner-common
container_name: 'runner2'
volumes:
- ${FORGEJO_RUNNER_DATA:-/mnt/nas1/shared/forgejo-runner/data2}:/data
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro

View file

@ -1,48 +0,0 @@
---
services:
ganymede:
container_name: ganymede
image: ghcr.io/zibbp/ganymede:4.16.0
restart: unless-stopped
environment:
DEBUG: ${GANYMEDE_DEBUG:-false}
TZ: ${GANYMEDE_TZ:-Europe/Madrid}
VIDEOS_DIR: ${GANYMEDE_VIDEOS_DIR:-/data/videos}
TEMP_DIR: ${GANYMEDE_TEMP_DIR:-/data/temp}
LOGS_DIR: ${GANYMEDE_LOGS_DIR:-/data/logs}
CONFIG_DIR: ${GANYMEDE_CONFIG_DIR:-/data/config}
DB_HOST: ${GANYMEDE_DB_HOST:-192.168.1.3}
DB_PORT: ${GANYMEDE_DB_PORT:-5432}
DB_USER: ${GANYMEDE_DB_USER:-ganymede}
DB_PASS: ${GANYMEDE_DB_PASS}
DB_NAME: ${GANYMEDE_DB_NAME:-ganymede}
DB_SSL: ${GANYMEDE_DB_SSL:-disable}
TWITCH_CLIENT_ID: ${GANYMEDE_TWITCH_CLIENT_ID}
TWITCH_CLIENT_SECRET: ${GANYMEDE_TWITCH_CLIENT_SECRET}
MAX_CHAT_DOWNLOAD_EXECUTIONS: ${GANYMEDE_MAX_CHAT_DOWNLOAD_EXECUTIONS:-3}
MAX_CHAT_RENDER_EXECUTIONS: ${GANYMEDE_MAX_CHAT_RENDER_EXECUTIONS:-2}
MAX_VIDEO_DOWNLOAD_EXECUTIONS: ${GANYMEDE_MAX_VIDEO_DOWNLOAD_EXECUTIONS:-2}
MAX_VIDEO_CONVERT_EXECUTIONS: ${GANYMEDE_MAX_VIDEO_CONVERT_EXECUTIONS:-3}
MAX_VIDEO_SPRITE_THUMBNAIL_EXECUTIONS: ${GANYMEDE_MAX_VIDEO_SPRITE_THUMBNAIL_EXECUTIONS:-2}
OAUTH_ENABLED: ${GANYMEDE_OAUTH_ENABLED:-true}
OAUTH_PROVIDER_URL: ${GANYMEDE_OAUTH_PROVIDER_URL:-https://auth.fukurokuju.dev/application/o/ganymede/}
OAUTH_CLIENT_ID: ${GANYMEDE_OAUTH_CLIENT_ID}
OAUTH_CLIENT_SECRET: ${GANYMEDE_OAUTH_CLIENT_SECRET}
OAUTH_REDIRECT_URL: ${GANYMEDE_OAUTH_REDIRECT_URL:-https://vods.roboces.dev/api/v1/auth/oauth/callback}
SHOW_SSO_LOGIN_BUTTON: ${GANYMEDE_SHOW_SSO_LOGIN_BUTTON:-true}
FORCE_SSO_AUTH: ${GANYMEDE_FORCE_SSO_AUTH:-true}
REQUIRE_LOGIN: ${GANYMEDE_REQUIRE_LOGIN:-true}
volumes:
- ${GANYMEDE_VIDEOS:-/mnt/vods/ganymede/videos}:/data/videos
- ${GANYMEDE_TEMP:-/mnt/vods/ganymede/temp}:/data/temp
- ${GANYMEDE_CACHE:-/mnt/vods/ganymede/cache}:/data/.cache
- ${GANYMEDE_LOGS:-/mnt/vods/ganymede/logs}:/data/logs
- ${GANYMEDE_CONFIG:-/mnt/vods/ganymede/config}:/data/config
ports:
- "4800:4000"
healthcheck:
test: curl --fail http://localhost:4000/health || exit 1
interval: 60s
retries: 5
start_period: 60s
timeout: 10s

View file

@ -1,27 +0,0 @@
GANYMEDE_DEBUG=false
GANYMEDE_TZ=Europe/Madrid
GANYMEDE_VIDEOS_DIR=/data/videos
GANYMEDE_TEMP_DIR=/data/temp
GANYMEDE_LOGS_DIR=/data/logs
GANYMEDE_CONFIG_DIR=/data/config
GANYMEDE_DB_HOST=192.168.1.3
GANYMEDE_DB_PORT=5432
GANYMEDE_DB_USER=ganymede
GANYMEDE_DB_PASS=
GANYMEDE_DB_NAME=ganymede
GANYMEDE_DB_SSL=disable
GANYMEDE_TWITCH_CLIENT_ID=
GANYMEDE_TWITCH_CLIENT_SECRET=
GANYMEDE_MAX_CHAT_DOWNLOAD_EXECUTIONS=3
GANYMEDE_MAX_CHAT_RENDER_EXECUTIONS=2
GANYMEDE_MAX_VIDEO_DOWNLOAD_EXECUTIONS=2
GANYMEDE_MAX_VIDEO_CONVERT_EXECUTIONS=3
GANYMEDE_MAX_VIDEO_SPRITE_THUMBNAIL_EXECUTIONS=2
GANYMEDE_OAUTH_ENABLED=true
GANYMEDE_OAUTH_PROVIDER_URL=https://auth.fukurokuju.dev/application/o/ganymede/
GANYMEDE_OAUTH_CLIENT_ID=
GANYMEDE_OAUTH_CLIENT_SECRET=
GANYMEDE_OAUTH_REDIRECT_URL=https://vods.roboces.dev/api/v1/auth/oauth/callback
GANYMEDE_SHOW_SSO_LOGIN_BUTTON=true
GANYMEDE_FORCE_SSO_AUTH=false
GANYMEDE_REQUIRE_LOGIN=false

View file

@ -0,0 +1,40 @@
---
services:
mc:
image: itzg/minecraft-server:java23-graalvm
tty: true
stdin_open: true
ports:
- "25565:25565"
environment:
EULA: "TRUE"
MEMORY: ${MEMORY:-"6G"}
TZ: "Europe/Madrid"
VERSION: 1.20.1
ENABLE_ROLLING_LOGS: true
USE_AIKAR_FLAGS: true
MOTD: "Huesoperrers Minecraft Episodio 2: Ahora es personal"
ICON: /data/icon.png
MAX_PLAYERS: 10
MAX_WORLD_SIZE: 10000
SEED: huesoperrers2
MODE: survival
ONLINE_MODE: false
ALLOW_FLIGHT: true
SERVER_NAME: Huesoperrers and co.
PLAYER_IDLE_TIMEOUT: 15
STOP_SERVER_ANNOUNCE_DELAY: 30
WHITELIST: ${WHITELIST}
OPS: ${OPS}
SYNCHRONIZE: true
MERGE: true
ENFORCE_WHITELIST: true
ENABLE_RCON: false
MAX_TICK_TIME: -1
USER_API_PROVIDER: ${USER_API_PROVIDER:-playerdb}
DIFFICULTY: ${DIFFICULTY:-normal}
ENABLE_AUTOPAUSE: true
DEBUG_AUTOPAUSE: false
TYPE: FORGE
volumes:
- ${MC_DATA_DIR:-/mnt/zeruel/nas1/shared/mc2}:/data

View file

@ -0,0 +1,112 @@
---
services:
dashboard:
image: netbirdio/dashboard:v2.10.0
restart: unless-stopped
ports:
- 8005:80
environment:
NETBIRD_MGMT_API_ENDPOINT: ${NETBIRD_MGMT_API_ENDPOINT:-https://vpn.fukurokuju.dev}
NETBIRD_MGMT_GRPC_API_ENDPOINT: ${NETBIRD_MGMT_GRPC_API_ENDPOINT:-https://vpn.fukurokuju.dev}
AUTH_AUDIENCE: ${NETBIRD_AUTH_AUDIENCE:-64e44b85ebdec2a3cf87c0c9916e2dbb0570f6d87b03ca8d149c3551565c3057ce1e559d16b5399cb7df60646e4e2bc6515842a198efb09d1620ea9ac1d8ace2} # yamllint disable rule:line-length
AUTH_CLIENT_ID: ${NETBIRD_AUTH_CLIENT_ID:-64e44b85ebdec2a3cf87c0c9916e2dbb0570f6d87b03ca8d149c3551565c3057ce1e559d16b5399cb7df60646e4e2bc6515842a198efb09d1620ea9ac1d8ace2} # yamllint disable rule:line-length
AUTH_AUTHORITY: ${NETBIRD_AUTH_AUTHORITY:-https://auth.fukurokuju.dev/application/o/netbird/}
USE_AUTH0: false
AUTH_SUPPORTED_SCOPES: ${NETBIRD_AUTH_SUPPORTED_SCOPES:-api offline_access openid email profile}
AUTH_REDIRECT_URI:
AUTH_SILENT_REDIRECT_URI:
NETBIRD_TOKEN_SOURCE: accessToken
NGINX_SSL_PORT: 443
logging:
driver: "json-file"
options:
max-size: "500m"
max-file: "2"
signal:
image: netbirdio/signal:0.39.2
restart: unless-stopped
volumes:
- netbird-signal:/var/lib/netbird
ports:
- "10000:80"
logging:
driver: "json-file"
options:
max-size: "500m"
max-file: "2"
relay:
image: netbirdio/relay:0.39.2
restart: unless-stopped
environment:
NB_LOG_LEVEL: ${NB_LOG_LEVEL:-info}
NB_LISTEN_ADDRESS: ${NB_LISTEN_ADDRESS:-:33080}
NB_EXPOSED_ADDRESS: ${NB_EXPOSED_ADDRESS:-vpn.fukurokuju.dev:33080}
NB_AUTH_SECRET: ${NB_AUTH_SECRET}
ports:
- "33080:33080"
logging:
driver: "json-file"
options:
max-size: "500m"
max-file: "2"
management:
image: netbirdio/management:0.39.2
restart: unless-stopped
depends_on:
- dashboard
volumes:
- ${NETBIRD_MANAGEMENT_VOLUME:-/mnt/nas1/shared/netbird/management}/data:/var/lib/netbird
- ${NETBIRD_MANAGEMENT_VOLUME:-/mnt/nas1/shared/netbird/management}/management.json:/etc/netbird/management.json:z
ports:
- "33073:443"
command: [
"--port", "443",
"--log-file", "console",
"--log-level", "info",
"--disable-anonymous-metrics=false",
"--single-account-mode-domain=vpn.fukurokuju.dev",
"--dns-domain=netbird.fuku",
]
logging:
driver: "json-file"
options:
max-size: "500m"
max-file: "2"
environment:
- NETBIRD_STORE_ENGINE_POSTGRES_DSN=
coturn:
image: coturn/coturn:4.6
restart: unless-stopped
domainname: vpn.fukurokuju.dev
volumes:
- ${NETBIRD_COTURN_VOLUME:-/mnt/nas1/shared/netbird/coturn}/turnserver.conf:/etc/turnserver.conf:ro
network_mode: host
command:
- -c /etc/turnserver.conf
logging:
driver: "json-file"
options:
max-size: "500m"
max-file: "2"
peer-1:
image: netbirdio/netbird:0.39.2
restart: unless-stopped
volumes:
- ${NETBIRD_PEER_VOLUME:-/mnt/nas1/shared/netbird/peer-1}/data:/etc/netbird
environment:
NB_MANAGEMENT_URL: https://vpn.fukurokuju.dev:443
NB_SETUP_KEY: ${NB_SETUP_KEY}
cap_add:
- NET_ADMIN
depends_on:
- management
- dashboard
- relay
- signal
- coturn
volumes:
netbird-mgmt:
netbird-signal:

View file

@ -0,0 +1,2 @@
NB_AUTH_SECRET=
NB_SETUP_KEY=

View file

@ -0,0 +1,62 @@
FROM nextcloud:31.0.2-apache
RUN set -ex; \
\
apt-get update; \
apt-get install -y --no-install-recommends \
ffmpeg \
ghostscript \
libmagickcore-6.q16-6-extra \
procps \
smbclient \
supervisor \
vim \
clamav \
sudo \
; \
rm -rf /var/lib/apt/lists/*
RUN set -ex; \
\
savedAptMark="$(apt-mark showmanual)"; \
\
apt-get update; \
apt-get install -y --no-install-recommends \
libbz2-dev \
libc-client-dev \
libkrb5-dev \
libsmbclient-dev \
; \
\
docker-php-ext-configure imap --with-kerberos --with-imap-ssl; \
docker-php-ext-install \
bz2 \
imap \
; \
pecl install smbclient; \
docker-php-ext-enable smbclient; \
\
# reset apt-mark's "manual" list so that "purge --auto-remove" will remove all build dependencies
apt-mark auto '.*' > /dev/null; \
apt-mark manual $savedAptMark; \
ldd "$(php -r 'echo ini_get("extension_dir");')"/*.so \
| awk '/=>/ { so = $(NF-1); if (index(so, "/usr/local/") == 1) { next }; gsub("^/(usr/)?", "", so); print so }' \
| sort -u \
| xargs -r dpkg-query --search \
| cut -d: -f1 \
| sort -u \
| xargs -rt apt-mark manual; \
\
apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \
rm -rf /var/lib/apt/lists/*
RUN mkdir -p \
/var/log/supervisord \
/var/run/supervisord \
;
COPY supervisord.conf /
ENV NEXTCLOUD_UPDATE=1
CMD ["/usr/bin/supervisord", "-c", "/supervisord.conf"]

View file

@ -0,0 +1,40 @@
---
services:
imaginary:
image: nextcloud/aio-imaginary:latest
cap_add:
- SYS_NICE
volumes:
- type: tmpfs
target: /tmp:exec
environment:
- TZ=Europe/Madrid
restart: unless-stopped
networks:
- nextcloud
nextcloud:
image: git.roboces.dev/catalin/fukuops:nextcloud-31.0.2
volumes:
- /mnt/nas1/legacy-storage/cloud/cloud/data:/var/www/html/data
- /mnt/nas1/legacy-storage/cloud/cloud/config:/var/www/html/config
- /mnt/nas1/legacy-storage/cloud/cloud/custom_apps:/var/www/html/custom_apps
- /mnt/nas1/legacy-storage/cloud/cloud/apps:/var/www/html/apps
- type: tmpfs
target: /tmp:exec
- supervisorlog:/var/log/supervisor:z
- supervisorpid:/var/run/supervisord/:z
environment:
PHP_MEMORY_LIMIT: ${PHP_MEMORY_LIMIT:-2048M}
NEXTCLOUD_INIT_HTACCESS: ${NEXTCLOUD_INIT_HTACCESS:-1}
restart: unless-stopped
ports:
- 8080:80
networks:
- nextcloud
networks:
nextcloud: {}
volumes:
supervisorlog: {}
supervisorpid: {}

View file

@ -0,0 +1,22 @@
[supervisord]
nodaemon=true
logfile=/var/log/supervisord/supervisord.log
pidfile=/var/run/supervisord/supervisord.pid
childlogdir=/var/log/supervisord/
logfile_maxbytes=50MB ; maximum size of logfile before rotation
logfile_backups=10 ; number of backed up logfiles
loglevel=error
[program:apache2]
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
command=apache2-foreground
[program:cron]
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
command=/cron.sh

View file

@ -14,7 +14,7 @@ services:
webserver:
image: ghcr.io/paperless-ngx/paperless-ngx:2.20.15
image: ghcr.io/paperless-ngx/paperless-ngx:2.14.7
restart: unless-stopped
ports:
- 8002:8000

View file

@ -1,18 +0,0 @@
---
services:
rustical:
image: ghcr.io/lennart-k/rustical:0.12.12
restart: unless-stopped
ports:
- '4000:4000'
volumes:
- "${RUSTICAL_DATA_VOLUME:-/mnt/nas1/shared/rustical/:/var/lib/rustical/}"
environment:
RUSTICAL_OIDC__NAME: ${RUSTICAL_OIDC_NAME:-Authentik}
RUSTICAL_OIDC__ISSUER: ${RUSTICAL_OIDC_ISSUER:-https://auth.fukurokuju.dev/application/o/rustical/}
RUSTICAL_OIDC__CLIENT_ID: ${RUSTICAL_OIDC_CLIENT_ID}
RUSTICAL_OIDC__CLIENT_SECRET: ${RUSTICAL_OIDC_CLIENT_SECRET}
RUSTICAL_OIDC__CLAIM_USERID: ${RUSTICAL_OIDC_CLAIM_USERID:-preferred_username}
RUSTICAL_OIDC__SCOPES: '["openid", "profile", "groups"]'
RUSTICAL_OIDC__ALLOW_SIGN_UP: "true"
RUSTICAL_FRONTEND__ALLOW_PASSWORD_LOGIN: ${RUSTICAL_FRONTED_ALLOW_PASSWORD_LOGIN:-false}

View file

@ -1,18 +0,0 @@
---
services:
tailscale:
image: tailscale/tailscale:v1.96.5
hostname: tailscale
environment:
TS_AUTHKEY: ${TS_AUTHKEY}
TS_HOSTNAME: ${TS_HOSTNAME:-docker-exit-node}
TS_EXTRA_ARGS: ${TS_EXTRA_ARGS:---advertise-exit-node}
TS_ROUTES: ${TS_ROUTES:-192.168.1.0/24}
TS_STATE_DIR: /var/lib/tailscale
volumes:
- ${TS_VOLUME:-/mnt/nas1/shared/tailscale}:/var/lib/tailscale
devices:
- /dev/net/tun:/dev/net/tun
cap_add:
- net_admin
restart: unless-stopped

View file

@ -1,5 +0,0 @@
TS_AUTHKEY=
TS_HOSTNAME=docker-exit-node
TS_EXTRA_ARGS=--advertise-exit-node
TS_ROUTES=192.168.1.0/24
TS_VOLUME=/mnt/nas1/shared/tailscale

View file

@ -1,7 +1,7 @@
---
services:
vaultwarden:
image: vaultwarden/server:1.36.0-alpine
image: vaultwarden/server:1.33.2-alpine
restart: unless-stopped
environment:
DATABASE_URL: ${DATABASE_URL}

View file

@ -12,7 +12,7 @@ spec:
sources:
- chart: authentik
repoURL: https://charts.goauthentik.io/
targetRevision: 2026.2.*
targetRevision: 2025.2.*
helm:
valuesObject:
authentik:
@ -27,7 +27,7 @@ spec:
from: auth@fukurokuju.dev
postgresql:
host: 192.168.1.3
port: 5432
port: 55432
name: auth
user: file:///authentik-creds/pg_username
password: file:///authentik-creds/pg_password

View file

@ -2,39 +2,29 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: democratic-csi
namespace: argocd
name: democratic-csi
namespace: argocd
spec:
destination:
name: ''
namespace: democratic-csi
server: https://kubernetes.default.svc
sources:
- chart: democratic-csi
repoURL: https://democratic-csi.github.io/charts/
targetRevision: 0.15.*
helm:
releaseName: zfs-nfs
valuesObject:
node:
driver:
image:
tag: next
controller:
driver:
image:
tag: next
csiDriver:
name: org.dcsi.nfs
driver:
image:
tag: next
existingConfigSecret: secrets-dcsi
config:
driver: freenas-api-nfs
- repoURL: https://git.roboces.dev/catalin/fukuops.git
path: k8s/services/dcsi
targetRevision: main
project: management
syncPolicy:
automated: {}
destination:
name: ''
namespace: democratic-csi
server: https://kubernetes.default.svc
sources:
- chart: democratic-csi
repoURL: https://democratic-csi.github.io/charts/
targetRevision: 0.14.*
helm:
releaseName: zfs-nfs
valuesObject:
csiDriver:
name: org.dcsi.nfs
driver:
existingConfigSecret: secrets-dcsi
config:
driver: freenas-api-nfs
- repoURL: https://git.roboces.dev/catalin/fukuops.git
path: k8s/services/dcsi
targetRevision: main
project: management
syncPolicy:
automated: {}

View file

@ -0,0 +1,46 @@
---
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: elastic
namespace: argocd
spec:
destination:
name: ''
namespace: apps-fuku
server: https://kubernetes.default.svc
sources:
- chart: elasticsearch
repoURL: registry-1.docker.io/bitnamicharts
targetRevision: 21.4.9
helm:
valuesObject:
service:
type: LoadBalancer
master:
persistence:
enabled: true
storageClass: truenas-nfs-csi
accessModes:
- ReadWriteMany
size: 50Gi
ingress:
enabled: true
hostname: elastic.fuku
tls: true
selfSigned: true
ingressClassName: traefik
data:
persistence:
enabled: true
storageClass: truenas-nfs-csi
accessModes:
- ReadWriteMany
size: 50Gi
autoscaling:
enabled: true
maxReplicas: 3
minReplicas: 1
project: fuku
syncPolicy:
automated: {}

View file

@ -0,0 +1,45 @@
---
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: factorio
namespace: argocd
spec:
destination:
name: ''
namespace: apps-fuku
server: https://kubernetes.default.svc
sources:
- chart: factorio-server-charts
repoURL: https://sqljames.github.io/factorio-server-charts/
targetRevision: 2.5.*
helm:
valuesObject:
rcon:
passwordSecret: secrets-factorio
nodeSelector:
kubernetes.io/hostname: agent1
image:
tag: latest
factorioServer:
save_name: fukurokuju-space
admin_list:
- Phireh
account:
accountSecret: secrets-factorio
server_settings:
name: factorio-fukurokuju
visibility:
public: false
require_user_verification: false
persistence:
storageClassName: truenas-nfs-csi
serverPassword:
passwordSecret: secrets-factorio
- repoURL: https://git.roboces.dev/catalin/fukuops.git
path: k8s/services/factorio
targetRevision: main
project: fuku
syncPolicy:
automated: {}

View file

@ -4,8 +4,6 @@ kind: Application
metadata:
name: forgejo
namespace: argocd
annotations:
argocd.argoproj.io/sync-options: Force=true,Replace=true
spec:
destination:
name: ''
@ -14,10 +12,10 @@ spec:
sources:
- chart: forgejo
repoURL: code.forgejo.org/forgejo-helm
targetRevision: 17.0.1
targetRevision: 11.0.5
helm:
valuesObject:
replicaCount: 1
replicaCount: 2
service:
http:
type: LoadBalancer
@ -49,8 +47,15 @@ spec:
serviceMonitor:
enabled: true
config:
indexer:
ISSUE_INDEXER_CONN_STR: http://elastic-elasticsearch.apps-fuku.svc.cluster.local:9200
ISSUE_INDEXER_ENABLED: true
ISSUE_INDEXER_TYPE: elasticsearch
REPO_INDEXER_ENABLED: false
REPO_INDEXER_TYPE: elasticsearch
actions:
ENABLED: false
ENABLED: true
DEFAULT_ACTIONS_URL: https://github.com
picture:
DISABLE_GRAVATAR: false
ENABLE_FEDERATED_AVATAR: true
@ -82,9 +87,9 @@ spec:
NO_REPLY_ADDRESS: git@fukurokuju.dev
server:
ROOT_URL: https://git.roboces.dev/
additionalConfigSources:
- secret:
secretName: gitea-ini-redis
- secret:
secretName: secrets-forgejo-email
- secret:
@ -95,10 +100,10 @@ spec:
secretName: secrets-forgejo-internal
postgresql-ha:
enabled: false
redis:
enabled: false
redis-cluster:
enabled: false
- path: k8s/services/forgejo
repoURL: https://git.roboces.dev/catalin/fukuops.git
targetRevision: main
project: roboces
syncPolicy:
automated: {}

View file

@ -12,7 +12,7 @@ spec:
sources:
- path: charts/huesoporro
repoURL: https://git.roboces.dev/catalin/huesoporro.git
targetRevision: v0.3.7
targetRevision: v0.3.3
helm:
valuesObject:
secret:

View file

@ -13,7 +13,7 @@ spec:
source:
chart: kured
repoURL: https://kubereboot.github.io/charts
targetRevision: 5.11.*
targetRevision: 5.6.*
helm:
valuesObject:
configuration.rebootDays:

View file

@ -18,13 +18,13 @@ spec:
targetRevision: main
- chart: meilisearch
repoURL: https://meilisearch.github.io/meilisearch-kubernetes
targetRevision: 0.32.*
targetRevision: 0.12.*
helm:
valuesObject:
environment:
MEILI_ENV: production
auth:
existingMasterKeySecret: meili
existingMasterKeySecret: meilisearch-master-key
service:
type: NodePort
port: 7700

View file

@ -2,84 +2,18 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: miniflux
namespace: argocd
name: miniflux
namespace: argocd
spec:
destination:
name: ''
namespace: apps-roboces
server: https://kubernetes.default.svc
sources:
- path: k8s/charts/miniflux
repoURL: https://git.roboces.dev/catalin/fukuops.git
targetRevision: main
helm:
valuesObject:
replicaCount: 3
securityContext:
allowPrivilegeEscalation: false
readOnlyRootFilesystem: true
runAsUser: 10000
runAsGroup: 10000
capabilities:
drop:
- all
service:
type: LoadBalancer
ingress:
enabled: true
className: "traefik"
hosts:
- host: feeds.roboces.dev
paths:
- path: /
pathType: Prefix
resources:
requests:
cpu: 300m
memory: 300Mi
ephemeral-storage: 2Gi
limits:
cpu: 400m
memory: 500Mi
ephemeral-storage: 4Gi
livenessProbe:
tcpSocket:
port: 8080
initialDelaySeconds: 5
timeoutSeconds: 15
successThreshold: 1
failureThreshold: 3
periodSeconds: 10
readinessProbe:
httpGet:
path: /
port: 8080
initialDelaySeconds: 15
timeoutSeconds: 2
successThreshold: 1
failureThreshold: 3
periodSeconds: 10
podDisruptionBudget:
enabled: true
maxUnavailable: 1
env:
RUN_MIGRATIONS: "1"
CREATE_ADMIN: "1"
OAUTH2_PROVIDER: oidc
OAUTH2_REDIRECT_URL: https://feeds.roboces.dev/oauth2/oidc/callback
OAUTH2_OIDC_DISCOVERY_ENDPOINT: https://auth.fukurokuju.dev/application/o/miniflux/
OAUTH2_USER_CREATION: "1"
FETCH_YOUTUBE_WATCH_TIME: "1"
WORKER_POOL_SIZE: "1"
POLLING_FREQUENCY: "120"
BATCH_SIZE: "25"
METRICS_COLLECTOR: "1"
METRICS_ALLOWED_NETWORKS: 10.42.1.0/16
secret:
existingSecretName: miniflux
project: roboces
syncPolicy:
automated:
prune: true
selfHeal: true
destination:
name: ''
namespace: apps-roboces
server: https://kubernetes.default.svc
source:
path: k8s/services/miniflux
repoURL: https://git.roboces.dev/catalin/fukuops.git
targetRevision: main
sources: []
project: roboces
syncPolicy:
automated: {}

View file

@ -1,54 +0,0 @@
---
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: oxicloud
namespace: argocd
spec:
destination:
name: ''
namespace: apps-roboces
server: https://kubernetes.default.svc
sources:
- path: k8s/charts/oxicloud
repoURL: https://git.roboces.dev/catalin/fukuops.git
targetRevision: main
helm:
valuesObject:
image:
repository: diocrafts/oxicloud
pullPolicy: Always
tag: "0.5.6"
persistence:
enabled: true
storageClass: "truenas-nfs-csi"
accessMode: ReadWriteMany
size: 50Gi
service:
type: LoadBalancer
config:
server:
port: 8086
host: "0.0.0.0"
baseUrl: "https://cloud.roboces.dev"
features:
enableAuth: "true"
enableSharing: "true"
mimalloc:
purgeDelay: "0"
allowLargeOsPages: "0"
secrets:
existingSecret: oxicloud
wopi:
enabled: false
ingress:
className: "traefik"
hosts:
- host: cloud.roboces.dev
paths:
- path: /
pathType: ImplementationSpecific
tls: []
project: roboces
syncPolicy:
automated: {}

View file

@ -15,7 +15,7 @@ spec:
sources:
- repoURL: https://portainer.github.io/k8s/
chart: portainer
targetRevision: 239.1.*
targetRevision: 1.0.*
helm:
valuesObject:
service:

View file

@ -13,7 +13,7 @@ spec:
sources:
- chart: renovate
repoURL: https://docs.renovatebot.com/helm-charts
targetRevision: 46.142.*
targetRevision: 39.227.*
helm:
valuesObject:
renovate:

View file

@ -12,7 +12,7 @@ spec:
source:
chart: sealed-secrets
repoURL: https://bitnami-labs.github.io/sealed-secrets
targetRevision: 2.18.*
targetRevision: 2.17.*
helm:
releaseName: sealed-secrets
valuesObject:

View file

@ -0,0 +1,41 @@
---
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: valheim
namespace: argocd
spec:
destination:
name: ''
namespace: apps-fuku
server: https://kubernetes.default.svc
sources:
- path: k8s/charts/valheim-server
repoURL: https://git.roboces.dev/catalin/fukuops.git
targetRevision: main
helm:
valuesObject:
server:
name: "Huesoperrers Váljei"
public: 1
timezone: Europe/Madrid
secret:
name: valheim-secrets
key: server-password
persistence:
saves:
accessMode: ReadWriteMany
server:
accessMode: ReadWriteMany
backups:
accessMode: ReadWriteMany
resources:
requests:
memory: 4Gi
cpu: 2000m
limits:
memory: 8Gi
cpu: 4000m
project: fuku
syncPolicy:
automated: {}

29
k8s/argo-apps/valkey.yaml Normal file
View file

@ -0,0 +1,29 @@
---
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: valkey
namespace: argocd
spec:
destination:
name: ''
namespace: apps-fuku
server: https://kubernetes.default.svc
project: fuku
syncPolicy:
automated: {}
sources:
- chart: valkey-cluster
repoURL: registry-1.docker.io/bitnamicharts
targetRevision: 2.0.*
helm:
valuesObject:
existingSecret: secrets-valkey
existingSecretPasswordKey: REDIS_PASSWORD
service:
type: LoadBalancer
persistence:
storageClass: truenas-nfs-csi
accessModes:
- ReadWriteMany
size: 50Gi

View file

@ -1,64 +0,0 @@
---
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: vault-sm
namespace: argocd
spec:
destination:
name: ''
namespace: apps-fuku
server: https://kubernetes.default.svc
sources:
- chart: vaultwarden-kubernetes-secrets
repoURL: ghcr.io/antoniolago/charts
targetRevision: 1.4.01
helm:
valuesObject:
api:
enabled: true
service:
type: LoadBalancer
persistence:
storageClass: truenas-nfs-csi
dashboard:
enabled: true
service:
type: LoadBalancer
ingress:
enabled: true
className: traefik
hosts:
- host: vault-secrets.fuku
paths:
- path: /
pathType: Prefix
backend: dashboard
port: 80
- path: /api
pathType: Prefix
backend: api
port: 8080
env:
config:
VAULTWARDEN__SERVERURL: "https://vault.roboces.dev"
secrets:
BW_CLIENTID:
secretName: "vaultwarden-kubernetes-secrets"
secretKey: "BW_CLIENTID"
BW_CLIENTSECRET:
secretName: "vaultwarden-kubernetes-secrets"
secretKey: "BW_CLIENTSECRET"
VAULTWARDEN__MASTERPASSWORD:
secretName: "vaultwarden-kubernetes-secrets"
secretKey: "VAULTWARDEN__MASTERPASSWORD"
- path: k8s/services/vaultwarden-kubernetes-secrets
repoURL: https://git.roboces.dev/catalin/fukuops.git
targetRevision: main
project: fuku
syncPolicy:
automated:
prune: true
selfHeal: true
syncOptions:
- CreateNamespace=true

View file

@ -1,57 +0,0 @@
---
# Argo CD Application: Woodpecker CI server + agent from the upstream OCI
# Helm chart, integrated with the Forgejo instance at git.roboces.dev.
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
  name: woodpecker
  namespace: argocd
  annotations:
    # Force-replace on sync (chart ships resources that do not patch cleanly).
    argocd.argoproj.io/sync-options: Force=true,Replace=true
spec:
  destination:
    name: ''
    namespace: apps-roboces
    server: https://kubernetes.default.svc
  sources:
    - chart: woodpecker
      repoURL: ghcr.io/woodpecker-ci/helm
      targetRevision: 3.5.1
      helm:
        valuesObject:
          agent:
            persistence:
              storageClass: truenas-nfs-csi
              accessModes:
                - ReadWriteMany
          server:
            env:
              WOODPECKER_ADMIN: 'woodpecker,admin,catalin'
              WOODPECKER_HOST: 'https://ci.roboces.dev'
              # Forgejo OAuth integration; client id/secret come from the
              # "woodpecker" Secret, not plain values.
              WOODPECKER_FORGEJO: "true"
              WOODPECKER_FORGEJO_URL: "https://git.roboces.dev"
              WOODPECKER_FORGEJO_CLIENT:
                valueFrom:
                  secretKeyRef:
                    name: woodpecker
                    key: WOODPECKER_FORGEJO_CLIENT
              WOODPECKER_FORGEJO_SECRET:
                valueFrom:
                  secretKeyRef:
                    name: woodpecker
                    key: WOODPECKER_FORGEJO_SECRET
            persistentVolume:
              storageClass: truenas-nfs-csi
              accessModes:
                - ReadWriteMany
            service:
              type: LoadBalancer
            ingress:
              enabled: true
              ingressClassName: traefik
              hosts:
                - host: ci.roboces.dev
                  paths:
                    - path: /
              tls: []
  project: roboces
  syncPolicy:
    automated: {}

View file

@ -1,6 +0,0 @@
# Helm chart metadata for a minimal in-house Miniflux chart.
apiVersion: v2
name: miniflux
description: A Helm chart for Miniflux RSS reader
type: application
# version = chart packaging version; appVersion = default Miniflux image tag.
version: 0.1.0
appVersion: "2.2.18"

View file

@ -1,62 +0,0 @@
{{- /* Named-template helpers for the miniflux chart (comments here are
stripped at render time and never appear in output). */}}
{{/*
Expand the name of the chart.
*/}}
{{- define "miniflux.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "miniflux.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "miniflux.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "miniflux.labels" -}}
helm.sh/chart: {{ include "miniflux.chart" . }}
{{ include "miniflux.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "miniflux.selectorLabels" -}}
app.kubernetes.io/name: {{ include "miniflux.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "miniflux.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "miniflux.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View file

@ -1,73 +0,0 @@
{{- /* Deployment for Miniflux. All env/secret config flows in via envFrom
(the chart-managed Secret or secret.existingSecretName) plus .Values.env. */}}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "miniflux.fullname" . }}
  labels:
    {{- include "miniflux.labels" . | nindent 4 }}
  annotations:
    kube-score/ignore: pod-networkpolicy,deployment-has-host-podantiaffinity
spec:
  {{- /* Fixed replica count unless an HPA owns scaling. */}}
  {{- if not .Values.autoscaling.enabled }}
  replicas: {{ .Values.replicaCount }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "miniflux.selectorLabels" . | nindent 6 }}
  strategy:
    rollingUpdate:
      maxSurge: 50%
      maxUnavailable: 50%
    type: RollingUpdate
  template:
    metadata:
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      labels:
        {{- include "miniflux.selectorLabels" . | nindent 8 }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          {{- /* NOTE(review): this secretRef is emitted even when
               .Values.secret.enabled is false — confirm a Secret exists in
               that case or the pod will fail to start. */}}
          envFrom:
            - secretRef:
                name: {{ .Values.secret.existingSecretName | default (include "miniflux.fullname" .) }}
          env:
            {{- range $key, $value := .Values.env }}
            - name: {{ $key }}
              value: {{ $value | quote }}
            {{- end }}
          ports:
            - name: http
              containerPort: 8080
              protocol: TCP
          livenessProbe:
            {{- toYaml .Values.livenessProbe | nindent 12 }}
          readinessProbe:
            {{- toYaml .Values.readinessProbe | nindent 12 }}
          resources:
            {{- toYaml .Values.resources | nindent 12 }}
      {{- with .Values.nodeSelector }}
      nodeSelector:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      restartPolicy: Always
      automountServiceAccountToken: false

View file

@ -1,45 +0,0 @@
{{- /* Optional Ingress: every host/path routes to the chart Service on
.Values.service.port. Rendered only when ingress.enabled is true. */}}
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "miniflux.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ $fullName }}
  labels:
    {{- include "miniflux.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if .Values.ingress.className }}
  ingressClassName: {{ .Values.ingress.className }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            {{- if .pathType }}
            pathType: {{ .pathType }}
            {{- end }}
            backend:
              service:
                name: {{ $fullName }}
                port:
                  number: {{ $svcPort }}
          {{- end }}
    {{- end }}
{{- end }}

View file

@ -1,18 +0,0 @@
{{- /* Optional PodDisruptionBudget; supports either minAvailable or
maxUnavailable (set exactly one in values). */}}
{{- if .Values.podDisruptionBudget.enabled -}}
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
  name: {{ include "miniflux.fullname" . }}
  labels:
    {{- include "miniflux.labels" . | nindent 4 }}
spec:
  {{- if .Values.podDisruptionBudget.minAvailable }}
  minAvailable: {{ .Values.podDisruptionBudget.minAvailable }}
  {{- end }}
  {{- if .Values.podDisruptionBudget.maxUnavailable }}
  maxUnavailable: {{ .Values.podDisruptionBudget.maxUnavailable }}
  {{- end }}
  selector:
    matchLabels:
      {{- include "miniflux.selectorLabels" . | nindent 6 }}
{{- end }}

View file

@ -1,13 +0,0 @@
{{- /* Chart-managed Secret built from .Values.secret.data; skipped when the
user points at an existing Secret via secret.existingSecretName. */}}
{{- if and .Values.secret.enabled (not .Values.secret.existingSecretName) -}}
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "miniflux.fullname" . }}
  labels:
    {{- include "miniflux.labels" . | nindent 4 }}
type: Opaque
stringData:
  {{- range $key, $value := .Values.secret.data }}
  {{ $key }}: {{ $value | quote }}
  {{- end }}
{{- end }}

View file

@ -1,15 +0,0 @@
{{- /* Service exposing the Miniflux container (named port "http") on
.Values.service.port -> .Values.service.targetPort. */}}
apiVersion: v1
kind: Service
metadata:
  name: {{ include "miniflux.fullname" . }}
  labels:
    {{- include "miniflux.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type }}
  ports:
    - port: {{ .Values.service.port }}
      targetPort: {{ .Values.service.targetPort }}
      protocol: TCP
      name: http
  selector:
    {{- include "miniflux.selectorLabels" . | nindent 4 }}

View file

@ -1,42 +0,0 @@
# Configuration is managed in k8s/argo-apps/miniflux.yaml
# Defaults below are deliberately minimal; the Argo Application overrides them.
replicaCount: 1
image:
  repository: miniflux/miniflux
  pullPolicy: Always
  # Empty tag falls back to Chart.AppVersion in the deployment template.
  tag: ""
imagePullSecrets: []
podAnnotations: {}
podSecurityContext: {}
securityContext: {}
service:
  type: ClusterIP
  port: 8888
  targetPort: 8080
ingress:
  enabled: false
resources: {}
livenessProbe: {}
readinessProbe: {}
autoscaling:
  enabled: false
nodeSelector: {}
tolerations: []
affinity: {}
podDisruptionBudget:
  enabled: false
env: {}
secret:
  enabled: false
  existingSecretName: ""
  data: {}

View file

@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View file

@ -1,8 +0,0 @@
---
# Helm chart metadata for OxiCloud (self-hosted cloud storage, Rust).
apiVersion: v2
name: oxicloud
description: |
  Ultra-fast, secure & lightweight self-hosted cloud storage — your files, photos, calendars & contacts, all in one place. Built in Rust.
type: application
# version = chart packaging version; appVersion = upstream OxiCloud release.
version: 0.1.0
appVersion: "0.5.2"

View file

@ -1,32 +0,0 @@
{{- /* Named-template helpers for the oxicloud chart. */}}
{{/* Expand the name of the chart. */}}
{{- define "oxicloud.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/* Create a default fully qualified app name. */}}
{{- define "oxicloud.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/* Common labels */}}
{{- define "oxicloud.labels" -}}
helm.sh/chart: {{ printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{ include "oxicloud.selectorLabels" . }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/* Selector labels */}}
{{- define "oxicloud.selectorLabels" -}}
app.kubernetes.io/name: {{ include "oxicloud.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

View file

@ -1,22 +0,0 @@
---
{{- /* Non-secret OxiCloud configuration, injected wholesale via envFrom in
the StatefulSet. */}}
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ include "oxicloud.fullname" . }}-config
data:
  OXICLOUD_SERVER_PORT: {{ .Values.config.server.port | quote }}
  OXICLOUD_SERVER_HOST: {{ .Values.config.server.host | quote }}
  {{- if .Values.config.server.baseUrl }}
  OXICLOUD_BASE_URL: {{ .Values.config.server.baseUrl | quote }}
  {{- end }}
  OXICLOUD_ENABLE_AUTH: {{ .Values.config.features.enableAuth | quote }}
  OXICLOUD_ENABLE_FILE_SHARING: {{ .Values.config.features.enableSharing | quote }}
  MIMALLOC_PURGE_DELAY: {{ .Values.config.mimalloc.purgeDelay | quote }}
  MIMALLOC_ALLOW_LARGE_OS_PAGES: {{ .Values.config.mimalloc.allowLargeOsPages | quote }}
  {{- /* WOPI discovery points at baseUrl because Collabora is reached
       through the same Ingress host (see ingress.yaml). */}}
  {{- if .Values.wopi.enabled }}
  OXICLOUD_WOPI_ENABLED: "true"
  OXICLOUD_WOPI_DISCOVERY_URL: "{{ .Values.config.server.baseUrl }}/hosting/discovery"
  {{- else }}
  OXICLOUD_WOPI_ENABLED: "false"
  {{- end }}

View file

@ -1,64 +0,0 @@
---
{{- /* Single-host Ingress: Collabora (WOPI) paths are split off to the -wopi
Service; everything else falls through to OxiCloud. Only the FIRST entry of
.Values.ingress.hosts is used. */}}
{{- if .Values.ingress.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ include "oxicloud.fullname" . }}
  labels:
    {{- include "oxicloud.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if .Values.ingress.className }}
  ingressClassName: {{ .Values.ingress.className }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    - host: {{ (index .Values.ingress.hosts 0).host | quote }}
      http:
        paths:
          {{- if .Values.wopi.enabled }}
          # Route Collabora traffic to the WOPI pod
          - path: /browser
            pathType: Prefix
            backend:
              service:
                name: {{ include "oxicloud.fullname" $ }}-wopi
                port:
                  number: {{ .Values.wopi.collabora.service.port }}
          - path: /hosting
            pathType: Prefix
            backend:
              service:
                name: {{ include "oxicloud.fullname" $ }}-wopi
                port:
                  number: {{ .Values.wopi.collabora.service.port }}
          - path: /cool
            pathType: Prefix
            backend:
              service:
                name: {{ include "oxicloud.fullname" $ }}-wopi
                port:
                  number: {{ .Values.wopi.collabora.service.port }}
          {{- end }}
          # Default Catch-All: Route everything else to OxiCloud
          - path: /
            pathType: Prefix
            backend:
              service:
                name: {{ include "oxicloud.fullname" $ }}
                port:
                  number: {{ $.Values.service.port }}
{{- end }}

View file

@ -1,19 +0,0 @@
---
{{- /* Chart-managed Secret; suppressed entirely when the user supplies
secrets.existingSecret. DB_PASSWORD is always emitted; JWT and WOPI admin
credentials are conditional. */}}
{{- if not .Values.secrets.existingSecret }}
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "oxicloud.fullname" . }}-secret
  labels:
    {{- include "oxicloud.labels" . | nindent 4 }}
type: Opaque
data:
  {{- if .Values.secrets.jwtSecret }}
  OXICLOUD_JWT_SECRET: {{ .Values.secrets.jwtSecret | b64enc | quote }}
  {{- end }}
  DB_PASSWORD: {{ .Values.database.password | b64enc | quote }}
  {{- if .Values.wopi.enabled }}
  WOPI_ADMIN_USERNAME: {{ .Values.wopi.collabora.admin.username | b64enc | quote }}
  WOPI_ADMIN_PASSWORD: {{ .Values.wopi.collabora.admin.password | b64enc | quote }}
  {{- end }}
{{- end }}

View file

@ -1,32 +0,0 @@
---
apiVersion: v1
kind: Service
metadata:
name: {{ include "oxicloud.fullname" . }}
labels:
{{- include "oxicloud.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "oxicloud.selectorLabels" . | nindent 4 }}
---
apiVersion: v1
kind: Service
metadata:
name: {{ include "oxicloud.fullname" . }}-headless
labels:
{{- include "oxicloud.labels" . | nindent 4 }}
spec:
clusterIP: None
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "oxicloud.selectorLabels" . | nindent 4 }}

View file

@ -1,53 +0,0 @@
{{- /* OxiCloud StatefulSet: config via ConfigMap + Secret envFrom, data
under /app/storage backed either by a PVC template (persistence.enabled) or
an emptyDir fallback. */}}
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: {{ include "oxicloud.fullname" . }}
  labels:
    {{- include "oxicloud.labels" . | nindent 4 }}
spec:
  serviceName: {{ include "oxicloud.fullname" . }}-headless
  replicas: {{ .Values.replicaCount }}
  selector:
    matchLabels:
      {{- include "oxicloud.selectorLabels" . | nindent 6 }}
  template:
    metadata:
      labels:
        {{- include "oxicloud.selectorLabels" . | nindent 8 }}
    spec:
      containers:
        - name: oxicloud
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          ports:
            - name: http
              containerPort: 8086
              protocol: TCP
          envFrom:
            - configMapRef:
                name: {{ include "oxicloud.fullname" . }}-config
            - secretRef:
                name: {{ if .Values.secrets.existingSecret }}{{ .Values.secrets.existingSecret }}{{ else }}{{ include "oxicloud.fullname" . }}-secret{{ end }}
          volumeMounts:
            - name: storage-data
              mountPath: /app/storage
      {{- /* Non-persistent fallback: data is lost on pod restart. */}}
      {{- if not .Values.persistence.enabled }}
      volumes:
        - name: storage-data
          emptyDir: {}
      {{- end }}
  {{- if .Values.persistence.enabled }}
  volumeClaimTemplates:
    - metadata:
        name: storage-data
      spec:
        accessModes:
          - {{ .Values.persistence.accessMode }}
        {{- if .Values.persistence.storageClass }}
        storageClassName: {{ .Values.persistence.storageClass }}
        {{- end }}
        resources:
          requests:
            storage: {{ .Values.persistence.size }}
  {{- end }}

View file

@ -1,58 +0,0 @@
---
{{- /* Collabora Online (WOPI) sidecar Deployment, rendered only when
wopi.enabled. Admin credentials come from the same Secret as OxiCloud. */}}
{{- if .Values.wopi.enabled -}}
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "oxicloud.fullname" . }}-wopi
  labels:
    {{- include "oxicloud.labels" . | nindent 4 }}
    app.kubernetes.io/component: wopi
spec:
  replicas: 1
  selector:
    matchLabels:
      {{- include "oxicloud.selectorLabels" . | nindent 6 }}
      app.kubernetes.io/component: wopi
  template:
    metadata:
      labels:
        {{- include "oxicloud.selectorLabels" . | nindent 8 }}
        app.kubernetes.io/component: wopi
    spec:
      containers:
        - name: collabora
          image: "{{ .Values.wopi.collabora.image.repository }}:{{ .Values.wopi.collabora.image.tag }}"
          imagePullPolicy: {{ .Values.wopi.collabora.image.pullPolicy }}
          # Required for Collabora to build chroot jails
          securityContext:
            capabilities:
              add:
                - MKNOD
          ports:
            - name: wopi
              containerPort: 9980
              protocol: TCP
          env:
            # NOTE(review): aliasgroup1 is built with http:// while the chart
            # default baseUrl is https — confirm the scheme matches the WOPI
            # host Collabora must accept.
            - name: aliasgroup1
              value: "http://{{ .Values.wopi.collabora.domain }}"
            - name: server_name
              value: {{ .Values.wopi.collabora.domain | quote }}
            - name: extra_params
              value: {{ .Values.wopi.collabora.extraParams | quote }}
            - name: username
              valueFrom:
                secretKeyRef:
                  name: {{ if .Values.secrets.existingSecret }}{{ .Values.secrets.existingSecret }}{{ else }}{{ include "oxicloud.fullname" . }}-secret{{ end }}
                  key: WOPI_ADMIN_USERNAME
            - name: password
              valueFrom:
                secretKeyRef:
                  name: {{ if .Values.secrets.existingSecret }}{{ .Values.secrets.existingSecret }}{{ else }}{{ include "oxicloud.fullname" . }}-secret{{ end }}
                  key: WOPI_ADMIN_PASSWORD
          readinessProbe:
            httpGet:
              path: /hosting/discovery
              port: wopi
            initialDelaySeconds: 10
            periodSeconds: 10
{{- end }}

View file

@ -1,20 +0,0 @@
---
{{- /* ClusterIP Service fronting the Collabora WOPI Deployment; the Ingress
routes /browser, /hosting and /cool here. */}}
{{- if .Values.wopi.enabled -}}
apiVersion: v1
kind: Service
metadata:
  name: {{ include "oxicloud.fullname" . }}-wopi
  labels:
    {{- include "oxicloud.labels" . | nindent 4 }}
    app.kubernetes.io/component: wopi
spec:
  type: ClusterIP
  ports:
    - port: {{ .Values.wopi.collabora.service.port }}
      targetPort: wopi
      protocol: TCP
      name: wopi
  selector:
    {{- include "oxicloud.selectorLabels" . | nindent 4 }}
    app.kubernetes.io/component: wopi
{{- end }}

View file

@ -1,67 +0,0 @@
---
# Default values for the oxicloud chart. Placeholder hostnames/passwords are
# expected to be overridden per deployment.
replicaCount: 1
image:
  repository: oxicloud
  pullPolicy: IfNotPresent
  tag: "latest"
database:
  host: "postgres.example.com"
  port: 5432
  username: "postgres"
  # Placeholder — override in production (feeds DB_PASSWORD in the Secret).
  password: "change_me_in_production"
  name: "oxicloud"
config:
  server:
    port: 8086
    host: "0.0.0.0"
    baseUrl: "https://cloud.example.com"
  # Feature flags are string-typed on purpose: they land in a ConfigMap as
  # env-var values.
  features:
    enableAuth: "true"
    enableSharing: "true"
  mimalloc:
    purgeDelay: "0"
    allowLargeOsPages: "0"
persistence:
  enabled: true
  storageClass: ""
  accessMode: ReadWriteOnce
  size: 50Gi
wopi:
  enabled: true
  collabora:
    # FIX: the wopi-deployment template reads .Values.wopi.collabora.domain
    # (server_name / aliasgroup1), which was never defined here, so those env
    # vars rendered empty. "url" is kept for backward compatibility.
    domain: "cloud.example.com"
    url: "cloud.example.com"
    image:
      repository: collabora/code
      tag: latest
      pullPolicy: IfNotPresent
    service:
      port: 9980
    # Collabora admin console credentials (stored in the chart Secret).
    admin:
      username: admin
      password: "wopi_admin_password"
    # TLS is terminated at the Ingress, so Collabora runs plain HTTP.
    extraParams: "--o:ssl.enable=false --o:ssl.termination=false --o:net.frame_ancestors=http://* https://*"
secrets:
  existingSecret: ""
  jwtSecret: ""
  oidcClientSecret: ""
service:
  type: ClusterIP
  port: 8086
ingress:
  enabled: true
  className: "traefik"
  annotations: {}
  hosts:
    - host: cloud.example.com
      paths:
        - path: /
          pathType: ImplementationSpecific
  tls: []

View file

@ -3,7 +3,7 @@ image:
# -- Docker repository to use
repository: mbround18/valheim
# -- Docker tag to use - use "latest" for most current version
tag: "3.6"
tag: "3.1"
# -- Image pull policy
pullPolicy: Always

View file

@ -1,5 +1,5 @@
---
apiVersion: traefik.io/v1alpha1
apiVersion: traefik.containo.us/v1alpha1
kind: IngressRoute
metadata:
name: argocd-server

View file

@ -25,12 +25,5 @@ spec:
- https://charts.crystalnet.org
- https://portainer.github.io/k8s/
- https://docs.renovatebot.com/helm-charts
- registry-1.docker.io/bitnamicharts
- https://meilisearch.github.io/meilisearch-kubernetes
- https://kubetail-org.github.io/helm-charts/
- https://groundhog2k.github.io/helm-charts/
- registry-1.docker.io/cloudpirates
- https://vmware-tanzu.github.io/helm-charts/
- https://helm.runix.net
- https://rcourtman.github.io/Pulse
- ghcr.io/antoniolago/charts
- https://helm.elastic.co

View file

@ -8,11 +8,8 @@ spec:
destinations:
- namespace: apps-roboces
server: https://kubernetes.default.svc
- namespace: woodpecker
server: https://kubernetes.default.svc
sourceRepos:
- https://git.roboces.dev/catalin/fukuops.git
- code.forgejo.org/forgejo-helm
- https://git.roboces.dev/catalin/huesoporro.git
- https://gitlab.com/api/v4/projects/64552889/packages/helm/release
- ghcr.io/woodpecker-ci/helm

View file

@ -1,5 +1,5 @@
---
apiVersion: traefik.io/v1alpha1
apiVersion: traefik.containo.us/v1alpha1
kind: ServersTransport
metadata:
name: skipverify-authentik

View file

@ -0,0 +1,18 @@
---
# SealedSecret for the Factorio server. Ciphertext is safe to commit; only
# the in-cluster sealed-secrets controller can decrypt it into the
# "secrets-factorio" Secret in apps-fuku.
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
  creationTimestamp:
  name: secrets-factorio
  namespace: apps-fuku
spec:
  encryptedData:
    game_password: AgCmUZilQTlqof5so2DyvjbCh3J8OAkz4lSQv++z+9XUz4/+KjwgEjP5SI9nf2WVfIHt7WiJN8oaPlYnm2XIdbBUrvKlTEuMAy2XeI8DE2+wKHXdbmLg7t3oZR/8kw3py9W3o4dlXp5XY2G4S3cG8TX0fkN58ni61mYv+zSvc6stcT/iveJqO5E+hXPcDSexzxQ/8DybS4D5g8W3N2OMhRoU0wwhYfXAuxN90BzFKgD6X/9Xy1c7pPQQkEidpA6l1uP5qIG/vChmIpqsOmQWbibGQn53el5ulPvaybx1wRu33eJJcSPRS+XthZv9dtwduFlboMT6QPWcVL5gSQ0ceCBidQIHGLRLxcHYPZz83miCeVYFY1xFegrwPBsXYEdfar5mufxgSQGtSHGzwEV0Ry+tcmjz9JqWpQBQVg10Bs0GYwvy/XGHi1BLCouAXXL//eVbGp1s9cl4uyN9Ymzt5zNrf/SvUweFsaCYrC6xVFA2CCsLbsyu/YbmKkO+cd1IwAle4luGmJHnZgrXKMwQFYoMTGsgEGYt87Itz7eOSmHEm/ZJwZ3oL6n0LGgHpJu1gb4Op0ZA9p44DKeg2fy5Go9rWeMOP0RIw8/SApE6QmT0Bw8QccddnTHhwuCLet2PoiUodKhFffTfGN6PGPeIcyQJpiEDxUI7nquWSNGTcMJDsR3LbzU6A8MpDQrrg5Az6YzyZOo7NLEMbitKLrqt9lfH5g7g # yamllint disable rule:line-length
    password: AgCTY4hn/wTGipH9oX7SgS44PE6wEe34AB2Pz9IeB5KcISZVGgWAUMtcffexV31jcNwz5TrztNam22Ys7qYbsZZVNOWm27/KZP3U04Jrq1cIPhY+xE1xF3vMqCd54r+kaeMO4hlDhBlE7Hs6BHdURTpPz8ocqihT2bft+Q8p2Myf3vPHXcDwoUyQj/AFYJdJJhyVfD5NDdacFhOmPTB/tUE4AW1Rz9oND6sy0x7NaP44vswVbhREpMA9wkltJRugRKUwXdfC6kOrfKa1R88aNOwkqc22F1U1PhcqUSAMYQxOA+zz3xMjrP8o70V1/FBKxnTBYVIpdHuwl2RpvC/TewJYVEu1xzp3texfgkTn6XXMp9InxfA6y1wpSVpMPYK5zPRCnmuyPTdSd+DrD6C6y2rstrHvvHxnLPtqo5REVjkfTGkEilmQ1+SllPMPk/6hKivahdmORixoI2MtOz4k4d+7rdPrrsRscMHAheyJTNdKC3wGoKFqbm+0zFV3GFaxM65K0USlYhPwyKG3FlHGj0t8HmXOr+M2cQKd4vqIrq8betRp08YPGMOT0Ea1KIvoP3z4yiJg2Z64d8d0Brof/h4fFd4kKgfMYm/CvkNh5zjFzEYi+K/6G/G99RBxwl7kK3eMB6CiuOnLITCw/Ok/LiRcdnIcAe7yobHG6FWHas4KPL5t4dPTxo12catoWtuJG7L20AIl3171+gO3jS0e4zAvZ+7S # yamllint disable rule:line-length
    token: AgByS4w6xSn0/FzWkgNazh7hyZjKTTmg9WtQ3oyKQUVF6FYg+qvPYYLuNu7rTwQOa4LGw3Cvf/yYT0+WU93BjfNCSVMfes1lE2dQzukK4+zEeDhZ4MSpBOBgYYwHJkrFyHpvVSAQkaPi40T0M8iJmFv+Oq1s8zRWmx84LMllaJuRGH/t9jMfmR3rF6JBcSmEmkmB7N8cD+ytPPtZKXGJXaWE0qvuNKNveqirLRt7E+B7z7yvhroaEHahHEseOQnJ6dKY83KzH1riHBTUNOVcI62hSkiYEbbZXAzxznxMKDs04w/BpOksTeg3OWD/RzwuRdX5M2zb3wrrqbF9r9yoLUbWBMS2bdUbUyLiqvfzKUWKAd8eZsS4+P8N5fbPrLgXmB+xRz5xiCQ+r/ZL4Nj9pfuSZMDKytIglldB6BT5gtnodiaCgAPrtLz2OMtBvvojpWOaaBbYWxbrnMhCG2YYU7Kd4UFXEttL/MVs7YkZow12AIngKqzz7vXo3K2iwRYoi2CjOwv3NeXSWk9LdTrTxs00iO8RT55wbAcg9HiNkYZFtrI+6sygvnHhDmNhYG0z7yHCpx13KIjoFEtVEcx2F9bbMftxBmZoOFaGtBhFSgH323CLGoFecdhv41cH9F5HHzpc13Pc5dShm6ZPgWrWG88w0Q4WIT6hiXlriSnd9xw0At4kl7wRBqusZgwDVTBCFbtKS6Gg9msBZnZzADI/aeljY3QXGg2YT/2Ra2c3quY= # yamllint disable rule:line-length
  template: # yamllint disable rule:line-length
    metadata:
      creationTimestamp:
      name: secrets-factorio
      namespace: apps-fuku
    type: Opaque

View file

@ -66,7 +66,21 @@ spec:
creationTimestamp: null
name: secrets-forgejo-email
namespace: apps-roboces
type: Opaque
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
creationTimestamp: null
name: secrets-forgejo-db
namespace: apps-roboces
spec:
encryptedData:
database: AgCOuS5HiC1AHU3oUZn0ol2V4OsVK3+O3iUZDtgCx7I6mkU7gNEEtz3vH/fXckAL8r7TI0C1s9/A2fPUGKAgC8B+rzG2iXSqfEWB4qUxuWESDwhCvn9KHbd2ewkhDEh2ajfrWIn89iq2q3nz+n/XOIf4FRgan0mwTVwnVSgHkR9yxEbxE2bLZ/INW8/UsmKvhA+akFuHx24R1FxmInIulnqQ/b+Wrfp55Qwa5AtWm2IXhxb5UM1NL/6Mcp2+Dpq2mvDvk5VchJXmEXfyjOfN1+TW1qcHcadDaPBvdA1aE3StOqFbqlN7PxXT84nVsZi2P1m+evGuoaIfizX7wdq6jUkl4l2X2QbxcUbnwj9BAPshpNfgW0voXLdHuJ3n/mqY4UtduC1BQbkYE/ZPZro9NqV2EMsWnL1YHieUR5kNhq33djjgUKoa4Nx5y1m4IMFsotTy2yjUZK2YUoxxonkrXAyqHqW2qBM/PEBcuvum1KnkAkmNYdHs8eSLNk9BXTLmC0LQ5AahrrSKte2sNuv7Kc/ZA+gTyhgYJTcdDhE4U4YPTSLpgLHbXpUy5vnVPgQVonoVdZib8Gma3sTinpvKH8XrPr9/o3xmlGWeMvnCvJ8qoF4izeWqaDQBqWcDFp6AZr0JgrhRkqWgMFK11D5WNXbuwjznU7GmFA7lweElgcii9FvbCTR7Tt2vCxZYz8ABF2kMW+MLa7X3PSvfh+d2gyJt9/p/AIqmxL9vJ3b341OM3hvjUy5vo4ZUEDTSI6FSlzJryF6CbuwvUuWEyuzeThysA0Ziy7RnSSdcYKlLv95XhJSftUjHm8O8Kgu/4MxiQe/ggn91xkpdb8I+bH3F
template:
metadata:
creationTimestamp: null
name: secrets-forgejo-db
namespace: apps-roboces
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
@ -98,33 +112,3 @@ spec:
creationTimestamp: null
name: secrets-forgejo-internal
namespace: apps-roboces
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
name: gitea-ini-redis
namespace: apps-roboces
spec:
encryptedData:
cache: AgBfhhRVgy0VonwvXF+qqhh1x9+Z1e0o/OJCI11srR6XLVCf+SIBejOSYyoYChS8HYgBdIuqKNS7Un5bdws5NF1HC0gbf8/XRCfivocceoXVta2MugJxz7Y0+9Ro3RH9uEKi6KVqAyHcev1oOW0eqmkNqHust06+VKa5Bw8F4NHa/3a1Rl9b9xaMwrJLxVMKZMyAIl2/WISUovKFPTjPU1HK9ftEaXwzj5HG/s2jm/MjrrvcoA5z1OGUm02xjqyooUNM6gsuNy8LTXDQybeav+PxlQztcOfNaqipTUkHLjmGWiste/Yl6ik05Dkh7BKmL2czY0KPhxtNGcUq4e2oE+b6DKGcJpbuSVxZLSqTKhg6Jing1GwNyembfRE3nwsvYgj9nzit1SZoQXWnIuBjxlfWGjGJeaj6PmcG1YK4wvwFFqBKIhUGH6fhWjxDl7y1FPsxxGvFg9Fnvcjex34K4J6UmnGO3G2Dts/V4pgJTGx5lp6wpvYVtr7U9ENRTym8GM5oVZ3DT0lONKcfZXRH4EDcMHKMfJ/nDnpQWJC+lihcTRVeSznxu8I073hk6MMAZ8Ho5/28rOCOdJc2HI807ipe39BzTn4U+ows34uFG55GgaTdfbbeFwLjrcVc9ht1WaApkdj8Bnt9inmFPsI14Zwb4Ap/gSSO+ztwhnwrA2rWD7fko53INLJLUb4/49H1xRpMeqEkjoUb76zpdnazuF0ksqs1zhPOUTpnQniduotkwZZrtdU2WPRxVzHXTaZD6/1oTrmFBoBOLnkBPz1CXY/rxMoxHrFoS3zdUtLYWXKqVZy5
queue: AgC1CxaTdBFeP3M4x8TMJKK2QsWydXC/eAslML9T1yzl4ZStKioMh8QJKbhj6oY3KTLyqBHpLZSGFuhtX/y9Z4JvkFhihfsKtlYL8OrNvO3kxjzku5l3vR6tJ5vCQfIi27hUjVKHN4L/6DGsegbgBGIidCsN5kHRv4C5ToAw/EvxgbHIRILIo+KOBALT1kqKIF+Zdd4fQGbIExwclhFl5SgzumXPh+/j1NUZ1peYYFRnvr5G9aeKPpgJ2wK4eMPQ3ZyaAcRCtxdEhZUF1DF0ESBQ++Nr/gU4GMoron/wSr5cUFmNjSoUTJA3xJeEROyoSGaaH45mLDYH5QZdeA/av8e6vaiMIuWr/UgV7wONFKsJLf0q9Fcr726maJwGiMmIjeCwLq4k+ZI+N6S8/xNe2sTrRUmwjWtbL82ZmafvoUwv0lmxxXpmFqZmG/Zy/KCx9QSDaBJBCStoCgMZZ04HdnxnxFdFj2DMXyshVxH3wqEFAKY0oDfrnIBxzngOEXxu0APp7rlMvFq++HMJ7+JC+JBLG6zfNSfpQyuXEwfSkvowA1i9uhAQVb3D7nSS39xKF7oK8aYDVK4j9QL3w9qxOkYW7bHVd4zDgtiidKDAFWoloMnjPYQDwJojc/dc52+SxJGH8qpYJ8cn7BOy80cvmkwQ2ITGGPo9FXyvMttD0hUmFRouNb4t4PHdmKauuml6hcfF3dqLqrvBhJ0H7tKRJMCK+rHehIQt8VNPhp2JN7n5/KKt5vtMFDUDpc/BbHiDqg3qnICTMESzWZD752tOucW7TJAA89JU
session: AgA+o9DsObwNhAaDpkgh7ZJ9Vx+xzjex+JG2k4yfv1EbLFDpdrSnKOl5BzEZyvydxoWG7Jtlhf/QoWKPUMBMgoRS+cMAH4R3C1FHxFbiEzh/olQBH2DyGnK5d+hAy3RmTC+Kgwgh4sdePWJ3KxwbaAa18tsju9fThQOBD+TOw5khZi2YUehDiviqCV3VGH+caJgDxAmXtut1mHaFLB4QXLzX9vPoULGmxGMARiMSrEqMruTRff17dZ3s0VPeu/s/Lau5yP7oPeMjhNw8P9W8QP/KnW28y0QETtXZyvmNQGCwxyVp7sHfDgo1slxWsgjDpep/jXiyfVeV8KL2AWE2pekGD0CVaCWPOV/MOrU+pe8xBrEcaXQMpWLT5d4/Unu8TLZ7RdEdgrOI8bOVrl9TO51itw0V7EorfeaKxB6j/flRMd8vU0i29+/UOTbEeRtxuHoyH4zy+v7h49b5ODN88z7f6uEp7diuS3WEHs30EHYtXRSMA6V6+H1aaUMEM0lWG6CXtccgAToqlw3k/+Mod8kVagPNQnJ6YB7BFqu74nHo/3lGKHT6+jRfLWUhbJ5teG0pLTutNN0jIqEUY4NfeLVtKhab5Nkxr6UFSUltQwO/dwVD2mqC80yXjf1Bhq07YGIV/M5DYf7oV1+z6UWoQwALcjCdZNgPkbyGqznfGsBpafJwv3MCmZwt50r8FH4ksb8P8lo28Eqepcf+vVL6C+tJGwm3F0CbawhU9mTLb0ksiYXrY3VYmYzUZVWHsosWI05bhQgN2g+B4+ANYX0SXtEc0/YbD8HusQvnoXWbj3CkQd0=
template:
metadata:
name: gitea-ini-redis
namespace: apps-roboces
type: Opaque
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
name: secrets-forgejo-db
namespace: apps-roboces
spec:
encryptedData:
database: AgBouV3XhCd3Z66sDHDz0nbqbvAfip94yr9R90stLz2H/vJFGyx4xumE/9xe3b5GUd9aYQbTonhzHFl+zP3yoeTsGIGAbuvpeDimDKUIdnI6MJ2oGVHSn08QY/eu2vuj4nUODCeWPE7W5EFqxCxCq1YxZZpoBLzE3zBuIf8R48KAxs7aau8k4WPPSBxHgXuIeUWR/fNtQrA032f1wS5p6bae8403ro4aithq7J6DiOz69MXIQWwqufay+krsEEqIoE8CioQP993w+AUH1q2tk6O7WQLuzKt4T0mZm6F3cWyNbpCV9GT7q5LtejFn1NAwsmM4UG2toZfuWe9NgiSwyqNNUW7IjzfW/+CF3UfAtkgDfn7IAFu1Wg0yzufsnJuazFy2FiVDYNiHYS3Rq1iboKQl84svuq6oYdgvK6kf4IUfU2j02TgCyYc79/sLFqlbLOsZI07fAg/tDIzRkWQyG5P1HreIiDYZdgm50BgAzyEsvLjguKqPUl/c0LLwS6IxleN6RgcxfczCnaf3lezPXol37qCcyTqCqyiYlpI0i0Y45RTpLmTlyATVpzXCiir3IM0yEbK0ff2y2c7czTdoQSaIowAguUD3SamNY5y3530ZQDbZAXF0U4nDq7Pn59tfrlvvlsA8cSGjgyjwGJobGJUCsGWfOtKSbTNV0zd6EFHlqN5ilf15BSaWXU+6g/UbJKxjgk5aNpXH8LHuAQBVAxpRQR6CNlaz6kp87b5CEnLtPCE9nlQGYBXA9sqdvABGSTGdJzf5k57w7Q5LiTLwA6h8x8TCbkRgArl4r5RGEdtfBr3ZBCzKL+EHJGYGHas=
template:
metadata:
name: secrets-forgejo-db
namespace: apps-roboces
type: Opaque

View file

@ -0,0 +1,16 @@
# yamllint disable rule:line-length
---
# SealedSecret holding the Meilisearch master API key; decrypted in-cluster
# into the "meilisearch-master-key" Secret in apps-fuku.
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
  creationTimestamp: null
  name: meilisearch-master-key
  namespace: apps-fuku
spec:
  encryptedData:
    MEILI_MASTER_KEY: AgBcQDv79lsUJF09YTd+zsuC9Ufhgs74mk5sxIrgaAQW/5yBupPgIsZw+g33qDqejuG+hfdhvkTOFHYetNuEDjnPWEpySjMLiB6N/HXMSuPimbOSjhHP3d7jgnWnIluUPs3RsvxDzaHCygVsS2a5ul7+qJGbiQTlmcV/rMVkqiw95mxwswkZhWi1Da1QYPgjRkazbCV0JAVhYYoo7VBnxceyGOS7Um5BsdyDMmXCn0qegU2FDlXTcBBur48hlyRqie/DxyZi3Yx/yiOnVH7g7H41H6hLJpKhQTMQbnohAqUC2UZZJlwrc8b/3kisFw/pxBP7S47hn9iseQcw18mXs6SzlXbhWm+CyNsKEvuXJAMVlaCrOCqs8Kf8ZlraCJYYq8mx+zoA7yAHnRdC4uByR5SGwnXJgq4WJD3wx90NuVbTcJfpQ+bNMPpRS8W+66S9j+rBVk6YcqCqL62JPSf0I9ZKCrNJrtbx5WyxbcVAgZdd2oxxXq6fG4I/wvqn/LN7nAqDwaCjU0395R+vM89o24h8pMTNOUhY1Dqxh0rKQOnTACc12kmhwQucdtjwkFzM7PJxW8d8GGdvgPoIxe27sguUMvn6IFo8h0JmGrbAyDEeR113s/gwQm9ozM9KJXXyImfiRJCcDSlny0rTNWZaGonXuSezFuhcSazepd0v85ofHgIflQQjMfLUNz1b9+ci4SbnpoJwzlrY2d6SyJSIA7Bz223j9UcRgDvRvIz3
  template:
    metadata:
      creationTimestamp: null
      name: meilisearch-master-key
      namespace: apps-fuku

View file

@ -0,0 +1,96 @@
---
# Miniflux RSS reader: 3-replica Deployment with a hardened container
# security context. Config is split between the "miniflux" SealedSecret
# (DATABASE_URL, admin + OAuth credentials, via envFrom) and plain env vars.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: miniflux-deployment
  namespace: apps-roboces
  labels:
    app.kubernetes.io/name: miniflux
    app.kubernetes.io/managed-by: argo
    app.kubernetes.io/version: 2.2.6
  annotations:
    kube-score/ignore: pod-networkpolicy,deployment-has-host-podantiaffinity
spec:
  selector:
    matchLabels:
      app.kubernetes.io/name: miniflux
  # NOTE(review): RUN_MIGRATIONS/CREATE_ADMIN run on every replica at start;
  # Miniflux is expected to handle concurrent startups — confirm with 3
  # replicas against one database.
  replicas: 3
  strategy:
    rollingUpdate:
      maxSurge: 50%
      maxUnavailable: 50%
    type: RollingUpdate
  template:
    metadata:
      labels:
        app.kubernetes.io/name: miniflux
        app.kubernetes.io/version: 2.2.6
    spec:
      containers:
        - name: miniflux
          image: miniflux/miniflux:2.2.6
          imagePullPolicy: Always
          securityContext:
            allowPrivilegeEscalation: false
            readOnlyRootFilesystem: true
            runAsUser: 10000
            runAsGroup: 10000
            capabilities:
              drop:
                # FIX: was lowercase "all"; the documented drop-everything
                # token is "ALL" (required verbatim by the Pod Security
                # "restricted" profile and policy tooling).
                - ALL
          resources:
            requests:
              cpu: 300m
              memory: 300Mi
              ephemeral-storage: 2Gi
            limits:
              cpu: 400m
              memory: 500Mi
              ephemeral-storage: 4Gi
          # Liveness only checks the TCP port; readiness requires HTTP 200
          # from / so traffic is withheld until the app fully serves.
          livenessProbe:
            tcpSocket:
              port: 8080
            initialDelaySeconds: 5
            timeoutSeconds: 15
            successThreshold: 1
            failureThreshold: 3
            periodSeconds: 10
          readinessProbe:
            httpGet:
              path: /
              port: 8080
            initialDelaySeconds: 15
            timeoutSeconds: 2
            successThreshold: 1
            failureThreshold: 3
            periodSeconds: 10
          # Sensitive settings (DATABASE_URL, ADMIN_*, OAUTH2_CLIENT_*) come
          # from the "miniflux" Secret.
          envFrom:
            - secretRef:
                name: miniflux
          env:
            - name: RUN_MIGRATIONS
              value: '1'
            - name: CREATE_ADMIN
              value: '1'
            # OIDC login via the Authentik instance.
            - name: OAUTH2_PROVIDER
              value: oidc
            - name: OAUTH2_REDIRECT_URL
              value: https://feeds.roboces.dev/oauth2/oidc/callback
            - name: OAUTH2_OIDC_DISCOVERY_ENDPOINT
              value: https://auth.fukurokuju.dev/application/o/miniflux/
            - name: OAUTH2_USER_CREATION
              value: '1'
            - name: FETCH_YOUTUBE_WATCH_TIME
              value: '1'
            - name: WORKER_POOL_SIZE
              value: '1'
            - name: POLLING_FREQUENCY
              value: '120'
            - name: BATCH_SIZE
              value: '25'
            - name: METRICS_COLLECTOR
              value: '1'
            # NOTE(review): 10.42.1.0/16 has host bits set for a /16 —
            # likely intended as 10.42.0.0/16; confirm Miniflux accepts it.
            - name: METRICS_ALLOWED_NETWORKS
              value: 10.42.1.0/16
      restartPolicy: Always
      automountServiceAccountToken: false

View file

@ -0,0 +1,21 @@
---
# Ingress routing feeds.roboces.dev to the miniflux Service (port 8888).
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: miniflux
  namespace: apps-roboces
  annotations:
    # NOTE(review): this is an ingress-nginx annotation, but the class below
    # is traefik, so it has no effect — remove or replace with a traefik
    # middleware if rewriting is actually needed.
    nginx.ingress.kubernetes.io/rewrite-target: /
spec:
  ingressClassName: traefik
  rules:
    - host: feeds.roboces.dev
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: miniflux-service
                port:
                  number: 8888

View file

@ -0,0 +1,11 @@
---
# PodDisruptionBudget: with 3 replicas, voluntary evictions may take down at
# most one miniflux pod at a time.
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
  name: miniflux-pdb
  namespace: apps-roboces
spec:
  selector:
    matchLabels:
      app.kubernetes.io/name: miniflux
  maxUnavailable: 1

View file

@ -0,0 +1,20 @@
# yamllint disable rule:line-length
---
# SealedSecret backing the miniflux Deployment's envFrom: admin credentials,
# DATABASE_URL and OAuth2 client credentials. Ciphertext is commit-safe.
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
  creationTimestamp: null
  name: miniflux
  namespace: apps-roboces
spec:
  encryptedData:
    ADMIN_PASSWORD: AgAHylzGx3pyxBBLQapB3nmKVbbT7TJ2l2mpX/GOOQnSLySjBQF+ji1ZSjjgW9a/eq6OSfvqDB0pyFIWD51hC7gwXFSH07dwWgguI0yDATg7RgTagvvebi1GFrmkOPC++C/+QbVfxsj7S7SvhhHcjv1K3IQjiAlNof/pD7J9fe59H53fh62eegC55VMI++AXS1iEFxLMq7gQ9+pgapW5a4tR2WhdkGKdTaLCG5OKpzAzoodixyP/fYBDmd8OvpXnG9lv+0tohFlRrYD0Vgl/PR8yobPYoi0Oo5CqOK59zELOjsepZfziw9OB6NaOmxN6QWYAmFNXVBrWDfuvMCG4SHARbfaNhIX9VVvlr7XUi0ZIzGYyWVgI4ZcsvfQjB11RznZzSPyv5Sos4UR7UCL9aU+p/OnTcyGrbQ/JsX3ZjGaaJCtpScfD9xNzQVnnaAizVNt2sBIqNG1TOzd2rVZKkl5B2QsNZ4mpyFNAeqx3IlXK+y+m61Zzoa1V0kN507bmSVg0rAspw/045hf6GtpHNQtFn5BBnoI54qn5CrntkfQzM3YJp+OaEywg3ekktmhGfMZorJYtg77YaYFXKmvgkc08WtVFQFISXzGXcWUqxloE47DlQkgITgTEXnBryIrwS6mGOiuMVv12eEwYl1QTjAuqYYerZq2QAObbw8xCkGkfN/oksfrivsmE2KLCIrLr7HxisprBi9vLjEQ7mRDFXg==
    ADMIN_USERNAME: AgBJw0U5eXLX6Tvgm2If6axOPgFxiV58OM+Xt3c4XwVwYDuvNJw+ccOcwy97d1oYgE9t6bmSLIPLDv2oUN+GOzQ/danayN4tC/778qeWBjKA/ffwX8hHSmDPLtU/nwhm4+lZj+doWrom6MrI0A4oNlqNtxulK0neN+PFDK9X0Hn7FkZtOMnKC42KW8H80oXLdPcRB8u3ZM5WgcXaRm/vPiCsTPAXHywNMcIWnsjH3SaX5Aoc0VjB82tfQ62M3hM9tngoOcrIuRlolLQVFmLD/4qNQYQOs7cXy9BWB2GcIyVPiS3WJ4L9UETDYqSeiZVbrFS/C6RVHIP6TTrS+0XhIX/8DuY8vV3qt5wN4iwC+nq8qvI7LpogDl1sGhjHJp9hAvGzYpYLAXTTxVTAh2rgqw6TAWpGABOzFghUwhN0/RRJdSqkxENO8p6LZOvDia5zTQARu1061kyy4+6ownTw8DvIcnEaSCqvmJZbsUHIG9UnQlrUUpSMEdsr4NDOrh2hroGTYtXsl/iQ1yFBqNbtSQqUTsca1Hiny9TQjGjwyluZ6StoighZH3jbelubv3yHghN6PgZaLt94hnDN78eBxhB9XQ03rcdbm8WTZIOQ8k3RGcVgY5oGBqTGOq2IFZm8zTO0Ze9+jnFhGM2zuFGK6rv8RHbXhAautoQxpd+jdeTEpGRTQgG/7ebEe7WFchlNcNoCsm0OCA==
    DATABASE_URL: AgCc0sAAV+6T9PuImOkcKWBcyNUYsNXuoS2G2ex3AUAXdLmNXwo03vFDyvLRZNFskpF1M8hHEo1W3o9PKXyPb6Ba8RiWTicQOSgQlJhpG6c4iDvX9ZuIe62V8tQUBbpNkXDXCJ35mEmQgLWl5WccindGJI1eIEkVcAnpFwO7vPpGGNCYi7rIvu92961nssYcGKrZ5UNxvsYdsmG3mWdUNq00klqi39p9QQZOc+vslsZn8R3i+Qe2g4jzBpv2Rn7EAcZawLruo1lAl6UhTGAMWosm2t/Kmd5JX0+q3dsUQ3V6n1tC855UolztEt24Q5lfudkKqAK9RaKPsC7qnRC7LuYHv0WmP66uCcI1Uc72IIxrDTvGI5F63Psk4JJd0xgie9bMd2qFAtr0N9o4RNShUk3igghl1SexGmwvWxsCadegQOVv7/6Pm3SgH5K0UxNuBDa3MLwlLmshBtEQcMDBbboWS8AdRzI2I6hf3I5AiFRCz7Fux+VDEQVziCFGXjGAq5jADmqR4tkto1Lx5sYvI//NS5ZQzrwa5tOxlpb89WUs3IxN2b+m8Vc2GI6FPkoVjRUxYZuyUIPTJqCrVistyVSGu0281ojb9+r6fc7wPiHjZngZ6E3wL5xXQb+k0cpBX8bYPr3wPNY6/mx61kyDBXjZ8+CrdKw71nVHQSYDpv8vlFsVd1fOd40S8D9XkTP3P/5LQpdMQxAkCF/qLjeBhZX+jGwMEIdedxc8t09R+JLev4ehnjNoxFTFLlckvYrX6P5Lp6hXE5jCylahNr/K1rFmxqF7RrGOoF26GiuO
    OAUTH2_CLIENT_ID: AgBuTfIdxdW96TnjQor2KtH5/wDJfvWXJ9wSln1wUW7fzBaZ2tLoPxvOUv8NyyFWPYC/J7AeShOwJHJIT4iCLqBPT7qY+MiM8xn4k2yz0mfNia0CUNrs+0eHfUrYdAjVJah+NAO2TvBhXSmGYQ3nNDU3TQOqXj3x+PWpc0w5ulz+4V62Oc/gO+KAcK2Pn2ISD7IzkVYW+H6allQXzG/b23SQVYB49g9bgWuJIzVRlIE+91uI+CyFQE03guS1Jpya8J04UkZI8ki8O7cz9N6ak6OG73toZUQpcP3d6bMIt/NRa/qTEOp1bLf6ZQ0e7+gVgI+73WUVg2NhkMraHzeMfHRa8bD2/BQ/bk45r0U0eyntXjYv9bkE7l954QPPVjJ59zlGzQfD0QMB3U8OiCdqUiyfyojulWNhwzuipwFX8HfLvSnkrIFNo/zeHJzUasha9UJY+NorzOnXvXWHN2aLN5Tr0Rh5Wk3//PzbYd9UlPSBDxLVw86Fg0SfHkhngMatSkfzLeYHwDSl8pSQI39uMJKVUOpflkeZVbWX8knjr4D+S16zhnqk6K37PDDV9hb6Sp7luAgFesBIn+gtwTpfwT0CDtotm6yNfHOnLvLOMlFn35N1ic5HghFW8h5mkJTgoAMkmU9Yehb9RjrEVAUbabR8RhchO9cqVyakAp9/4/Nb6pG09O2XIBMrKS5bdqEwuObM4ygFSz16ByzQ8dekGEQECej+o95bI1qNxO3Yaslh06Z2NKV1DUJ7
    OAUTH2_CLIENT_SECRET: AgCJgI3M7DDvNspsDLZ6CeXxYtPLs3WabskA5Tp9d4vNYUk5XvLbYUhzkxcdtWsG1dgazH+RrvgGgCZFVOw2cgb2EEkt+zxB10ihN7HwHtOGDMZDJFOLGso2VJvbKu4yntBTv/1w86gtz0n3CilhvAcEAD1UrmVrTwfJTF7DmCHVEEPCOLal/lF9rSMwFrhdLTUVp+eSEZ3kC+F8WQR69nk7JmWjEXeamJ+HzahLm2Bp2D9GtxaF37TjV0pXqgyybIpSfdabVGwcik3bT12lf+6gmEPDKvFoq2eUB7esIuSH+RCHy1M1Rk7EO81Ku3ELoSJPzd1JRuTn1jFY8DzOOQmUU6yFbZUdTWpECImnI+OwZYg82rPqG5Gy3xmKv+5h1SkejQwJ/olqG9M4BlG2DlTS2t9GAW7Z6Q7O4oglMpUmG3v8fZUblJT7HhyJv+K2FqZHuYjo64we+14qEnV0LvFdHhEoHrbKi7b7qwDYshHycZs+DcF9HiqkK1NMFjszY9W85uH1Trtx3yTEUC3t6yMacef1OTL8SMr7AQMlo4jo6QRzggGJw2EGIZqiAXAbpiiQyPfjoZ4A1jQlemwAd25SRfnGu7ZvGt/LOKZ/sBfMWhk3Eshw9ffvW2TQKA4oODb+6o6MiRVhL5UkqNxMLIt9IV7o7EnBsW7xQHgZUq9qq+Mqb2mvB2NVYC0skyQtHE7SU4nCOeSiIsmt4Y5jwjsGyHbAxJzp3H+cjcefZOf6Rb+iweNB5rtYBhQsTlPA6lZ9GPe04wRwMtyyv/sEh6LbDPvRyuIK5sLAApPZKBEmgqahri2/BNdARf54vHDkba3qlB2cgCxjJJdT1XuuPUn6+W7vE1gYrKP5TXBi/YZ2Qg==
  template:
    metadata:
      creationTimestamp: null
      name: miniflux
      namespace: apps-roboces

View file

@ -0,0 +1,19 @@
---
# Service exposing the Miniflux RSS reader outside the cluster.
apiVersion: v1
kind: Service
metadata:
  name: miniflux-service
  namespace: apps-roboces
  labels:
    app.kubernetes.io/name: miniflux
    app.kubernetes.io/managed-by: argo
    app.kubernetes.io/version: 2.2.6
spec:
  # Routes to pods labelled with the miniflux app name.
  selector:
    app.kubernetes.io/name: miniflux
  # LoadBalancer allocates an external IP for this service.
  # NOTE(review): presumably backed by the cluster's LB implementation
  # (e.g. MetalLB) — confirm against the cluster config.
  type: LoadBalancer
  ports:
    - name: miniflux-service
      protocol: TCP
      # External port 8888 forwards to the container's 8080.
      port: 8888
      targetPort: 8080

View file

@ -0,0 +1,16 @@
# yamllint disable rule:line-length
---
# SealedSecret holding the Valheim server password. The ciphertext below can
# only be decrypted by the in-cluster sealed-secrets controller, so it is safe
# to commit. "creationTimestamp: null" is kubeseal output boilerplate.
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
  creationTimestamp: null
  name: valheim-secrets
  namespace: apps-fuku
spec:
  encryptedData:
    server-password: AgBsm7Qg9ej7FtFh5twb4ALyL0I/fzVukURvFg17aweeDX7bM/9p/Yq7S2XG8gbqOYbC1GxknGMHQUnTXqXC9YZ4tZVUAptTCrAsPZHhHiet8bM39KCo2tGa5mCyC7lcmxae26cHuKj8Df6iMQCHL9ZH58A2SU8OIaszkonjwvSnbk6u7/HLCE8UyqP1JjXBMd4wx4BFDrhbauZr10f51tI55ksY+x44QQNrz84QEXmQ/dgwdzGAWqcPQTf57BebSI+ZKtUIvrMpNtz1ioqGnH3vWlb7QnqyqcyAYri3W3j8DB03EpfI2QjYi5Rs1NaJoO8L5HFdHW5p+rmttuwRxiEUPmURftH25o6Mgv/EcWGsB1TpyyFXM8JNU01lWJ+Wty316YF1BV3zHqdQeKu82R/wSv+iVm1dYKTfSOLe3YJr+aFnhYX3hCpBup1cB2KeOe/X9wTo2ETdvKhcIJPz8x7TRcXaCerVmVBw6LagmmdtMsCL4AIXw2gdkBeGONQmOzR1hDyTBAmpTv59WYzAJcCPZRE6gGxCPqH32G36E7WGEI4UOsjvT3GkVDnYx4FUDppzSP0ebnHZOwwAPFtXojHUaHg7ZTjZiuXDQa9Hkqt4mIOKa0i1HI0MyPu8eZJjoRXNS4j1yLfDCP2eSuhGjtVNbbyQthaITolitZ0VeUU8St1iKB7rvAGHqhBoPSw9TOBVSsBcHgIAV64oRqto4kM8
  # Template for the Secret object the controller creates after decryption.
  template:
    metadata:
      creationTimestamp: null
      name: valheim-secrets
      namespace: apps-fuku

View file

@ -0,0 +1,15 @@
---
# SealedSecret with the Valkey (Redis-compatible) auth password; decryptable
# only by the in-cluster sealed-secrets controller.
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
  creationTimestamp: null
  name: secrets-valkey
  namespace: apps-fuku
spec:
  encryptedData:
    # "disable-line" is the per-line yamllint directive; the bare "disable"
    # form starts a block and does not suppress the error on this line.
    REDIS_PASSWORD: AgCjt/Lbu+2WmEQv1TjvaRD/hMKPFYFYGxisNc4BMm3FqR5zZG4EBgLJSlFFk/YKnFEZl7tKaW1E0A0JxfIoIdivb6fbWOPUyLQy2oluDTR607o7Rk1tP8+nZ83gq3KdHgWQ02ocq4zEw6XATJxCsStGWQjCMAuEcXtHMEDpW87tiq/3CEV8u48Ao2Cf354omVKFzj17fMDe+Y7+Hs9QEChDCbt2LGTDNHOjYJd6jxEhhP8hzrj8IU1EahbmpI6uKlR4q3rBmngGlITi1UnUvVK98W89wLI8wgTRu6h4LOGtKKATUh0E1burLjgmTndrHp0+BkgjhdSeMRax4nwGZJ+xlTy2NYdE7xRCdz/9K5fKMBZKLDHYkaU+69QtkH3xTCSOONdim8A1HPix9jWBlGBMSMbt1SrS1TCHL+hsIH9njpsgS+9INzpvMD9zVspBSfj+05OrB6y6xe9alle87ILBsK4/y5G6prHUFguFeZJbTDuEzmWg5JY2yci4BB3TDP/zT7JWsP+MVfIqdFXKFDFMpM6a13PstC0JXumxot4zRydtAfhb5dZqlD+QA7I4V2RKTx3CVxTur4gGehz0aeLyAhScoiiqjG8+o1hQtX0JrGRt6oHSta9eASM+KaNWaU4sM5fDyh/zyVnzGEwNr/rWLVKUtD3+JpnpVbuRDe6ELIOBzFIl7M4VqWiyLHLWGfIWVZUXMX5ScC5T7Di1UA== # yamllint disable-line rule:line-length
  # Template for the Secret object the controller creates after decryption.
  template:
    metadata:
      creationTimestamp: null
      name: secrets-valkey
      namespace: apps-fuku

View file

@ -1,17 +0,0 @@
# yamllint disable rule:line-length
---
# SealedSecret bundling Bitwarden API client credentials (BW_CLIENTID /
# BW_CLIENTSECRET) and a vaultwarden master password.
# NOTE(review): the consumer of these keys is not visible here — presumably a
# vaultwarden-adjacent job; verify against the workload that mounts it.
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
  name: vaultwarden-kubernetes-secrets
  namespace: apps-fuku
spec:
  encryptedData:
    BW_CLIENTID: AgB6UpzjiBqifwHwm4YfevKVQLTt/2JxrTdJ0O29i416TrvPvYlrofG6ihWQDIr7zAROq5RE1YI1mFdczzcHTccMV+/rPPBTY04rdkoypc17/+P5eVLO03dcSldhbcgiMJQYgji+U59SFebPxxPI9gn6GmOss368Wqgdffu/d7V6RtvBNN+qgIu1FjS26FYxKRKi/mEjPmF6GCkkWRHkkpimdjKalVkuQXiu04cwDTSRYNmgePv5ihem/5tP7ZqgQCFpYafpia6CnQwhHNoPP4Dq+cV5VVPw7AfVdm28HgFLiZhiUWXoGiiFvTZcDwViG4T80gqxtfN/2ur94V7zc/PTGXSsVWBJYM93/jf3zcK7h5wag0nXeYm7nD+NT1JM/2NZguqLVl3iX4qE+f0C83dPTUrBv8+9H3aw0YLI/zgnT8Fdg6VAdbGrXMXrTEqm2IChRZ65/WIgwaRWIH+ETsWPFqUj3mH9Cx8NkNNSRfTqmZS28VEfcCzutSgTJ4zs2VwTYDBBD1QQSMrhUSLrCihWLK3ZTjVTEwAaoUObnaFrYpNBGVZQne9zzWO38/y4NQ2D1Q1YTx0cBP8qcKit9v1GFmOcNDsVG1WCFkZh0qz4j37SOBH0J00sG1lwGvkb05pOjcGVUexjzvHloUjSauFypW+2XQqnVshMbNgKgZYZmZmWbHf8nyq7+wssivbjB5qX5foiCN/Qp2WtIG92k08ZU1+hTq/w/GX8DI/UsbSLU7p/0vpAKMDBuw==
    BW_CLIENTSECRET: AgBfsr/ECO+lxSojrp1Ailv7SOYMqtGzmQCmXI3g7+K0W+RT7dOHuZOk7VlvYG5l4qVjriXhMo1xGcGYf8WeAebx+OWTs9Y9sRQ7eGnQW/KD7ihV3vCy2+jEdWZas9wEN1coUUt7Lbg09jz+nrt8Di2xFigJWSjuWejyAwmnRC0O0gLSudidf2x1aTeclid1tFvubbKbYUrLbTCLPW1bDuDs8BseRX6sF8/CVR1ZKWbcADUYvP7Amygc4WMElRREMQJjKBiPYNA4OuepvpQDlNVz/wq499XJAnFMDP8BhKxYalwOqTYzWQT0DA4mwBokMnpRE0VJ3erAAKQwzDHqO8pFE5bqhgzjwTWryH0ZmRF96JVLxx5IMetb8jYEPAHA/ymz9GmSUyVDXDRoeyH2xM4vuD/6A2JXc6kgcpfRx+5UJUybajO7urvHBCS/5X5cEiEOyEtqPMqkRdv2LgN1wXMEU88eK2NqpVX7zhLIJgoMusdHtkDmSlS+pFIb3GwGQRmn5khj5xkyQKweMoPvC5Pq+T/F5/2NziJGRj56HYvaiOPfyfzetaw7Zh3I+umMfZ6mtKD7ntYB1EYaqIMhTlAQv8DxS98t19LOke3h5QKcX6SdKeAqAvlqxuYZ5rweNtZsDevtnaFdmDzbve6xbZrtNwAurpZMYC/7tetyH+jFHrcFjDCuMMLdD5t4d8NW50nks71Pofe6KO/8lkzNOjiQwBIUfG+8Y6bAmPiBBr0=
    VAULTWARDEN__MASTERPASSWORD: AgBlRRDUcWw8Kq8IJ/dBk1RQwA6jg4VtpDTzU9eRtkdZfBoEI+KnQt2QHtGv7ZMmyCHztRAoJcEWyoN25RMdG4dQQN40IOPBiv7D8e036nQv7rQqZWE7mPPs9veskS+8sE+h3HFlwIEytn4721nHw4DNl2Uwbtgo1rTRRyJ3Px2UoCfnCU1xVtimWhj7uLjf2kPkSvWRUFZFfzSkuMWtiAPDxspk7q8CTktdiUHV6ZsuuIcZfJ1mHkuredVGYpOcrKLJcwGE7Auzn382lILNwkSuiZjt0T+O5A0c406SPVGU/ovofbRgdUQmmIbS+q6y17HMDkwLutdmIyJgqMEPJXR0KfebjzdtaNdSbmL68QsdECqCbQm6Az3uMEOJ8TVm9rH5yfZZoXLMVjzHgwtQbV9vBb0ubMUKdqJahD0zUQ/1FqDtYHt9OBv8bLh8SXiTKNxz2GByHjcGFUNZhZac1eTqmtYxxUhk4KNFsqx7FvJNUi0VTfINvHAd9Tjlrd0vQbST3VgdiIEHcuxW5HShdSnl8o5WXKmEtlecMqB3Y/C6IIPF+CZ6HoMgfE59G2dchnNccSwdZRa0n6OWt3BWJi7fuhrBXvTBpa5Zxrqh6o1VX3k5wXDgBRN7a8pZawhuCXbcBcrhcs+wDm7YlK3Gj3F01dIOGc7qMpdMWUHcUxCikC9Wlnp5b+OKB2huiHWr2p8v0IeOu8MfC65GEkx/dInxW1CkitHsGVA=
  # Template for the Secret the controller materializes after decryption.
  template:
    metadata:
      name: vaultwarden-kubernetes-secrets
      namespace: apps-fuku
    type: Opaque

View file

@ -1,162 +0,0 @@
#!/usr/bin/env bash
# Scale one or several deployments/statefulsets to a fixed replica count.
# Resources may be given as "namespace/name" or bare "name", or selected in
# bulk with --all / --all-namespaces; --dry-run previews without mutating.
usage() {
  cat <<EOF
Usage: $(basename "$0") REPLICAS [RESOURCE...] [OPTIONS]
Scale up or down one or several deployments/statefulsets.
Arguments:
  REPLICAS Number of replicas to scale to
  RESOURCE Resource to scale in "namespace/name" format, or just "name"
  (uses current context namespace). Can be specified multiple times.
Commands:
  --all Scale all deployments and statefulsets in the namespace
  --all-namespaces
  Scale all deployments and statefulsets across all namespaces
Options:
  -h, --help Show this help message
  -n, --namespace NAMESPACE
  Namespace to use (default: current context namespace)
  --dry-run Print what would be scaled without making changes
  -v Pass -v to kubectl (minimal output)
  -vv Pass -vv to kubectl (more output)
  -vvv Pass -vvv to kubectl (debug output)
Examples:
  $(basename "$0") 1 mynamespace/mydeployment
  $(basename "$0") 1 mynamespace/mydeployment mydeployment2
  $(basename "$0") 1 --all
  $(basename "$0") 1 --all --namespace mynamespace
  $(basename "$0") 0 --all-namespaces --dry-run
EOF
}

KUBECTL_V=""
NAMESPACE=""
DRY_RUN=false
REPLICAS=""
RESOURCES=()
ALL=false
ALL_NAMESPACES=false

# ---- argument parsing -----------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    -h|--help)
      usage
      exit 0
      ;;
    -n|--namespace)
      NAMESPACE="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    -v|-vv|-vvv)
      KUBECTL_V="$1"
      shift
      ;;
    --all)
      ALL=true
      shift
      ;;
    --all-namespaces)
      ALL_NAMESPACES=true
      shift
      ;;
    -*)
      echo "Error: Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
    *)
      # First positional argument is the replica count; the rest are resources.
      if [[ -z "$REPLICAS" ]]; then
        REPLICAS="$1"
      else
        RESOURCES+=("$1")
      fi
      shift
      ;;
  esac
done

if [[ -z "$REPLICAS" ]]; then
  echo "Error: REPLICAS is required" >&2
  usage >&2
  exit 1
fi
if [[ "$ALL" == false && "$ALL_NAMESPACES" == false && ${#RESOURCES[@]} -eq 0 ]]; then
  echo "Error: Must specify --all, --all-namespaces, or at least one RESOURCE" >&2
  usage >&2
  exit 1
fi

NAMESPACE_ARG=()
if [[ -n "$NAMESPACE" ]]; then
  NAMESPACE_ARG=("-n" "$NAMESPACE")
fi

# BUGFIX: --dry-run=client used to be baked into KUBECTL_BASE, which broke the
# "kubectl get" existence probe in scale_resource ("get" does not accept
# --dry-run), making dry runs report every resource as not found. The flag is
# now applied only to the "scale" mutation itself.
DRY_RUN_ARG=()
if [[ "$DRY_RUN" == true ]]; then
  DRY_RUN_ARG=("--dry-run=client")
fi

KUBECTL_BASE=(kubectl)
if [[ -n "$KUBECTL_V" ]]; then
  KUBECTL_BASE+=("$KUBECTL_V")
fi
KUBECTL_BASE+=("${NAMESPACE_ARG[@]}")

# BUGFIX: the previous ${DRY_RUN:+ (dry-run)} expansion fired whenever DRY_RUN
# was non-empty — i.e. always, since it defaults to the string "false" — so
# every run was labelled "(dry-run)". Compute the suffix explicitly instead.
DRY_RUN_SUFFIX=""
if [[ "$DRY_RUN" == true ]]; then
  DRY_RUN_SUFFIX=" (dry-run)"
fi

# Scale a single resource given as "namespace/name" or bare "name", probing
# whether it is a deployment or a statefulset. Returns 1 if neither exists.
scale_resource() {
  local resource="$1"
  local ns name
  if [[ "$resource" == */* ]]; then
    ns="${resource%%/*}"
    name="${resource#*/}"
  else
    # No namespace given: use -n/--namespace, else the context's namespace,
    # else "default".
    ns="${NAMESPACE:-$(kubectl "${NAMESPACE_ARG[@]}" config view --minify --output jsonpath='{.contexts[0].context.namespace}' 2>/dev/null || echo "default")}"
    name="$resource"
  fi
  for kind in deployment statefulset; do
    if "${KUBECTL_BASE[@]}" get "$kind" "$name" -n "$ns" &>/dev/null; then
      echo "Scaling $kind/$ns/$name to $REPLICAS replicas${DRY_RUN_SUFFIX}"
      "${KUBECTL_BASE[@]}" scale "$kind" "$name" -n "$ns" "${DRY_RUN_ARG[@]}" --replicas="$REPLICAS"
      return 0
    fi
  done
  echo "Error: Resource '$resource' not found as deployment or statefulset" >&2
  return 1
}

# Emit every deployment/statefulset in scope as "namespace/name" lines; the
# kind column from the jsonpath output is read and deliberately dropped
# (scale_resource re-probes the kind itself).
get_resources() {
  local ns_flag=()
  if [[ "$ALL_NAMESPACES" == true ]]; then
    ns_flag=("--all-namespaces")
  elif [[ -n "$NAMESPACE" ]]; then
    ns_flag=("-n" "$NAMESPACE")
  fi
  "${KUBECTL_BASE[@]}" get "${ns_flag[@]}" deployment,statefulset -o jsonpath='{range .items[*]}{.metadata.namespace}/{.kind}/{.metadata.name}{"\n"}{end}' 2>/dev/null | while IFS=/ read -r ns kind name; do
    echo "$ns/$name"
  done
}

if [[ "$ALL" == true || "$ALL_NAMESPACES" == true ]]; then
  while IFS= read -r resource; do
    [[ -n "$resource" ]] && scale_resource "$resource"
  done < <(get_resources)
else
  for resource in "${RESOURCES[@]}"; do
    scale_resource "$resource"
  done
fi

35
scripts/ncm.sh Executable file
View file

@ -0,0 +1,35 @@
#!/usr/bin/env bash
# Maintenance helper for the Nextcloud docker-compose deployment: runs common
# occ tasks, arbitrary occ commands, or arbitrary commands in the container.

# Resolve the compose directory relative to this script, not the caller's cwd.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
cd "$SCRIPT_DIR/../docker/nextcloud" || exit

# Run a command inside the running "nextcloud" compose service.
docker_exec() {
  docker compose exec nextcloud "$@"
}

# Run an occ command as the www-data web-server user.
occ_exec() {
  docker_exec sudo -E -u www-data php occ "$@"
}

case "$1" in
  upgrade)
    occ_exec upgrade
    ;;
  htaccess)
    occ_exec maintenance:update:htaccess
    ;;
  indices)
    occ_exec db:add-missing-indices
    ;;
  occ)
    # BUGFIX: drop the literal "occ" subcommand word before forwarding;
    # previously the container ran "php occ occ <command>".
    shift
    occ_exec "$@"
    ;;
  exec)
    # BUGFIX: drop the literal "exec" subcommand word before forwarding;
    # previously the container ran "exec <command>" inside docker_exec.
    shift
    docker_exec "$@"
    ;;
  *)
    echo "Usage: $0 {upgrade|htaccess|indices|occ <custom occ command>|exec <custom command>}"
    exit 1
    ;;
esac

View file

@ -1,313 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Proxmox cluster power helper
# - Start or shutdown a set of QEMU VMs and/or LXC containers by ID, or all.
# - Auth via API token or username/password (env vars or secret-tool).
#
# Requirements: curl, jq; optional: secret-tool (GNOME keyring)
#
# Environment variables (examples):
#   PVE_HOST=proxmox.example.com[:8006]
#   PVE_TOKEN_ID="user@pam!automation"      # when using API token
#   PVE_TOKEN_SECRET="xxxxxxxx-xxxx-xxxx"   # when using API token
#   PVE_USER="user"                         # when using password login
#   PVE_REALM="pam"                         # default pam
#   PVE_PASSWORD="..."                      # or provided via keyring
#   PVE_SCHEME="https"                      # default https
#   PVE_VERIFY_SSL="true|false"             # default true
#   PVE_NODE_FILTER=""                      # optional: restrict to node name
#
# Examples:
#   scripts/proxmox-power.sh --op shutdown --all
#   scripts/proxmox-power.sh --op start --ids 100 101 --only-qemu
#   PVE_TOKEN_ID=me@pam!ci PVE_TOKEN_SECRET=... scripts/proxmox-power.sh --op shutdown --all

# Connection defaults; INSECURE_FLAG is "" or "-k" and is deliberately
# expanded unquoted in the curl calls below.
SCHEME=${PVE_SCHEME:-https}
HOST=${PVE_HOST:-}
VERIFY_SSL=${PVE_VERIFY_SSL:-true}
INSECURE_FLAG=""
if [[ ${VERIFY_SSL} != "true" ]]; then
  INSECURE_FLAG="-k"
fi

usage() {
  cat <<EOF
Usage: $0 --op start|shutdown [--all | --ids <vmid> [<vmid> ...]] [options]
Options:
  --host HOST Proxmox host (env PVE_HOST). Example: proxmox.example.com:8006
  --op OP Operation: start or shutdown
  --all Apply to all VMs/containers in the cluster (honors filters)
  --ids LIST Space-separated list of VMIDs to operate on
  --only-qemu Only operate on QEMU VMs
  --only-lxc Only operate on LXC containers
  --include-stopped Include stopped guests when op=shutdown (no-op otherwise)
  --force If shutdown times out, force stop
  --timeout SEC Shutdown wait timeout (default 120)
  --concurrency N Parallel operations (default 4)
  --node NODE Restrict to a specific node name
  --dry-run Show actions without executing
  --insecure Do not verify SSL (same as PVE_VERIFY_SSL=false)
  -h, --help Show this help
Auth (choose one):
  API Token: env PVE_TOKEN_ID and PVE_TOKEN_SECRET
  Password: env PVE_USER, PVE_PASSWORD (or from keyring), optional PVE_REALM (default pam)
Keyring:
  If PVE_PASSWORD is empty and 'secret-tool' is available, the script tries:
  secret-tool lookup service proxmox user "+$PVE_USER+" realm "+${PVE_REALM:-pam}+"
  If PVE_TOKEN_SECRET is empty, it tries:
  secret-tool lookup service proxmox token_id "+$PVE_TOKEN_ID+"
EOF
}

# Abort with a clear error if a required external command is missing.
require_cmd() {
  command -v "$1" >/dev/null 2>&1 || { echo "Error: required command '$1' not found" >&2; exit 1; }
}

# Look up a value in the GNOME keyring via secret-tool; empty string when
# secret-tool is absent or the lookup fails.
get_keyring() {
  local value=""
  if command -v secret-tool >/dev/null 2>&1; then
    value=$(secret-tool lookup "$@" || true)
  fi
  printf '%s' "$value"
}

# Globals set by auth_init
# NOTE(review): these are initialized as empty *strings* but later reassigned
# as *arrays* ("-H" "header"). Whichever path auth_init takes, the other two
# remain strings, and "${VAR[@]}" on an empty string expands to one empty
# argument in the curl calls below — confirm curl tolerates this in practice.
AUTH_HEADER=""
COOKIE_HEADER=""
CSRF_HEADER=""

# Establish authentication: prefer an API token (header auth), otherwise log
# in with username/password to obtain a ticket cookie + CSRF token.
auth_init() {
  local base_url="$SCHEME://$HOST/api2/json"
  # Token ID present but secret missing: try the keyring.
  if [[ -n "${PVE_TOKEN_ID:-}" && -z "${PVE_TOKEN_SECRET:-}" ]]; then
    PVE_TOKEN_SECRET=$(get_keyring service proxmox token_id "${PVE_TOKEN_ID}") || true
  fi
  if [[ -n "${PVE_TOKEN_ID:-}" && -n "${PVE_TOKEN_SECRET:-}" ]]; then
    AUTH_HEADER=("-H" "Authorization: PVEAPIToken=${PVE_TOKEN_ID}=${PVE_TOKEN_SECRET}")
    return 0
  fi
  local user="${PVE_USER:-}"
  local realm="${PVE_REALM:-pam}"
  local password="${PVE_PASSWORD:-}"
  if [[ -z "$user" ]]; then
    echo "Error: set PVE_TOKEN_ID/PVE_TOKEN_SECRET or PVE_USER[/PVE_PASSWORD]" >&2
    exit 2
  fi
  if [[ -z "$password" ]]; then
    password=$(get_keyring service proxmox user "$user" realm "$realm") || true
  fi
  if [[ -z "$password" ]]; then
    echo "Error: password not provided and not found in keyring for user '$user' realm '$realm'" >&2
    exit 2
  fi
  # Login to get ticket and CSRF token
  # NOTE(review): the escaped \( ... \) below passes literal "(" and ")"
  # arguments to curl (AUTH_HEADER is empty on this path anyway) — this looks
  # like a leftover/garbled edit and likely breaks password login; verify.
  local resp
  resp=$(curl -sS $INSECURE_FLAG -X POST \("${AUTH_HEADER[*]}"\) \
    -d "username=${user}@${realm}" \
    -d "password=${password}" \
    "$base_url/access/ticket")
  local ticket csrf
  ticket=$(echo "$resp" | jq -r '.data.ticket // empty')
  csrf=$(echo "$resp" | jq -r '.data.CSRFPreventionToken // empty')
  if [[ -z "$ticket" || -z "$csrf" ]]; then
    echo "Error: failed to obtain auth ticket (check credentials)" >&2
    echo "$resp" | jq -r '.' >&2 || true
    exit 3
  fi
  COOKIE_HEADER=("-H" "Cookie: PVEAuthCookie=${ticket}")
  CSRF_HEADER=("-H" "CSRFPreventionToken: ${csrf}")
}

# GET against the Proxmox JSON API; extra curl args may follow the path.
api_get() {
  local path="$1"; shift
  local url="$SCHEME://$HOST/api2/json$path"
  curl -sS $INSECURE_FLAG "${AUTH_HEADER[@]}" "${COOKIE_HEADER[@]}" -X GET "$url" "$@"
}

# POST against the Proxmox JSON API; includes the CSRF header for ticket auth.
api_post() {
  local path="$1"; shift
  local url="$SCHEME://$HOST/api2/json$path"
  curl -sS $INSECURE_FLAG "${AUTH_HEADER[@]}" "${COOKIE_HEADER[@]}" "${CSRF_HEADER[@]}" -X POST "$url" "$@"
}

# Parse CLI
OP=""
DO_ALL=false
IDS=()
ONLY_QEMU=false
ONLY_LXC=false
INCLUDE_STOPPED=false
FORCE=false
TIMEOUT=120
CONCURRENCY=4
NODE_FILTER="${PVE_NODE_FILTER:-}"
DRY_RUN=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --op) OP="$2"; shift 2;;
    --all) DO_ALL=true; shift;;
    # Consume VMIDs until the next --option.
    --ids) shift; while [[ $# -gt 0 && ! "$1" =~ ^-- ]]; do IDS+=("$1"); shift; done ;;
    --only-qemu) ONLY_QEMU=true; shift;;
    --only-lxc) ONLY_LXC=true; shift;;
    --include-stopped) INCLUDE_STOPPED=true; shift;;
    --force) FORCE=true; shift;;
    --timeout) TIMEOUT="$2"; shift 2;;
    --concurrency) CONCURRENCY="$2"; shift 2;;
    --node) NODE_FILTER="$2"; shift 2;;
    --host) HOST="$2"; shift 2;;
    --dry-run) DRY_RUN=true; shift;;
    --insecure) VERIFY_SSL=false; INSECURE_FLAG="-k"; shift;;
    -h|--help) usage; exit 0;;
    *) echo "Unknown argument: $1" >&2; usage; exit 2;;
  esac
done

require_cmd curl
require_cmd jq
if [[ -z "$HOST" ]]; then
  echo "Error: --host or PVE_HOST is required" >&2
  usage
  exit 2
fi
case "$OP" in
  start|shutdown) :;;
  *) echo "Error: --op must be 'start' or 'shutdown'" >&2; usage; exit 2;;
esac
if ! $DO_ALL && [[ ${#IDS[@]} -eq 0 ]]; then
  echo "Error: specify --all or a list of --ids" >&2
  exit 2
fi
if $ONLY_QEMU && $ONLY_LXC; then
  echo "Error: cannot use --only-qemu and --only-lxc together" >&2
  exit 2
fi

auth_init

# Collect targets: every guest in the cluster, reduced to the fields we need,
# then narrowed by node/type filters.
resources=$(api_get "/cluster/resources?type=vm")
filter_jq='[.data[] | {type, vmid: (.vmid|tostring), status, node}]'
items=$(echo "$resources" | jq "$filter_jq")
if [[ -n "$NODE_FILTER" ]]; then
  items=$(echo "$items" | jq --arg node "$NODE_FILTER" '[.[] | select(.node==$node)]')
fi
if $ONLY_QEMU; then
  items=$(echo "$items" | jq '[.[] | select(.type=="qemu")]')
elif $ONLY_LXC; then
  items=$(echo "$items" | jq '[.[] | select(.type=="lxc")]')
fi

# The working set of VMIDs: all filtered guests, or the explicit --ids list.
select_ids=()
if $DO_ALL; then
  mapfile -t select_ids < <(echo "$items" | jq -r '.[].vmid')
else
  select_ids=("${IDS[@]}")
fi
if [[ ${#select_ids[@]} -eq 0 ]]; then
  echo "No matching guests found." >&2
  exit 0
fi

# Build an associative map of vmid -> node,type,status
declare -A VM_NODE VM_TYPE VM_STATUS
while IFS=$'\t' read -r vid node type status; do
  VM_NODE[$vid]="$node"
  VM_TYPE[$vid]="$type"
  VM_STATUS[$vid]="$status"
done < <(
  echo "$items" | jq -r '.[] | "\(.vmid)\t\(.node)\t\(.type)\t\(.status)"'
)

# Drop guests that are unknown after filtering or already in the desired state.
work_list=()
for vid in "${select_ids[@]}"; do
  if [[ -z "${VM_NODE[$vid]:-}" ]]; then
    echo "Skip vmid=$vid (not found by filters)" >&2
    continue
  fi
  # Idempotence: skip if already desired state
  st="${VM_STATUS[$vid]}"
  case "$OP" in
    start)
      if [[ "$st" == "running" ]]; then
        echo "Already running: $vid (${VM_TYPE[$vid]} on ${VM_NODE[$vid]})"
        continue
      fi
      ;;
    shutdown)
      if [[ "$st" != "running" && $INCLUDE_STOPPED == false ]]; then
        echo "Already stopped: $vid (${VM_TYPE[$vid]} on ${VM_NODE[$vid]})"
        continue
      fi
      ;;
  esac
  work_list+=("$vid")
done
if [[ ${#work_list[@]} -eq 0 ]]; then
  echo "Nothing to do."
  exit 0
fi

# Perform the requested operation on one guest; for shutdown, optionally
# force-stop if the guest is still running after the graceful attempt.
run_action() {
  local vid="$1"
  local node="${VM_NODE[$vid]}"
  local type="${VM_TYPE[$vid]}"
  local path_base="/nodes/${node}/${type}/${vid}/status"
  echo "[$OP] ${type}:${vid} on node ${node}"
  if $DRY_RUN; then
    return 0
  fi
  case "$OP" in
    start)
      api_post "${path_base}/start" >/dev/null
      ;;
    shutdown)
      # Try graceful shutdown
      api_post "${path_base}/shutdown" -d "timeout=${TIMEOUT}" >/dev/null || true
      # Optionally force stop if still running after timeout
      # We poll once after timeout window to check status
      sleep 2
      local st_json
      st_json=$(api_get "/nodes/${node}/${type}/${vid}/status/current")
      local cur
      cur=$(echo "$st_json" | jq -r '.data.status // .data.status.current // empty')
      if [[ "$cur" == "running" && $FORCE == true ]]; then
        echo "Forcing stop: ${type}:${vid}"
        api_post "${path_base}/stop" >/dev/null || true
      fi
      ;;
    esac
}

# Parallelize with xargs -P
# NOTE(review): bash cannot export array or associative-array variables —
# the exported run_action running in the xargs child shells will see empty
# VM_NODE/VM_TYPE and empty AUTH_HEADER/COOKIE_HEADER/CSRF_HEADER, so the
# parallel path as written looks broken; verify before relying on it.
export -f run_action api_post api_get
export SCHEME HOST INSECURE_FLAG AUTH_HEADER COOKIE_HEADER CSRF_HEADER TIMEOUT FORCE DRY_RUN
declare -p VM_NODE VM_TYPE VM_STATUS >/dev/null 2>&1 || true
printf '%s\n' "${work_list[@]}" | xargs -I{} -P "$CONCURRENCY" bash -c 'run_action "$@"' _ {}

echo "Done: $OP ${#work_list[@]} item(s)."

View file

@ -1,129 +0,0 @@
#!/usr/bin/env bash
# Upgrade an existing ArgoCD installation by applying the upstream install
# manifest for a target version (or the latest GitHub release if none given).

# Abort early when kubectl is unavailable.
check_kubectl() {
  if ! command -v kubectl &>/dev/null; then
    echo "Error: kubectl is not installed or not in PATH" >&2
    exit 1
  fi
  log_info "kubectl found at $(command -v kubectl)"
}

# Leveled logging; the trailing "|| true" keeps the && short-circuit from
# returning non-zero when a message is filtered out by the verbosity level.
VERBOSE=0
log_debug() { [[ $VERBOSE -ge 3 ]] && echo "[DEBUG] $*" || true; }
log_verbose() { [[ $VERBOSE -ge 2 ]] && echo "[VERBOSE] $*" || true; }
log_info() { [[ $VERBOSE -ge 1 ]] && echo "[INFO] $*" || true; }
log_error() { echo "[ERROR] $*" >&2; }

usage() {
  cat <<EOF
Usage: $(basename "$0") [OPTIONS] [VERSION]
Upgrade ArgoCD to a new version. Requires an existing ArgoCD installation.
Examples:
  $(basename "$0") # queries the current argo version and tries to update to the immediate newest version
  $(basename "$0") v4.3.0 # incrementally update to target version
Options:
  -h, --help Show this help message
  --dry-run Show what would be done without making changes
  -v Verbose output (info level)
  -vv More verbose output (info + verbose level)
  -vvv Debug output (all log levels)
EOF
}

# ---- argument parsing -----------------------------------------------------
DRY_RUN=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    -h|--help)
      usage
      exit 0
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    -v|-vv|-vvv)
      case "$1" in
        -v) VERBOSE=1 ;;
        -vv) VERBOSE=2 ;;
        -vvv) VERBOSE=3 ;;
      esac
      shift
      ;;
    -*)
      echo "Error: Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
    *)
      # Bare positional argument is the target version.
      TARGET_VERSION="$1"
      shift
      ;;
  esac
done

log_debug "Script started with target version: ${TARGET_VERSION:-auto}"
check_kubectl
log_info "Checking current kubectl context"
CURRENT_CONTEXT=$(kubectl config current-context 2>/dev/null)
log_verbose "Current context: $CURRENT_CONTEXT"

# Refuse to run against a cluster with no ArgoCD namespace at all.
log_info "Checking for ArgoCD installation"
if ! kubectl get ns argocd &>/dev/null; then
  log_error "ArgoCD namespace not found. This script only upgrades existing installations."
  exit 1
fi
log_verbose "ArgoCD namespace found"

# Determine the currently deployed version from the argocd-server image tag.
# NOTE(review): if the argocd-server deployment is missing, CURRENT_VERSION
# stays empty and the script silently continues — confirm that is intended.
log_info "Checking current ArgoCD version"
CURRENT_VERSION=$(kubectl get deployment argocd-server -n argocd -o jsonpath='{.spec.template.spec.containers[0].image}' 2>/dev/null)
if [[ -n "$CURRENT_VERSION" ]]; then
  # Strip everything up to (and including) "argocd:" and an optional "v".
  CURRENT_VERSION=$(echo "$CURRENT_VERSION" | sed 's/.*argocd:v\?//' | tr -d ' \n')
  if [[ -n "$CURRENT_VERSION" ]]; then
    CURRENT_VERSION="${CURRENT_VERSION#v}"
    log_verbose "Current ArgoCD version: $CURRENT_VERSION"
  else
    # NOTE(review): at this point CURRENT_VERSION was already overwritten by
    # sed, so the message below cannot show the original image string.
    log_error "Could not extract ArgoCD version from image: $CURRENT_VERSION"
    exit 1
  fi
fi

# No explicit version: resolve the latest release tag from the GitHub API.
if [[ -z "$TARGET_VERSION" ]]; then
  log_info "No target version specified, querying for latest version"
  log_verbose "Fetching latest release from GitHub"
  LATEST_VERSION=$(curl -s https://api.github.com/repos/argoproj/argo-cd/releases/latest | grep -oP '"tag_name":\s*"\K[^"]+' | sed 's/^v//')
  if [[ -n "$LATEST_VERSION" ]]; then
    log_verbose "Latest version available: $LATEST_VERSION"
    TARGET_VERSION="$LATEST_VERSION"
  else
    echo "Error: Could not fetch latest version" >&2
    exit 1
  fi
fi
log_info "Target version: $TARGET_VERSION"
log_debug "Determining update path from $CURRENT_VERSION to $TARGET_VERSION"

# Download and apply the upstream install manifest for the target version.
log_info "Applying ArgoCD manifests"
log_verbose "Downloading manifest from https://raw.githubusercontent.com/argoproj/argo-cd/v${TARGET_VERSION}/manifests/install.yaml"
curl -sLO "https://raw.githubusercontent.com/argoproj/argo-cd/v${TARGET_VERSION}/manifests/install.yaml"
log_debug "Applying manifest with kubectl"
if [[ "$DRY_RUN" == true ]]; then
  log_verbose "Dry-run mode: would apply manifest"
  kubectl apply -n argocd -f install.yaml --dry-run=client
else
  kubectl apply -n argocd -f install.yaml
fi
log_verbose "Cleaning up downloaded manifest"
rm -f install.yaml
log_info "Update to ArgoCD $TARGET_VERSION initiated"

View file

@ -2,35 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/gmichels/adguard" {
version = "1.7.0"
constraints = "1.7.0"
version = "1.5.0"
constraints = "1.5.0"
hashes = [
"h1:1vvJ6KcLUR8U2BHNtj7tMsgEsGXzTKMIFsHfcZYEVyc=",
"h1:5BDrsrU/Sdain/+KkhbNzxVL81rh69wG4iKOIBf9qys=",
"h1:70gWtux/jVZQgsDjr8+j0aRHKkGZqRWCmzoX9ddC7f4=",
"h1:Qdqipgukxph9vqXiEKVzFSgXfEmGiGw1JrrQvwJOtco=",
"h1:QveIrziFNxu+Go7pl7qjH5tqPOb8pgzfTdunVgsJ3vg=",
"h1:UrJdOlCLAWC7/I2Co02RtOKT3tSGb8TwOgJ7s0sOtCo=",
"h1:W6nZfQzWb3Ds1JRytBqzsZoNBa6x4OOe9J87f1nyCRA=",
"h1:c3RK8fSEr2yfPySC0WemOC/CR3608Ra4vFwGhvdrswg=",
"h1:jizPinVWDQUN6rKwiBgRm7PcgUJe4AWlCWghgH0v7xI=",
"h1:lb9gv3IiUZDA4P/kpuvOqZmidWMIbpG+sUecM1QclNo=",
"h1:sRIMccvZq71/CxTknprnRozCChEZSq4Nmt+M+DOjTq8=",
"h1:uOdtIfvNVEHheucpt51bSCYtX2W1LKELlOkBTbjBm6o=",
"h1:woGvhSgZDFj5+yH5uHonXSIn6AaeZekb3t9oXMZB/DQ=",
"zh:0b83aa1ade1a6f7c9b1af0488dad43bf00e733d1517463d4bee51c17612546da",
"zh:15d784c16545efaf6c368b642995bb0d0ef61b6961e67b072430d445ef6c02fc",
"zh:1c4da4d20c98795fee1ac0cd9ffd880a68f06992d6fe849342c4b19f79c8aff9",
"zh:41afcdcc5236fa40a0b7ec614cb830ef03d45f8f1b8988d24d80ec999ef34b9b",
"zh:4c8e832a5a842420b5163eb5eb2bd7d460ece524efc618bdba64e4f4a2d403b5",
"zh:58e19d2f9e4bd9f2a13b631c3213157ea80ef3aa7b3b8edcd8fb341f9c06c5e5",
"zh:7380ca4d053255f787ded10c26b19ebd23d3563ddbb36d0be66bb2cef293d27d",
"zh:7b21589bb31084bb68b2deb96bd4130b8b13c1c71614704d13d4cbdfc583f3c7",
"zh:82aee49172286676cdccbc97b809b84acf3edeb164ae77cafa837118ee3769a6",
"zh:95431a266520cce112474616c27c80f0017625ef7d80aaf69118360222d7974b",
"zh:a6dc4b60beafc471d049b856df4bf793838b1e8b2079efe4a12ebf6fbd482098",
"zh:d9c5c35be3ae54a52fb444b61e442445e74df6a4ab5bc4884b0f5d55eacc4ced",
"zh:f6bd2db5d9a178c9b5b020e505affc245a0ceaa8e662f37ad9743d65e1153322",
"h1:/BVJ705U4hzg8Lu7hFUOnm14VytiO24e7QvdW3bHInU=",
"h1:40Dl473FGcpXk1aDYJougGTQ4AlboqEbfwaqiaa/ZLM=",
"h1:6qSrrEeUYOVzlTRkKhwl3sGelERctLuXzcjffpEOR+g=",
"h1:8iqz0KeoKUs8rxbhL9S1hTDy2VnxtwGPA9ajKgLMtDA=",
"h1:AGaTqfUuLJsQojCkmxD5V0AVey7c2SEHnwgTyEtEli0=",
"h1:SfoIqvHbDjqKvn9wSepgIVwP8RR33lf7SBsPkqkgk9g=",
"h1:W7X6kxIaiQmMdmkhSOYq6/5gMfprIpySJ5hYNUgGlcw=",
"h1:dah3y4X64n2haPXDe03PPkbKYicURuTcDUYMu9aUCdY=",
"h1:ejPkb1vpc6Jiwug4WN2txsHpvnPG51EGiY8Seqjcmvs=",
"h1:iGwfAXS0LP8R49slcvaKpt7IwxV2+Oddm+uyYZcZlh8=",
"h1:lKtynh009YI+ZLJlMrJA+srtzSyWsrs8VyCbfj00c1k=",
"h1:omLzSDJ3RWoIEgalXRJjkPcnKJleh1cyA0fyISDs/zg=",
"h1:uo5+dEslEZ9XIwkrodAxd848DU6ZuEB8QknmpwaTaFk=",
"h1:xYNygsWX+fj6kg7RumOCRhJMNoicHOO2RaQxUvw+fyI=",
"zh:06600d0057dd7d9b542e1a7a1efed37ea1a5307ff5656b3eaf5c46dcf59190e4",
"zh:06fd9637553ad98eb7aeb6fdaa574c913158a43d899cc39a2a117262b2f32f7e",
"zh:13bf310887046201020d54882dcf228d9dff006bc4c3d36e93be0702e4fb9bac",
"zh:3511a5d639a7289675060d3ed0fcacf488e84f0d995ea25f5bab4992c4b20621",
"zh:3aa44e593b0cd2631d1458727d1a8a588f9f16a2f62f5eb4c25a9e8482bb76d8",
"zh:48af428acd9f9b935712490c138f9d36dd8529cf0222d1ede27423a143e1d1ec",
"zh:53c45604674367d3dbb4880d28dd8def2b264fc58b680425d68955807d394558",
"zh:73b749688898ec69a7ff0379a0ad7b78ef184aef23128c8fff0fbfa5f0dcc9a6",
"zh:8008b5afbe1619290aa595f675f5d8c0aec7cd46eb2ae179384982b99f2879f5",
"zh:85e63cb5d4eabfc327c798b43695499bd84ec8351da8fc11ed9e2b8d4b537e7a",
"zh:94c5bf2393aef3ab1898b20ba9d9c329db92f5b2941e0924130fe66e1f7225e6",
"zh:b1d55eb32b58c766b4698a390dccc8affd695ffa08893785096f911e4c34b95e",
"zh:c52b09e3ef5ec10d98db389f4bcd7fa5f4ae35bdf96901e4a2e48a3c2e24aa9b",
"zh:d5bc59b6ee2c34309671e3ea4293ef7b0642dbb32a89a509aa1bc77faf945f7c",
"zh:f809ab383cca0a5f83072981c64208cbd7fa67e986a86ee02dd2c82333221e32",
]
}

View file

@ -9,7 +9,7 @@ terraform {
required_providers {
adguard = {
source = "gmichels/adguard"
version = "1.7.0"
version = "1.5.0"
}
}
}
@ -35,16 +35,64 @@ resource "adguard_rewrite" "argo_3" {
domain = "argo.fuku"
answer = "192.168.1.33"
}
# Loki: three rewrites for the same name, one per .31/.32/.33 host, so
# AdGuard answers with all three addresses (simple round-robin/failover).
resource "adguard_rewrite" "loki_1" {
  domain = "loki.fuku"
  answer = "192.168.1.31"
}

resource "adguard_rewrite" "loki_2" {
  domain = "loki.fuku"
  answer = "192.168.1.32"
}

resource "adguard_rewrite" "loki_3" {
  domain = "loki.fuku"
  answer = "192.168.1.33"
}

# Grafana: same triple-answer pattern across the .31/.32/.33 hosts.
resource "adguard_rewrite" "grafana_1" {
  domain = "grafana.fuku"
  answer = "192.168.1.31"
}

resource "adguard_rewrite" "grafana_2" {
  domain = "grafana.fuku"
  answer = "192.168.1.32"
}

resource "adguard_rewrite" "grafana_3" {
  domain = "grafana.fuku"
  answer = "192.168.1.33"
}

# Miniflux feeds: the public name points at .12, the local *.fuku name at the
# .31/.32/.33 trio.
resource "adguard_rewrite" "feeds" {
  domain = "feeds.roboces.dev"
  answer = "192.168.1.12"
}

resource "adguard_rewrite" "feeds_local_1" {
  domain = "feeds.fuku"
  answer = "192.168.1.31"
}

resource "adguard_rewrite" "feeds_local_2" {
  domain = "feeds.fuku"
  answer = "192.168.1.32"
}

resource "adguard_rewrite" "feeds_local_3" {
  domain = "feeds.fuku"
  answer = "192.168.1.33"
}

# Authentik SSO endpoint resolves to the .12 host.
resource "adguard_rewrite" "authentik" {
  domain = "auth.fukurokuju.dev"
  answer = "192.168.1.12"
}
resource "adguard_rewrite" "dd02" {
domain = "dd02.fuku"
answer = "192.168.1.19"
@ -69,28 +117,3 @@ resource "adguard_rewrite" "elastic_3" {
domain = "elastic.fuku"
answer = "192.168.1.33"
}
# Per-host rewrites for individual cluster machines.
resource "adguard_rewrite" "agent1" {
  domain = "agent1.fuku"
  answer = "192.168.1.34"
}

resource "adguard_rewrite" "master1" {
  domain = "master1.ramiel.fuku"
  answer = "192.168.1.31"
}

resource "adguard_rewrite" "master2" {
  domain = "master2.ramiel.fuku"
  answer = "192.168.1.32"
}

resource "adguard_rewrite" "k3m3" {
  domain = "k3m3.fuku"
  answer = "192.168.1.43"
}
# DNS rewrite for the pulse service.
# BUGFIX: "domain" and "answer" were swapped — the hostname was assigned to
# "answer" and the IP to "domain", producing a rewrite that could never match
# a query for pulse.fukurokuju.dev.
resource "adguard_rewrite" "pulse" {
  domain = "pulse.fukurokuju.dev"
  answer = "192.168.1.12"
}

View file

@ -2,34 +2,36 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/goauthentik/authentik" {
version = "2026.2.0"
constraints = "2026.2.0"
version = "2025.2.0"
constraints = "2025.2.0"
hashes = [
"h1:/y7//ItB3vYvtDzSgrd7eY4QHGQ7b2G/rF/rtXdvUiY=",
"h1:2MTCDpaUJ9AAckFf+lfhq5VNkl4/e1is7XIHfjFX20U=",
"h1:EECfgcbT6h+ppgQ3x06iQZSheZ4vJ6NVwXGYGzyuQQ4=",
"h1:Hg5gBZc/mPbMwH3r5AVbDycUFoeh1LlHtAvVKsnruTY=",
"h1:J9+XlKbvc8x99ZV779XH0swZhsJo+Zcrh7UCK5pKQKY=",
"h1:On3/Zzv3W72aGsJ4AhW/tnpi4hvq9cxwgf7tF6Tg+a4=",
"h1:imSeB1o2GiuyBKsK6prOkOT7dQVDK42TaxGWAb+wEvg=",
"h1:jpOkyfrzbb/LBCdW/0R2Ag+X9bRw6X1/2BRMoImfgQ8=",
"h1:pT8YP3VDxKxhT1X+UXmjN78C+8NNb3fIANWNjR0xRX4=",
"h1:pum2uBRNDUjPeP9aYszm+6GU+K7tZIpbbLrsN39l8iw=",
"h1:qYcmNSTHIU6XefHE11SmywKqgp84B6n2Fzwdj/8dRN8=",
"h1:zH1hHNBUvxXZBzxyQa6OPjDAlZyr3rA7LqwTVVZDW9s=",
"h1:zacZCsqLyCstv+qE+VhFvwCIGLQEdNBsMIM7r9umUSQ=",
"zh:00c44e8ee842e75de9cc4fd6193b10258d1dc840e5be4aaaf118ffc180dceee0",
"zh:13057f08bce3b63613e1be3997dd454ff9568c569dd983987b1550280fbe3d01",
"zh:410a1ff2ae4647cc0ab37894f81e4d474b588a0a7f005d05d55e8c3a40978dd2",
"zh:43830834d12b3c0eeabe397842f82ca3a6b58a5bc8dd837d55b821419b55ed61",
"zh:56eaedd196ed7c4003cee0434b891b38242b4fde2031978d0ddcfdf6e16ee5ad",
"zh:5b3c10bb63c3c215ed9e0918e5808b240e3f2ee8248d10cd4d824a4998a213c5",
"zh:99c14891bcb92a6b21ef4c0e60f6c0df23e3452808f3eefd67cde78d132c80d9",
"zh:9a32cdda9f939f8484e27d4200d004c44f016fe97579a111201083f4beea78e8",
"zh:ae5086816144f68de9a0002e7696321169a71473f9d161793f4ae996388f56de",
"zh:bd09409dd34608a4ef3ea80cfc5e397268e7872f2e84c1ccdc9b5698e36ddad5",
"zh:be7af8b9eb61b0eb5053f14360e5a68caeb32c115efe8e1b583f2e7c91352a2a",
"zh:e11726812a1b2caf6b6784a3d074d1f50e3d406e9629c02096a001e5a5979331",
"zh:e39183d10d8158ccab51208f4f727c7419b1b1e596f4feb23dc42aebb36d01e3",
"h1:3IWXiPFWWjLsyCf/paNU7wKpCxa7pAn4GScitbwtofk=",
"h1:3udT/dgoe8Z8YX6+407rQCmzkNsvQ8357KSqMN7klew=",
"h1:64BKLYoDRkgQww158lnyehe24XINrUoHN6h/iPqG0EA=",
"h1:A5tm4fnHaVnsn2MvTeH7q+mrs/Ml/FqrD67XiH81jCc=",
"h1:O7itcQKL+kjkZ79PuZj23lTIe9fwpjl2fyiWNsI6voA=",
"h1:SgBIbLZKf3G+rGCytdo9tnQ2LfYtTeljxl4QirsnVjo=",
"h1:bL786Y8Y5eWhwHfAn6B7SoYioYz4Pl4eep4LQOOklM8=",
"h1:hQyI+F+TaPRqc/XeanBmkn+gYfeG/tqFv43wRJ20r9M=",
"h1:jR2DnHv7UTgvb1MsQF1tKOMGgUkeCFKyi6YY6sUjOOg=",
"h1:q5hy+FtU9m57Q5s1woKat+m/4PJbz6vcSGkPTnohDXs=",
"h1:qFKhGTmYxrl1LEpJyx4iTxd0y3Rk/zfI3U4ybVJylK8=",
"h1:rnWB4NGF0BBD4CE40I5jLrqQoyXtfx2JArEfSZx6OpE=",
"h1:x9vdBK1E7jJU7buJM8D7GIoEEzY+wNZzGCTHI3tU/3E=",
"h1:ycPf4zuR4L0pcYi3osQhJeUaB7a8/oQLQvH+UcmJIj4=",
"zh:0bb96e37ac26c1718572c3bb1a4d30fb3c9dc94639e8d9f10db83394a636e829",
"zh:207822ee1ee4c76ef64a2adc5dbaa2ea253f7fbb0cde0561c92af04fe1ddbeba",
"zh:3e3d33149912946b5026070df615da87505c3dd4eaa0e414c8cd4dbd701ee182",
"zh:430419376b2b4104518fab5e2689b360612de7283b0a31dde35f9fd62d0c5e17",
"zh:51a081059dc8b71fee79807b76449df5735749ff5e05f7ea0d572f4cb0e088f9",
"zh:594ab6d4bad1bbdc47b1f5ca2126192d41c71ae7c9f4f5cc00ad50981e5b7cfe",
"zh:5d526d9af9fdb34b7218fc2c2672f0673ce553605f3233de8f98d1080625d9f9",
"zh:6ce248cf8663f1968139e7b4d02c4477388be73fac7f3223c8fe19971a112d4c",
"zh:9d0e9dd50c81c2b12de59a539d26896b54b74eb0b3ee17d0314eb47d527b3596",
"zh:a522f8ef643743c6613fcf66bde31a40e3b2121d2e09c7c48b806920524ffd13",
"zh:aecfcfac59ce3a9de8b707b5ed6f3485169ccfabda15f2a61ef8b17f39e92e83",
"zh:d3af22ed49db703207b7697d385f65d4379e0748a50af97283cd7fde0487d736",
"zh:d5d853844d84349aa454b4d1a7d68800e747e1fc3c12fe522088747c06fcba52",
"zh:f7e11091d75e26e4033eb0bf96ffe7b1444e07b81a8cb1aae1ab022b2dc6d164",
]
}

View file

@ -8,7 +8,7 @@ terraform {
required_providers {
authentik = {
source = "goauthentik/authentik"
version = "2026.2.0"
version = "2025.2.0"
}
}
}
@ -22,11 +22,6 @@ resource "authentik_group" "ci" {
users = [data.authentik_user.catalin.id]
}
resource "authentik_group" "vods" {
name = "vods"
users = [data.authentik_user.catalin.id]
}
resource "authentik_group" "admins" {
name = "authentik Admins"
is_superuser = true
@ -37,8 +32,13 @@ resource "authentik_group" "arrs" {
is_superuser = false
}
resource "authentik_group" "cloud" {
name = "cloud"
resource "authentik_group" "vpn" {
name = "vpn"
is_superuser = false
}
resource "authentik_group" "ftp" {
name = "ftp"
is_superuser = false
}
@ -123,7 +123,7 @@ module "sonarr" {
app_slug = "sonarr"
app_access_group_id = authentik_group.arrs.id
app_url = "https://sonarr.fukurokuju.dev"
internal_host = "http://192.168.1.3:30113/"
internal_host = "http://192.168.1.3:38013/"
internal_host_ssl_validation = false
app_icon = "https://sonarr.tv/img/logo.png"
}
@ -134,7 +134,7 @@ module "radarr" {
app_slug = "radarr"
app_access_group_id = authentik_group.arrs.id
app_url = "https://radarr.fukurokuju.dev"
internal_host = "http://192.168.1.3:30025/"
internal_host = "http://192.168.1.3:38012/"
internal_host_ssl_validation = false
app_icon = "https://radarr.video/img/background/logo.png"
}
@ -145,7 +145,7 @@ module "lidarr" {
app_slug = "lidarr"
app_access_group_id = authentik_group.arrs.id
app_url = "https://lidarr.fukurokuju.dev"
internal_host = "http://192.168.1.3:30071/"
internal_host = "http://192.168.1.3:38010/"
internal_host_ssl_validation = false
app_icon = "https://lidarr.audio/img/background/logo.png"
}
@ -166,65 +166,61 @@ module "prowlarr" {
app_slug = "prowlarr"
app_access_group_id = authentik_group.admins.id
app_url = "https://prowlarr.fukurokuju.dev"
internal_host = "http://192.168.1.3:30050"
internal_host = "http://192.168.1.3:38014"
internal_host_ssl_validation = false
}
module "rustical" {
module "sftpgo" {
source = "../modules/authentik-oidc"
app_name = "rustical"
app_slug = "rustical"
app_url = "https://cal.roboces.dev"
client_id = var.rustical_client_id
client_secret = var.rustical_client_secret
redirect_uris = [{ matching_mode = "strict", url = "https://cal.roboces.dev/frontend/login/oidc/callback" }]
app_access_group_id = ""
app_name = "SFTPGo"
app_slug = "SFTPGo"
client_id = var.sftpgo_client_id
client_secret = var.sftpgo_client_secret
client_type = "confidential"
app_access_group_id = authentik_group.ftp.id
redirect_uris = [
{
matching_mode = "regex",
url = "https://ftp.fukurokuju.dev/.*"
}
]
extra_property_mappings = [
]
app_icon = "https://ftp.fukurokuju.dev/static/img/logo.png"
access_token_validity = "days=10"
app_url = "https://ftp.fukurokuju.dev"
app_description = "SFTPGo"
sub_mode = "user_username"
}
module "jellyfin" {
source = "../modules/authentik-ldap"
app_name = "Jellyfin"
app_slug = "jellyfin"
base_dn = "DC=ldap,DC=fukurokuju,DC=dev"
name = "jellyfin"
app_url = "https://jelly.roboces.dev"
app_icon = "https://jelly.roboces.dev/web/touchicon.f5bbb798cb2c65908633.png"
app_access_group_id = authentik_group.arrs.id
}
module "ganymede" {
module "netbird" {
source = "../modules/authentik-oidc"
app_name = "Ganymede"
app_slug = "ganymede"
redirect_uris = [{ matching_mode = "strict", url = "https://vods.roboces.dev/api/v1/auth/oauth/callback" }]
client_id = var.ganymede_client_id
client_secret = var.ganymede_client_secret
app_url = "https://vods.roboces.dev"
app_icon = "https://vods.roboces.dev/favicon.ico"
app_access_group_id = authentik_group.vods.id
}
app_name = "netbird"
app_slug = "netbird"
client_id = var.netbird_client_id
client_secret = var.netbird_client_secret
client_type = "public"
app_access_group_id = authentik_group.vpn.id
redirect_uris = [
{
matching_mode = "strict",
url = "https://vpn.fukurokuju.dev",
},
{
matching_mode = "regex",
url = "https://vpn.fukurokuju.dev.*",
},
{
matching_mode = "strict",
url = "http://localhost:53000"
},
module "jellyseerr" {
source = "../modules/authentik-app"
app_name = "Solicitudes Jelly"
app_slug = "jellyseer"
app_url = "https://requests.roboces.dev"
app_icon = "https://requests.roboces.dev/os_icon.svg"
app_description = "Solicita series, animes y pelis para ser añadidas automáticamente a Jellyfin"
app_access_group_id = authentik_group.arrs.id
}
module "cloud" {
source = "../modules/authentik-oidc"
app_name = "Cloud"
app_slug = "cloud"
app_url = "https://cloud.roboces.dev"
client_id = var.oxicloud_client_id
client_secret = var.oxicloud_client_secret
app_icon = "https://cloud.roboces.dev/themes/opencloud/assets/favicon.svg"
redirect_uris = [{
matching_mode = "strict", url = "https://cloud.roboces.dev/api/auth/oidc/callback"
}]
app_access_group_id = authentik_group.cloud.id
]
sub_mode = "user_id"
extra_property_mappings = [
"goauthentik.io/providers/oauth2/scope-authentik_api"
]
app_icon = "https://vpn.fukurokuju.dev/apple-icon.png"
access_token_validity = "days=10"
}

View file

@ -1,5 +1,7 @@
AUTHENTIK_URL=https://auth.fukurokuju.dev
AUTHENTIK_TOKEN=
TF_VAR_firezone_client_id=
TF_VAR_firezone_client_secret=
TF_VAR_gitea_client_id=
TF_VAR_gitea_client_secret=
TF_VAR_miniflux_client_id=
@ -8,9 +10,5 @@ TF_VAR_portainer_client_id=
TF_VAR_portainer_client_secret=
TF_VAR_paperless_client_id=
TF_VAR_paperless_client_secret=
TF_VAR_rustical_client_id=
TF_VAR_rustical_client_secret=
TF_VAR_ganymede_client_id=
TF_VAR_ganymede_client_secret=
TF_VAR_oxicloud_client_id=aef61f77326b813cf8d8ba71d1ac994b5642685ca37e4710ab0079e91d87702d55fd9775d473b05aff45603bf08e78dba26850af3a815f3c3ac171d163368aa0
TF_VAR_oxicloud_client_secret=a4038df17c9fd06f86372aeaaae8f3fd1374d8978983af7b398d948ef15d1efe522a1faa2fc7652bc410c516d96cd2e4211dad4e05ba6297bdd8d9090460d5fc
TF_VAR_netbird_client_id=
TF_VAR_netbird_client_secret=

View file

@ -39,34 +39,22 @@ variable "paperless_client_secret" {
type = string
}
variable "rustical_client_id" {
description = "Rustical client ID"
variable "netbird_client_id" {
description = "Netbird client ID"
type = string
}
variable "rustical_client_secret" {
description = "Tandoor client secret"
variable "netbird_client_secret" {
description = "Netbird client secret"
type = string
}
variable "ganymede_client_id" {
description = "Ganymede client ID"
variable "sftpgo_client_id" {
description = "SFTPGo client ID"
type = string
}
variable "ganymede_client_secret" {
description = "Ganymede client secret"
type = string
}
variable "oxicloud_client_id" {
description = "Oxicloud client ID"
type = string
}
variable "oxicloud_client_secret" {
description = "Oxicloud client secret"
variable "sftpgo_client_secret" {
description = "SFTPGo client secret"
type = string
}

View file

@ -1,26 +0,0 @@
terraform {
required_version = ">= 1.6"
required_providers {
authentik = {
source = "goauthentik/authentik"
version = "2026.2.0"
}
}
}
resource "authentik_application" "app" {
name = var.app_name
slug = var.app_slug
open_in_new_tab = var.open_in_new_tab
meta_icon = var.app_icon
meta_description = var.app_description
meta_publisher = var.app_publisher
meta_launch_url = var.app_url
}
resource "authentik_policy_binding" "app_access" {
target = authentik_application.app.uuid
group = var.app_access_group_id
order = 0
count = var.app_access_group_id != "" ? 1 : 0 # only add it if the group's name exists
}

Some files were not shown because too many files have changed in this diff Show more