Compare commits

..

30 commits

Author SHA1 Message Date
Renovate bot
2517f8a30d Update Helm release longhorn to v1.8.1 2025-03-06 00:02:17 +00:00
dd6202a26f Bump some versions
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 18:09:11 +00:00
a70b28c905 Let it go
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 17:39:55 +00:00
2b39418104 Oops wrong directory
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 17:39:10 +00:00
83b0b4be59 Expose so I can use if needed in-house
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 17:38:08 +00:00
13a005f6b5 Calm argo tf down
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 17:36:37 +00:00
440682a8ff Add the tailscale proxy for use by ISO downloaders
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 17:34:34 +00:00
7a57c678c2 Move TS to a separate deploy
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 16:26:53 +00:00
020d13ada2 Not optional lol
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:49:46 +00:00
f709eec878 Real sidecar perhaps with manual start
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:41:43 +00:00
3c3107b021 This is frustratingly necessary
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:27:13 +00:00
9cd9b6fdfe Fix version, renovate has our back
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:25:58 +00:00
c91aa4c42b This is frustratingly necessary
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:23:02 +00:00
a885f275a6 This is frustratingly necessary
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:16:50 +00:00
a7597c2b52 Preparing for magic
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:06:34 +00:00
187db433de Preparing for magic
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 11:05:40 +00:00
8893c46d72 Merge pull request 'Update grafana/grafana Docker tag to v9.5.21' (#21) from renovate/grafana-grafana-9.x into main
Reviewed-on: #21
2025-03-05 10:49:06 +00:00
7f6b2e43f4 Let me shoot myself in the foot if I want to
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 10:11:50 +00:00
6e45ada258 Let's have a default ingress class, eh? (yaml was a mistake)
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 10:09:08 +00:00
eb0b528423 Let's have a default ingress class, eh?
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 10:08:08 +00:00
6c29998db6 We might need it in future, but not for now
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:56:09 +00:00
a1c4693c7c Running Drupal in k8s is a fools errand
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:51:49 +00:00
1152916cc3 reconciling reality
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:40:59 +00:00
6369b00e24 how are these not updated already
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:31:52 +00:00
9d0b2425a9 Remove old oldseries
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:29:07 +00:00
fae5c02ba6 Remove old oldseries
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:27:37 +00:00
29e535cb29 Allow insecure argo, as its not exposed anyway and would be fronted by ingress-nginx anyway. Allows for http webhooks.
Signed-off-by: Martyn Ranyard <m@rtyn.berlin>
2025-03-05 09:19:04 +00:00
dab777f748 Merge pull request 'Update benbusby/whoogle-search Docker digest to 5bbb30f' (#16) from renovate/benbusby-whoogle-search into main
Reviewed-on: #16
2025-03-05 08:53:39 +00:00
Renovate bot
8987b8d2a0 Update grafana/grafana Docker tag to v9.5.21 2025-03-03 00:01:47 +00:00
Renovate bot
88f6961dc1 Update benbusby/whoogle-search Docker digest to 5bbb30f 2025-03-01 00:01:28 +00:00
22 changed files with 173 additions and 123 deletions

View file

@@ -24,3 +24,11 @@ patches:
version: v1
kind: ConfigMap
name: argocd-cm
- patch: |-
- op: add
path: /spec/template/spec/containers/0/args/-
value: --insecure=true
target:
kind: Deployment
name: argocd-server

View file

@@ -33,8 +33,6 @@ spec:
mountPath: /config
- name: series
mountPath: /series
- name: oldseries
mountPath: /oldseries
- name: films
mountPath: /films
volumes:
@@ -44,9 +42,6 @@ spec:
- name: series
persistentVolumeClaim:
claimName: smb-series
- name: oldseries
persistentVolumeClaim:
claimName: smb-oldseries
- name: films
persistentVolumeClaim:
claimName: smb-films

View file

@@ -12,18 +12,6 @@ spec:
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: smb-oldseries
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi
storageClassName: smb-oldseries
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: smb-films
spec:
@@ -32,4 +20,4 @@ spec:
resources:
requests:
storage: 1Gi
storageClassName: smb-films
storageClassName: smb-films

View file

@@ -32,7 +32,7 @@ spec:
automountServiceAccountToken: false
containers:
- env: []
image: grafana/grafana:9.5.3
image: grafana/grafana:9.5.21
name: grafana
ports:
- containerPort: 3000

View file

@@ -18,7 +18,7 @@ spec:
app: lidarr
spec:
containers:
- image: hotio/lidarr:release
- image: hotio/lidarr:release-2.9.6.4552
name: lidarr
resources:
requests:

View file

@@ -16,7 +16,7 @@ spec:
app: prowlarr
spec:
containers:
- image: hotio/prowlarr:release-1.26.1.4844
- image: hotio/prowlarr:release-1.31.2.4975
imagePullPolicy: Always
name: prowlarr
ports:

View file

@@ -18,7 +18,7 @@ spec:
app: radarr
spec:
containers:
- image: hotio/radarr:release-5.18.4.9674
- image: hotio/radarr:release-5.19.3.9730
imagePullPolicy: IfNotPresent
name: radarr
ports:

View file

@@ -38,7 +38,7 @@ metadata:
annotations:
configmap.reloader.stakater.com/reload: "ser2net"
spec:
replicas: 1
replicas: 0
strategy:
type: Recreate
selector:

View file

@@ -4,7 +4,7 @@ metadata:
name: smb-films
provisioner: smb.csi.k8s.io
parameters:
source: "//172.20.0.125/films"
source: "//172.20.0.70/films"
csi.storage.k8s.io/node-stage-secret-name: smb-creds
csi.storage.k8s.io/node-stage-secret-namespace: kube-system
reclaimPolicy: Retain

View file

@@ -1,17 +0,0 @@
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
name: smb-oldseries
provisioner: smb.csi.k8s.io
parameters:
source: "//hp40l/disk2/oldseries"
csi.storage.k8s.io/node-stage-secret-name: smb-creds
csi.storage.k8s.io/node-stage-secret-namespace: kube-system
reclaimPolicy: Retain
volumeBindingMode: Immediate
mountOptions:
- dir_mode=0777
- file_mode=0777
- uid=1001
- gid=1001
- noperm

View file

@@ -4,7 +4,7 @@ metadata:
name: smb-series
provisioner: smb.csi.k8s.io
parameters:
source: "//hp40l/disk2/series"
source: "//172.20.0.70/series"
csi.storage.k8s.io/node-stage-secret-name: smb-creds
csi.storage.k8s.io/node-stage-secret-namespace: kube-system
reclaimPolicy: Retain

View file

@@ -0,0 +1,13 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: tailscale-script
data:
script.sh: |
tailscaled --socks5-server=localhost:1055 --outbound-http-proxy-listen=localhost:1055 --tun=userspace-networking &
tailscale up --authkey=$TS_AUTHKEY --advertise-tags=tag:k8s --hostname k8s-tailscale-proxy &
sleep 3
echo "Waiting for $COUNTRY to do something"
while ! tailscale exit-node list 2>/dev/null | grep $COUNTRY >/dev/null; do echo -n . ;sleep 5; done
tailscale set --exit-node $(tailscale exit-node list | grep $COUNTRY | cut -f2 -d' ' | shuf | head -n1)
while true; do sleep 1; done

View file

@@ -0,0 +1,100 @@
apiVersion: apps/v1
kind: Deployment
metadata:
labels:
app: tailscale-proxy
name: tailscale-proxy
spec:
replicas: 1
selector:
matchLabels:
app: tailscale-proxy
strategy:
rollingUpdate:
maxSurge: 25%
maxUnavailable: 25%
type: RollingUpdate
template:
metadata:
labels:
app: tailscale-proxy
spec:
containers:
- command:
- /bin/sh
- -c
- 'sh /script/script.sh'
env:
- name: TS_KUBE_SECRET
value: tailscale
- name: COUNTRY
value: Switzerland
- name: TS_AUTHKEY
valueFrom:
secretKeyRef:
key: TS_AUTHKEY
name: tailscale-auth
image: ghcr.io/tailscale/tailscale:v1.80.3
imagePullPolicy: IfNotPresent
startupProbe:
exec:
command:
- /bin/sh
- -c
- tailscale ip | grep ^100 > /dev/null
periodSeconds: 30
failureThreshold: 30
livenessProbe:
exec:
command:
- /bin/sh
- -c
- tailscale ip | grep ^100 > /dev/null
periodSeconds: 30
failureThreshold: 2
readinessProbe:
exec:
command:
- /bin/sh
- -c
- http_proxy=127.0.0.1:1055 wget -O- ifconfig.co/country 2>&1 | grep $COUNTRY > /dev/null
initialDelaySeconds: 60
periodSeconds: 60
failureThreshold: 3
name: tailscale
securityContext:
privileged: true
runAsGroup: 0
runAsUser: 0
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
volumeMounts:
- mountPath: /var/run/secrets/kubernetes.io/serviceaccount
name: kube-api-access-t4rzn
readOnly: true
- mountPath: /script
name: script
serviceAccount: tailscale
serviceAccountName: tailscale
volumes:
- name: script
configMap:
name: tailscale-script
- name: kube-api-access-t4rzn
projected:
defaultMode: 420
sources:
- serviceAccountToken:
expirationSeconds: 3607
path: token
- configMap:
items:
- key: ca.crt
path: ca.crt
name: kube-root-ca.crt
- downwardAPI:
items:
- fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
path: namespace

View file

@@ -0,0 +1,4 @@
apiVersion: v1
kind: ServiceAccount
metadata:
name: tailscale

View file

@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
labels:
app: tailscale-proxy
app.kubernetes.io/instance: tailscale-proxy
name: tailscale-proxy
spec:
ports:
- port: 1055
protocol: TCP
targetPort: 1055
selector:
app: tailscale-proxy
type: LoadBalancer

View file

@@ -14,6 +14,7 @@ spec:
labels:
app: qbittorrent
spec:
initContainers:
containers:
- image: qbittorrentofficial/qbittorrent-nox:latest
name: qbittorrent
@@ -34,40 +35,6 @@ spec:
value: "/config"
- name: QBT_DOWNLOADS
value: "/downloads"
- env:
- name: TS_KUBE_SECRET
value: tailscale
- name: TS_USERSPACE
value: "false"
- name: TS_OUTBOUND_HTTP_PROXY_LISTEN
value: "localhost:1055"
- name: TS_SOCKS5_SERVER
value: "localhost:1055"
- name: TS_EXTRA_ARGS
value: "--exit-node=100.90.55.121"
- name: TS_AUTHKEY
valueFrom:
secretKeyRef:
key: TS_AUTHKEY
name: tailscale-auth
optional: true
livenessProbe:
exec:
command:
- ping
- -c1
- 100.100.100.100
initialDelaySeconds: 120
periodSeconds: 5
image: ghcr.io/tailscale/tailscale:latest
name: ts-sidecar
securityContext:
runAsGroup: 1000
runAsUser: 1000
volumeMounts:
- mountPath: /var/run/secrets/kubernetes.io/serviceaccount
name: kube-api-access-t4rzn
readOnly: true
preemptionPolicy: PreemptLowerPriority
priority: 0
serviceAccountName: tailscale

View file

@@ -24,7 +24,7 @@ spec:
value: en
- name: WHOOGLE_CONFIG_SEARCH_LANGUAGE
value: en
image: benbusby/whoogle-search@sha256:ecccdb598f890140bf5564ea0307d3a72871ab3d14fbf22e308b904846e5c590
image: benbusby/whoogle-search@sha256:5bbb30fc4cf67563b48529c5291813b3d49c290e1e8b9e3aaa5081e9cb6e40c0
imagePullPolicy: IfNotPresent
livenessProbe:
failureThreshold: 3

View file

@@ -8,4 +8,4 @@ spec:
storageClassName: longhorn-fast
resources:
requests:
storage: 128Mi
storage: 1280Mi

View file

@@ -14,6 +14,10 @@ spec:
targetRevision: 4.12.0
helm:
parameters:
- name: controller.ingressClassResource.default
value: "true"
- name: controller.config.annotations-risk-level
value: "Critical"
- name: controller.service.type
value: LoadBalancer
- name: controller.allowSnippetAnnotations

View file

@@ -1,44 +0,0 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: bikerwitch
namespace: argocd
spec:
destination:
namespace: bikerwitch
server: https://kubernetes.default.svc
project: default
source:
helm:
parameters:
- name: service.type
value: LoadBalancer
- name: persistence.enabled
value: "true"
- name: persistence.storageClass
value: "longhorn-fast"
- name: image.repository
value: drupal
- name: image.tag
value: 9.4-php8.0-apache
values: |-
ingress:
enabled: true
annotations:
cert-manager.io/cluster-issuer: letsencrypt
kubernetes.io/ingress.class: nginx
external-dns.alpha.kubernetes.io/target: armnleg.martyn.berlin
hosts:
- host: bikerwitch.martyn.berlin
paths:
- /
- host: www.bikerwitch.org.uk
paths:
- /
tls:
- hosts:
- bikerwitch.martyn.berlin
- www.bikerwitch.org.uk
path: apps-helm/drupal
repoURL: https://git.martyn.berlin/martyn/infra4talos.git
targetRevision: HEAD

View file

@@ -25,7 +25,7 @@ spec:
value: "false"
chart: longhorn
repoURL: https://charts.longhorn.io
targetRevision: 1.8.0
targetRevision: 1.8.1
syncPolicy:
automated:
selfHeal: true

View file

@@ -0,0 +1,17 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: tailscale-proxy
namespace: argocd
spec:
destination:
namespace: tailscale-proxy
server: https://kubernetes.default.svc
project: apps
source:
path: apps-kustomized/tailscale-proxy
repoURL: https://git.martyn.berlin/martyn/infra4talos
targetRevision: HEAD
syncPolicy:
automated:
selfHeal: true