
add diun, immich, syncthing

Josh Bicking committed 3 months ago
commit a32b479aa2
16 changed files with 340 additions and 19 deletions
  1. backup/velero_restore_new.py (+4 -0)
  2. bazarr-pvc.yaml (+1 -1)
  3. cloudflared.yaml (+0 -12)
  4. diun-pvc.yaml (+20 -0)
  5. diun.yaml (+95 -0)
  6. immich/ingress.yaml (+15 -0)
  7. immich/pvc.yaml (+20 -0)
  8. immich/values.yaml (+104 -0)
  9. lidarr.yaml (+1 -1)
  10. prowlarr-pvc.yaml (+1 -1)
  11. radarr-pvc.yaml (+1 -1)
  12. radarr.yaml (+1 -1)
  13. sonarr-pvc.yaml (+1 -1)
  14. sonarr.yaml (+1 -1)
  15. syncthing-pvc.yaml (+15 -0)
  16. syncthing.yaml (+60 -0)

+ 4 - 0
backup/velero_restore_new.py

@@ -1,4 +1,5 @@
 import datetime
+import time
 import os
 import json
 import subprocess
@@ -48,6 +49,8 @@ def main():
             check=False, # OK if this namespace doesn't exist,
         )
 
+    # TODO check for pv with mount points in these namespaces
+
     subprocess.run(
         ["/usr/local/bin/velero", "restore", "create", "--from-backup", newest_backup['metadata']['name'], "--include-namespaces", ",".join(namespaces), "--wait"],
         env=k3s_env,
@@ -81,6 +84,7 @@ def wait_until_up(url: str, timeout_sec: int):
         except subprocess.CalledProcessError as exc:
             if start + datetime.timedelta(seconds=timeout_sec) < datetime.datetime.now():
                 raise ValueError(f">{timeout_sec} seconds passed & {url} is not up: {exc}")
+            time.sleep(5)
 
 
 def ntfy_send(data):

+ 1 - 1
bazarr-pvc.yaml

@@ -11,4 +11,4 @@ spec:
     - ReadWriteOnce
   resources:
     requests:
-      storage: 1Gi
+      storage: 10Gi

+ 0 - 12
cloudflared.yaml

@@ -62,22 +62,10 @@ data:
     credentials-file: /etc/cloudflared/creds/credentials.json
     metrics: 0.0.0.0:2000
     ingress:
-    - hostname: jibby.org
-      service: http://jekyll-service.blog.svc.cluster.local:80
-    - hostname: nextcloud.jibby.org
-      service: http://nextcloud.nextcloud.svc.cluster.local:8080
     - hostname: gogs.jibby.org
       service: http://gogs-service.gogs.svc.cluster.local:3000
     #- hostname: matrix.jibby.org
     #  service: http://matrix-service.matrix.svc.cluster.local:8008
-    - hostname: selfoss.jibby.org
-      service: http://selfoss-service.selfoss.svc.cluster.local:8888
-    - hostname: plex.jibby.org
-      service: http://plex-service.plex.svc.cluster.local:32400
-    - hostname: jellyfin.jibby.org
-      service: http://jellyfin-service.plex.svc.cluster.local:8096
-    - hostname: s3.jibby.org
-      service: http://rook-ceph-rgw-ceph-objectstore.rook-ceph.svc.cluster.local:6980
     - hostname: miniflux.jibby.org
       service: http://miniflux-service.miniflux.svc.cluster.local:8080
     - hostname: vaultwarden.jibby.org

+ 20 - 0
diun-pvc.yaml

@@ -0,0 +1,20 @@
+---
+apiVersion: v1
+kind: Namespace
+metadata:
+    name: diun
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: diun-pvc
+  namespace: diun
+  labels:
+    app: diun
+spec:
+  storageClassName: ceph-block
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 2Gi

+ 95 - 0
diun.yaml

@@ -0,0 +1,95 @@
+---
+apiVersion: v1
+kind: Namespace
+metadata:
+    name: diun
+---
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  namespace: diun
+  name: diun
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+  name: diun
+rules:
+  - apiGroups:
+      - ""
+    resources:
+      - pods
+    verbs:
+      - get
+      - watch
+      - list
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+  name: diun
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: ClusterRole
+  name: diun
+subjects:
+  - kind: ServiceAccount
+    name: diun
+    namespace: diun
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  namespace: diun
+  name: diun
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: diun
+  template:
+    metadata:
+      labels:
+        app: diun
+    spec:
+      serviceAccountName: diun
+      containers:
+        - name: diun
+          image: crazymax/diun:latest
+          imagePullPolicy: Always
+          args: ["serve"]
+          env:
+            - name: TZ
+              value: "America/New_York"
+            - name: LOG_LEVEL
+              value: "info"
+            - name: LOG_JSON
+              value: "false"
+            - name: DIUN_WATCH_WORKERS
+              value: "20"
+            - name: DIUN_WATCH_SCHEDULE
+              value: "0 */6 * * *"
+            - name: DIUN_WATCH_JITTER
+              value: "30s"
+            - name: DIUN_PROVIDERS_KUBERNETES
+              value: "true"
+            - name: DIUN_PROVIDERS_KUBERNETES_WATCHBYDEFAULT
+              value: "true"
+            - name: DIUN_NOTIF_NTFY_ENDPOINT
+              value: "https://ntfy.jibby.org"
+            - name: DIUN_NOTIF_NTFY_TOKEN
+              valueFrom:
+                secretKeyRef:
+                  name: diun
+                  key: ntfy-token
+                  optional: false
+            - name: DIUN_NOTIF_NTFY_TOPIC
+              value: "diun"
+          volumeMounts:
+            - mountPath: "/data"
+              name: "data"
+      restartPolicy: Always
+      volumes:
+        - name: data
+          persistentVolumeClaim:
+            claimName: diun-pvc
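
Note: the diun Deployment above reads its ntfy token from a Secret named diun
(key ntfy-token, marked optional: false) that is not created by this commit. A
minimal sketch of that Secret, with a placeholder token value, might look like:

# Not part of this commit; the token value is a placeholder.
apiVersion: v1
kind: Secret
metadata:
  name: diun
  namespace: diun
type: Opaque
stringData:
  ntfy-token: "REPLACE_WITH_NTFY_ACCESS_TOKEN"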

+ 15 - 0
immich/ingress.yaml

@@ -0,0 +1,15 @@
+apiVersion: traefik.containo.us/v1alpha1
+kind: IngressRoute
+metadata:
+  name: immich
+  namespace: immich
+spec:
+  entryPoints:
+  - websecure
+  routes:
+  - kind: Rule
+    match: Host(`immich.jibby.org`)
+    services:
+    - kind: Service
+      name: immich-server
+      port: 3001

+ 20 - 0
immich/pvc.yaml

@@ -0,0 +1,20 @@
+---
+apiVersion: v1
+kind: Namespace
+metadata:
+    name: immich
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: immich-pvc
+  namespace: immich
+  labels:
+    app: immich
+spec:
+  storageClassName: ceph-block
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 300Gi

+ 104 - 0
immich/values.yaml

@@ -0,0 +1,104 @@
+# for immich 0.3.1
+## This chart relies on the common library chart from bjw-s
+## You can find it at https://github.com/bjw-s/helm-charts/tree/main/charts/library/common
+## Refer there for more detail about the supported values
+
+# These entries are shared between all the Immich components
+
+env:
+  LOG_LEVEL: 'debug'
+  TZ: 'America/New_York'
+  REDIS_HOSTNAME: '{{ printf "%s-redis-master" .Release.Name }}'
+  DB_HOSTNAME: "{{ .Release.Name }}-postgresql"
+  DB_USERNAME: "{{ .Values.postgresql.global.postgresql.auth.username }}"
+  DB_DATABASE_NAME: "{{ .Values.postgresql.global.postgresql.auth.database }}"
+  # -- You should provide your own secret outside of this helm-chart and use `postgresql.global.postgresql.auth.existingSecret` to provide credentials to the postgresql instance
+  DB_PASSWORD: "{{ .Values.postgresql.global.postgresql.auth.password }}"
+  IMMICH_MACHINE_LEARNING_URL: '{{ printf "http://%s-machine-learning:3003" .Release.Name }}'
+
+image:
+  tag: v1.91.4
+
+immich:
+  persistence:
+    # Main data store for all photos shared between different components.
+    library:
+      # Automatically creating the library volume is not supported by this chart
+      # You have to specify an existing PVC to use
+      existingClaim: immich-pvc
+
+# Dependencies
+
+postgresql:
+  enabled: true
+  image:
+    repository: tensorchord/pgvecto-rs
+    tag: pg14-v0.1.11
+  global:
+    postgresql:
+      auth:
+        username: immich
+        database: immich
+        password: immich
+  primary:
+    persistence:
+      size: 8Gi
+      storageClass: ceph-block
+    initdb:
+      scripts:
+        create-extensions.sql: |
+          CREATE EXTENSION cube;
+          CREATE EXTENSION earthdistance;
+          CREATE EXTENSION vectors;
+
+redis:
+  enabled: true
+  architecture: standalone
+  auth:
+    enabled: false
+  master:
+    persistence:
+      size: 8Gi
+      storageClass: ceph-block
+
+# Immich components
+
+server:
+  enabled: true
+  image:
+    repository: ghcr.io/immich-app/immich-server
+    pullPolicy: IfNotPresent
+
+  ingress:
+    main:
+      enabled: false
+      annotations:
+        # proxy-body-size is set to 0 to remove the body limit on file uploads
+        nginx.ingress.kubernetes.io/proxy-body-size: "0"
+      hosts:
+        - host: immich.local
+          paths:
+            - path: "/"
+      tls: []
+
+microservices:
+  enabled: true
+  image:
+    repository: ghcr.io/immich-app/immich-server
+    pullPolicy: IfNotPresent
+
+machine-learning:
+  enabled: true
+  image:
+    repository: ghcr.io/immich-app/immich-machine-learning
+    pullPolicy: IfNotPresent
+  env:
+    TRANSFORMERS_CACHE: /cache
+  persistence:
+    cache:
+      enabled: true
+      size: 10Gi
+      # Optional: Set this to pvc to avoid downloading the ML models every start.
+      type: pvc
+      accessMode: ReadWriteOnce
+      storageClass: ceph-block
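
The DB_PASSWORD comment above recommends keeping the Postgres credentials
outside this chart and pointing the bundled postgresql subchart at an existing
Secret. A hedged sketch of that override, assuming a pre-created Secret named
immich-postgresql holding the password keys the subchart expects (the Secret
name is illustrative, not part of this commit):

postgresql:
  global:
    postgresql:
      auth:
        username: immich
        database: immich
        # Assumed pre-created Secret; replaces the literal password above.
        existingSecret: immich-postgresql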

+ 1 - 1
lidarr.yaml

@@ -18,7 +18,7 @@ spec:
     spec:
       containers:
       - name: lidarr
-        image: lscr.io/linuxserver/lidarr:develop-1.1.3.2982-ls75
+        image: lscr.io/linuxserver/lidarr:nightly-2.1.3.3913-ls51
         ports:
         - containerPort: 8686
           name: http-web-svc

+ 1 - 1
prowlarr-pvc.yaml

@@ -11,4 +11,4 @@ spec:
     - ReadWriteOnce
   resources:
     requests:
-      storage: 1Gi
+      storage: 10Gi

+ 1 - 1
radarr-pvc.yaml

@@ -11,4 +11,4 @@ spec:
     - ReadWriteOnce
   resources:
     requests:
-      storage: 1Gi
+      storage: 10Gi

+ 1 - 1
radarr.yaml

@@ -18,7 +18,7 @@ spec:
     spec:
       containers:
       - name: radarr
-        image: lscr.io/linuxserver/radarr:nightly-4.4.0.6875-ls428
+        image: lscr.io/linuxserver/radarr:nightly-5.3.1.8427-ls148
         ports:
         - containerPort: 7878
           name: http-web-svc

+ 1 - 1
sonarr-pvc.yaml

@@ -11,4 +11,4 @@ spec:
     - ReadWriteOnce
   resources:
     requests:
-      storage: 1Gi
+      storage: 10Gi

+ 1 - 1
sonarr.yaml

@@ -18,7 +18,7 @@ spec:
     spec:
       containers:
       - name: sonarr
-        image: lscr.io/linuxserver/sonarr:develop-4.0.0.344-ls383
+        image: lscr.io/linuxserver/sonarr:develop-4.0.0.752-ls25
         ports:
         - containerPort: 8989
           name: http-web-svc

+ 15 - 0
syncthing-pvc.yaml

@@ -0,0 +1,15 @@
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: syncthing-pvc
+  namespace: plex
+  labels:
+    app: syncthing
+spec:
+  storageClassName: ceph-block
+  accessModes:
+    - ReadWriteOnce
+  resources:
+    requests:
+      storage: 2Gi

+ 60 - 0
syncthing.yaml

@@ -0,0 +1,60 @@
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  namespace: plex
+  name: syncthing
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: syncthing
+  template:
+    metadata:
+      labels:
+        app: syncthing
+      annotations:
+        backup.velero.io/backup-volumes-excludes: data
+    spec:
+      containers:
+        - name: syncthing
+          image: linuxserver/syncthing:1.27.2
+          imagePullPolicy: Always
+          ports:
+          - containerPort: 8384
+            name: http-web-svc
+          env:
+            - name: PUID
+              value: "1000"
+            - name: PGID
+              value: "1000"
+            - name: TZ
+              value: "America/New_York"
+          volumeMounts:
+            - mountPath: "/data"
+              name: "data"
+            - mountPath: "/config"
+              name: "config"
+      restartPolicy: Always
+      volumes:
+      - name: data
+        persistentVolumeClaim:
+          claimName: data-pvc
+      - name: config
+        persistentVolumeClaim:
+          claimName: syncthing-pvc
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: syncthing-service
+  namespace: plex
+spec:
+  selector:
+    app: syncthing
+  type: ClusterIP
+  ports:
+  - name: syncthing-web-port
+    protocol: TCP
+    port: 8384
+    targetPort: http-web-svc