---
# Source: harbor/templates/core/core-secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: harbor-harbor-harbor-core
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    kube-1password: nzrnkmyueqyr7qantfnizndoni
    kube-1password/vault: Kubernetes
    kube-1password/secret-text-parse: "true"
type: Opaque
---
# Source: harbor/templates/database/database-secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: "harbor-harbor-harbor-database"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    kube-1password: fyedoxemaq6ro7mxh5espv4ynu
    kube-1password/vault: Kubernetes
    kube-1password/secret-text-parse: "true"
type: Opaque
---
# Source: harbor/templates/exporter/exporter-secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: harbor-harbor-harbor-exporter
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    kube-1password: z2zxjpo26imlov3dxoq3ruwvw4
    kube-1password/vault: Kubernetes
    kube-1password/secret-text-parse: "true"
type: Opaque
---
# Source: harbor/templates/jobservice/jobservice-secrets.yaml
apiVersion: v1
kind: Secret
metadata:
  name: "harbor-harbor-harbor-jobservice"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    kube-1password: fx6f6bomevldvtuuffvifuwm74
    kube-1password/vault: Kubernetes
    kube-1password/secret-text-parse: "true"
type: Opaque
---
# Source: harbor/templates/registry/registry-secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: "harbor-harbor-harbor-registry"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    kube-1password: p2mdm5s7kmkffjk5ttakhvjru4
    kube-1password/vault: Kubernetes
    kube-1password/secret-text-parse: "true"
type: Opaque
---
# Source: harbor/templates/registry/registry-secret.yaml
apiVersion: v1
kind: Secret
metadata:
  name: "harbor-harbor-harbor-registry-htpasswd"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    kube-1password: qfmged45pt5jsytf2zz5dgaii4
    kube-1password/vault: Kubernetes
    kube-1password/secret-text-parse: "true"
type: Opaque
---
# Source: harbor/templates/core/core-cm.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: harbor-harbor-harbor-core
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
data:
  app.conf: |+
    appname = Harbor
    runmode = prod
    enablegzip = true
    [prod]
    httpport = 8080
  PORT: "8080"
  DATABASE_TYPE: "postgresql"
  POSTGRESQL_HOST: "harbor-harbor-harbor-database"
  POSTGRESQL_PORT: "5432"
  POSTGRESQL_USERNAME: "postgres"
  POSTGRESQL_DATABASE: "registry"
  POSTGRESQL_SSLMODE: "disable"
  POSTGRESQL_MAX_IDLE_CONNS: "100"
  POSTGRESQL_MAX_OPEN_CONNS: "900"
  EXT_ENDPOINT: "https://docker.cluster.fun"
  CORE_URL: "http://harbor-harbor-harbor-core:80"
  JOBSERVICE_URL: "http://harbor-harbor-harbor-jobservice"
  REGISTRY_URL: "http://harbor-harbor-harbor-registry:5000"
  TOKEN_SERVICE_URL: "http://harbor-harbor-harbor-core:80/service/token"
  WITH_NOTARY: "false"
  NOTARY_URL: "http://harbor-harbor-harbor-notary-server:4443"
  CORE_LOCAL_URL: "http://127.0.0.1:8080"
  WITH_TRIVY: "false"
  TRIVY_ADAPTER_URL: "http://harbor-harbor-harbor-trivy:8080"
  REGISTRY_STORAGE_PROVIDER_NAME: "s3"
  WITH_CHARTMUSEUM: "false"
  CHART_REPOSITORY_URL: "http://harbor-harbor-harbor-chartmuseum"
  LOG_LEVEL: "info"
  CONFIG_PATH: "/etc/core/app.conf"
  CHART_CACHE_DRIVER: "redis"
  _REDIS_URL_CORE: "redis://harbor-harbor-harbor-redis:6379/0?idle_timeout_seconds=30"
  _REDIS_URL_REG: "redis://harbor-harbor-harbor-redis:6379/2?idle_timeout_seconds=30"
  PORTAL_URL: "http://harbor-harbor-harbor-portal"
  REGISTRY_CONTROLLER_URL: "http://harbor-harbor-harbor-registry:8080"
  REGISTRY_CREDENTIAL_USERNAME: "harbor_registry_user"
  HTTP_PROXY: ""
  HTTPS_PROXY: ""
  NO_PROXY: "harbor-harbor-harbor-core,harbor-harbor-harbor-jobservice,harbor-harbor-harbor-database,harbor-harbor-harbor-chartmuseum,harbor-harbor-harbor-notary-server,harbor-harbor-harbor-notary-signer,harbor-harbor-harbor-registry,harbor-harbor-harbor-portal,harbor-harbor-harbor-trivy,harbor-harbor-harbor-exporter,127.0.0.1,localhost,.local,.internal"
  PERMITTED_REGISTRY_TYPES_FOR_PROXY_CACHE: "docker-hub,harbor,azure-acr,aws-ecr,google-gcr,quay,docker-registry"
  METRIC_ENABLE: "true"
  METRIC_PATH: "/metrics"
  METRIC_PORT: "8001"
  METRIC_NAMESPACE: harbor
  METRIC_SUBSYSTEM: core
---
# Source: harbor/templates/exporter/exporter-cm-env.yaml
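# The exporter scrapes the core API and Postgres directly and reads job-queue state
# from the same Redis database (db 1) and namespace that the jobservice uses.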
"http://harbor-harbor-harbor-portal" REGISTRY_CONTROLLER_URL: "http://harbor-harbor-harbor-registry:8080" REGISTRY_CREDENTIAL_USERNAME: "harbor_registry_user" HTTP_PROXY: "" HTTPS_PROXY: "" NO_PROXY: "harbor-harbor-harbor-core,harbor-harbor-harbor-jobservice,harbor-harbor-harbor-database,harbor-harbor-harbor-chartmuseum,harbor-harbor-harbor-notary-server,harbor-harbor-harbor-notary-signer,harbor-harbor-harbor-registry,harbor-harbor-harbor-portal,harbor-harbor-harbor-trivy,harbor-harbor-harbor-exporter,127.0.0.1,localhost,.local,.internal" PERMITTED_REGISTRY_TYPES_FOR_PROXY_CACHE: "docker-hub,harbor,azure-acr,aws-ecr,google-gcr,quay,docker-registry" METRIC_ENABLE: "true" METRIC_PATH: "/metrics" METRIC_PORT: "8001" METRIC_NAMESPACE: harbor METRIC_SUBSYSTEM: core --- # Source: harbor/templates/exporter/exporter-cm-env.yaml apiVersion: v1 kind: ConfigMap metadata: name: "harbor-harbor-harbor-exporter-env" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" data: HTTP_PROXY: "" HTTPS_PROXY: "" NO_PROXY: "harbor-harbor-harbor-core,harbor-harbor-harbor-jobservice,harbor-harbor-harbor-database,harbor-harbor-harbor-chartmuseum,harbor-harbor-harbor-notary-server,harbor-harbor-harbor-notary-signer,harbor-harbor-harbor-registry,harbor-harbor-harbor-portal,harbor-harbor-harbor-trivy,harbor-harbor-harbor-exporter,127.0.0.1,localhost,.local,.internal" LOG_LEVEL: "info" HARBOR_EXPORTER_PORT: "8001" HARBOR_EXPORTER_METRICS_PATH: "/metrics" HARBOR_EXPORTER_METRICS_ENABLED: "true" HARBOR_EXPORTER_CACHE_TIME: "23" HARBOR_EXPORTER_CACHE_CLEAN_INTERVAL: "14400" HARBOR_METRIC_NAMESPACE: harbor HARBOR_METRIC_SUBSYSTEM: exporter HARBOR_REDIS_URL: "redis://harbor-harbor-harbor-redis:6379/1" HARBOR_REDIS_NAMESPACE: harbor_job_service_namespace HARBOR_REDIS_TIMEOUT: "3600" HARBOR_SERVICE_SCHEME: "http" HARBOR_SERVICE_HOST: "harbor-harbor-harbor-core" HARBOR_SERVICE_PORT: "80" HARBOR_DATABASE_HOST: "harbor-harbor-harbor-database" HARBOR_DATABASE_PORT: "5432" HARBOR_DATABASE_USERNAME: "postgres" HARBOR_DATABASE_DBNAME: "registry" HARBOR_DATABASE_SSLMODE: "disable" HARBOR_DATABASE_MAX_IDLE_CONNS: "100" HARBOR_DATABASE_MAX_OPEN_CONNS: "900" --- # Source: harbor/templates/jobservice/jobservice-cm-env.yaml apiVersion: v1 kind: ConfigMap metadata: name: "harbor-harbor-harbor-jobservice-env" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" data: CORE_URL: "http://harbor-harbor-harbor-core:80" TOKEN_SERVICE_URL: "http://harbor-harbor-harbor-core:80/service/token" REGISTRY_URL: "http://harbor-harbor-harbor-registry:5000" REGISTRY_CONTROLLER_URL: "http://harbor-harbor-harbor-registry:8080" REGISTRY_CREDENTIAL_USERNAME: "harbor_registry_user" HTTP_PROXY: "" HTTPS_PROXY: "" NO_PROXY: "harbor-harbor-harbor-core,harbor-harbor-harbor-jobservice,harbor-harbor-harbor-database,harbor-harbor-harbor-chartmuseum,harbor-harbor-harbor-notary-server,harbor-harbor-harbor-notary-signer,harbor-harbor-harbor-registry,harbor-harbor-harbor-portal,harbor-harbor-harbor-trivy,harbor-harbor-harbor-exporter,127.0.0.1,localhost,.local,.internal" METRIC_NAMESPACE: harbor METRIC_SUBSYSTEM: jobservice --- # Source: harbor/templates/jobservice/jobservice-cm.yaml apiVersion: v1 kind: ConfigMap metadata: name: "harbor-harbor-harbor-jobservice" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" data: config.yml: |+ #Server listening port protocol: "http" port: 8080 worker_pool: workers: 10 backend: "redis" redis_pool: redis_url: "redis://harbor-harbor-harbor-redis:6379/1" namespace: 
"harbor_job_service_namespace" idle_timeout_second: 3600 job_loggers: - name: "STD_OUTPUT" level: INFO metric: enabled: true path: /metrics port: 8001 #Loggers for the job service loggers: - name: "STD_OUTPUT" level: INFO --- # Source: harbor/templates/portal/configmap.yaml apiVersion: v1 kind: ConfigMap metadata: name: "harbor-harbor-harbor-portal" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" data: nginx.conf: |+ worker_processes auto; pid /tmp/nginx.pid; events { worker_connections 1024; } http { client_body_temp_path /tmp/client_body_temp; proxy_temp_path /tmp/proxy_temp; fastcgi_temp_path /tmp/fastcgi_temp; uwsgi_temp_path /tmp/uwsgi_temp; scgi_temp_path /tmp/scgi_temp; server { listen 8080; listen [::]:8080; server_name localhost; root /usr/share/nginx/html; index index.html index.htm; include /etc/nginx/mime.types; gzip on; gzip_min_length 1000; gzip_proxied expired no-cache no-store private auth; gzip_types text/plain text/css application/json application/javascript application/x-javascript text/xml application/xml application/xml+rss text/javascript; location / { try_files $uri $uri/ /index.html; } location = /index.html { add_header Cache-Control "no-store, no-cache, must-revalidate"; } } } --- # Source: harbor/templates/registry/registry-cm.yaml apiVersion: v1 kind: ConfigMap metadata: name: "harbor-harbor-harbor-registry" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" data: config.yml: |+ version: 0.1 log: level: info fields: service: registry storage: s3: region: fr-par bucket: cluster.fun regionendpoint: https://s3.fr-par.scw.cloud rootdirectory: /harbor cache: layerinfo: redis maintenance: uploadpurging: enabled: false delete: enabled: true redirect: disable: true redis: addr: harbor-harbor-harbor-redis:6379 db: 2 readtimeout: 10s writetimeout: 10s dialtimeout: 10s pool: maxidle: 100 maxactive: 500 idletimeout: 60s http: addr: :5000 relativeurls: false # set via environment variable # secret: placeholder debug: addr: :8001 prometheus: enabled: true path: /metrics auth: htpasswd: realm: harbor-registry-basic-realm path: /etc/registry/passwd validation: disabled: true compatibility: schema1: enabled: true ctl-config.yml: |+ --- protocol: "http" port: 8080 log_level: info registry_config: "/etc/registry/config.yml" --- # Source: harbor/templates/core/core-svc.yaml apiVersion: v1 kind: Service metadata: name: harbor-harbor-harbor-core labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" spec: ports: - name: http-web port: 80 targetPort: 8080 - name: http-metrics port: 8001 selector: release: harbor-harbor app: "harbor" component: core --- # Source: harbor/templates/database/database-svc.yaml apiVersion: v1 kind: Service metadata: name: "harbor-harbor-harbor-database" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" spec: ports: - port: 5432 selector: release: harbor-harbor app: "harbor" component: database --- # Source: harbor/templates/exporter/exporter-svc.yaml apiVersion: v1 kind: Service metadata: name: "harbor-harbor-harbor-exporter" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" spec: ports: - name: http-metrics port: 8001 selector: release: harbor-harbor app: "harbor" component: exporter --- # Source: harbor/templates/jobservice/jobservice-svc.yaml apiVersion: v1 kind: Service metadata: name: "harbor-harbor-harbor-jobservice" labels: heritage: Helm release: harbor-harbor chart: harbor app: "harbor" spec: ports: - name: http-jobservice port: 80 
apiVersion: v1
kind: ConfigMap
metadata:
  name: "harbor-harbor-harbor-registry"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
data:
  config.yml: |+
    version: 0.1
    log:
      level: info
      fields:
        service: registry
    storage:
      s3:
        region: fr-par
        bucket: cluster.fun
        regionendpoint: https://s3.fr-par.scw.cloud
        rootdirectory: /harbor
      cache:
        layerinfo: redis
      maintenance:
        uploadpurging:
          enabled: false
      delete:
        enabled: true
      redirect:
        disable: true
    redis:
      addr: harbor-harbor-harbor-redis:6379
      db: 2
      readtimeout: 10s
      writetimeout: 10s
      dialtimeout: 10s
      pool:
        maxidle: 100
        maxactive: 500
        idletimeout: 60s
    http:
      addr: :5000
      relativeurls: false
      # set via environment variable
      # secret: placeholder
      debug:
        addr: :8001
        prometheus:
          enabled: true
          path: /metrics
    auth:
      htpasswd:
        realm: harbor-registry-basic-realm
        path: /etc/registry/passwd
    validation:
      disabled: true
    compatibility:
      schema1:
        enabled: true
  ctl-config.yml: |+
    ---
    protocol: "http"
    port: 8080
    log_level: info
    registry_config: "/etc/registry/config.yml"
---
# Source: harbor/templates/core/core-svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: harbor-harbor-harbor-core
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - name: http-web
    port: 80
    targetPort: 8080
  - name: http-metrics
    port: 8001
  selector:
    release: harbor-harbor
    app: "harbor"
    component: core
---
# Source: harbor/templates/database/database-svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: "harbor-harbor-harbor-database"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - port: 5432
  selector:
    release: harbor-harbor
    app: "harbor"
    component: database
---
# Source: harbor/templates/exporter/exporter-svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: "harbor-harbor-harbor-exporter"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - name: http-metrics
    port: 8001
  selector:
    release: harbor-harbor
    app: "harbor"
    component: exporter
---
# Source: harbor/templates/jobservice/jobservice-svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: "harbor-harbor-harbor-jobservice"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - name: http-jobservice
    port: 80
    targetPort: 8080
  - name: http-metrics
    port: 8001
  selector:
    release: harbor-harbor
    app: "harbor"
    component: jobservice
---
# Source: harbor/templates/portal/service.yaml
apiVersion: v1
kind: Service
metadata:
  name: "harbor-harbor-harbor-portal"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - port: 80
    targetPort: 8080
  selector:
    release: harbor-harbor
    app: "harbor"
    component: portal
---
# Source: harbor/templates/redis/service.yaml
apiVersion: v1
kind: Service
metadata:
  name: harbor-harbor-harbor-redis
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - port: 6379
  selector:
    release: harbor-harbor
    app: "harbor"
    component: redis
---
# Source: harbor/templates/registry/registry-svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: "harbor-harbor-harbor-registry"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
spec:
  ports:
  - name: http-registry
    port: 5000
  - name: http-controller
    port: 8080
  - name: http-metrics
    port: 8001
  selector:
    release: harbor-harbor
    app: "harbor"
    component: registry
---
# Source: harbor/templates/core/core-dpl.yaml
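# Core runs two replicas spread across nodes by a required pod anti-affinity rule.
# The checksum annotations trigger a rolling restart whenever the rendered ConfigMap
# or Secrets change.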
apiVersion: apps/v1
kind: Deployment
metadata:
  name: harbor-harbor-harbor-core
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: core
spec:
  replicas: 2
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: core
  template:
    metadata:
      labels:
        release: harbor-harbor
        app: "harbor"
        component: core
      annotations:
        checksum/configmap: 167dd1a6053c18e7ef228ae34b781b938287b997345e41d919b64437cd59721d
        checksum/secret: 3267ef0049cdfabcdaa15a4e87e8624bbdd3e69acb51169d3cec91c6c321dda5
        checksum/secret-jobservice: 87b4ffaead27c455b8f39f9223543537f996895e5042c22fc1e579d308726d6b
    spec:
      securityContext:
        runAsUser: 10000
        fsGroup: 10000
      automountServiceAccountToken: false
      terminationGracePeriodSeconds: 120
      containers:
      - name: core
        image: goharbor/harbor-core:v2.4.1
        imagePullPolicy: IfNotPresent
        startupProbe:
          httpGet:
            path: /api/v2.0/ping
            scheme: HTTP
            port: 8080
          failureThreshold: 360
          initialDelaySeconds: 10
          periodSeconds: 10
        livenessProbe:
          httpGet:
            path: /api/v2.0/ping
            scheme: HTTP
            port: 8080
          failureThreshold: 2
          periodSeconds: 10
        readinessProbe:
          httpGet:
            path: /api/v2.0/ping
            scheme: HTTP
            port: 8080
          failureThreshold: 2
          periodSeconds: 10
        envFrom:
        - configMapRef:
            name: "harbor-harbor-harbor-core"
        - secretRef:
            name: "harbor-harbor-harbor-core"
        env:
        - name: CORE_SECRET
          valueFrom:
            secretKeyRef:
              name: harbor-harbor-harbor-core
              key: secret
        - name: JOBSERVICE_SECRET
          valueFrom:
            secretKeyRef:
              name: "harbor-harbor-harbor-jobservice"
              key: JOBSERVICE_SECRET
        ports:
        - containerPort: 8080
        volumeMounts:
        - name: config
          mountPath: /etc/core/app.conf
          subPath: app.conf
        - name: secret-key
          mountPath: /etc/core/key
          subPath: key
        - name: token-service-private-key
          mountPath: /etc/core/private_key.pem
          subPath: tls.key
        - name: ca-download
          mountPath: /etc/core/ca
        - name: psc
          mountPath: /etc/core/token
        resources:
          requests:
            memory: 64Mi
      volumes:
      - name: config
        configMap:
          name: harbor-harbor-harbor-core
          items:
          - key: app.conf
            path: app.conf
      - name: secret-key
        secret:
          secretName: harbor-harbor-harbor-core
          items:
          - key: secretKey
            path: key
      - name: token-service-private-key
        secret:
          secretName: harbor-harbor-harbor-core
      - name: ca-download
        secret:
      - name: psc
        emptyDir: {}
      affinity:
        podAntiAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
          - labelSelector:
              matchExpressions:
              - key: component
                operator: In
                values:
                - core
              - key: app
                operator: In
                values:
                - harbor
            topologyKey: kubernetes.io/hostname
      priorityClassName: system-cluster-critical
---
# Source: harbor/templates/exporter/exporter-dpl.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: harbor-harbor-harbor-exporter
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: exporter
spec:
  replicas: 1
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: exporter
  template:
    metadata:
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
        component: exporter
      annotations:
    spec:
      securityContext:
        runAsUser: 10000
        fsGroup: 10000
      automountServiceAccountToken: false
      containers:
      - name: exporter
        image: goharbor/harbor-exporter:v2.4.1
        imagePullPolicy: IfNotPresent
        livenessProbe:
          httpGet:
            path: /
            port: 8001
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          httpGet:
            path: /
            port: 8001
          initialDelaySeconds: 30
          periodSeconds: 10
        args: ["-log-level", "info"]
        envFrom:
        - configMapRef:
            name: "harbor-harbor-harbor-exporter-env"
        - secretRef:
            name: "harbor-harbor-harbor-exporter"
        ports:
        - containerPort: 8080
        volumeMounts:
      volumes:
      - name: config
        secret:
          secretName: "harbor-harbor-harbor-exporter"
---
# Source: harbor/templates/jobservice/jobservice-dpl.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: "harbor-harbor-harbor-jobservice"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: jobservice
spec:
  replicas: 1
  strategy:
    type: Recreate
    rollingUpdate: null
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: jobservice
  template:
    metadata:
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
        component: jobservice
      annotations:
        checksum/configmap: af6da052830476467f006a29d110274d1764756243b20e51aebdecd7d677b19a
        checksum/configmap-env: ba3a529d03e0d0f9dbaab1bd37a3c43f3f914a0af9339b2c49311b37c7aec049
        checksum/secret: 86d4dd7172a17e4ee3a7b0d58930056a0787e6244e97ac6362a1434e96bffd64
        checksum/secret-core: 6f1def0912bfbb511b8d3ff055c3f95d998cd7e7c1432417b53cf5f4a4c289b5
    spec:
      securityContext:
        runAsUser: 10000
        fsGroup: 10000
      automountServiceAccountToken: false
      terminationGracePeriodSeconds: 120
      containers:
      - name: jobservice
        image: goharbor/harbor-jobservice:v2.4.1
        imagePullPolicy: IfNotPresent
        livenessProbe:
          httpGet:
            path: /api/v1/stats
            scheme: HTTP
            port: 8080
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          httpGet:
            path: /api/v1/stats
            scheme: HTTP
            port: 8080
          initialDelaySeconds: 20
          periodSeconds: 10
        resources:
          requests:
            memory: 64Mi
        env:
        - name: CORE_SECRET
          valueFrom:
            secretKeyRef:
              name: harbor-harbor-harbor-core
              key: secret
        envFrom:
        - configMapRef:
            name: "harbor-harbor-harbor-jobservice-env"
        - secretRef:
            name: "harbor-harbor-harbor-jobservice"
        ports:
        - containerPort: 8080
        volumeMounts:
        - name: jobservice-config
          mountPath: /etc/jobservice/config.yml
          subPath: config.yml
        - name: job-logs
          mountPath: /var/log/jobs
          subPath:
      volumes:
      - name: jobservice-config
        configMap:
          name: "harbor-harbor-harbor-jobservice"
      - name: job-logs
        emptyDir: {}
---
# Source: harbor/templates/portal/deployment.yaml
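# The portal serves the static web UI via nginx using the nginx.conf ConfigMap above;
# it runs as a non-root user, which is why nginx listens on 8080 and keeps its pid
# and temp files under /tmp.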
apiVersion: apps/v1
kind: Deployment
metadata:
  name: "harbor-harbor-harbor-portal"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: portal
spec:
  replicas: 2
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: portal
  template:
    metadata:
      labels:
        release: harbor-harbor
        app: "harbor"
        component: portal
      annotations:
    spec:
      securityContext:
        runAsUser: 10000
        fsGroup: 10000
      automountServiceAccountToken: false
      containers:
      - name: portal
        image: goharbor/harbor-portal:v2.4.1
        imagePullPolicy: IfNotPresent
        resources:
          requests:
            memory: 64Mi
        livenessProbe:
          httpGet:
            path: /
            scheme: HTTP
            port: 8080
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          httpGet:
            path: /
            scheme: HTTP
            port: 8080
          initialDelaySeconds: 1
          periodSeconds: 10
        ports:
        - containerPort: 8080
        volumeMounts:
        - name: portal-config
          mountPath: /etc/nginx/nginx.conf
          subPath: nginx.conf
      volumes:
      - name: portal-config
        configMap:
          name: "harbor-harbor-harbor-portal"
      affinity:
        podAntiAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
          - labelSelector:
              matchExpressions:
              - key: component
                operator: In
                values:
                - portal
              - key: app
                operator: In
                values:
                - harbor
            topologyKey: kubernetes.io/hostname
      priorityClassName: system-cluster-critical
---
# Source: harbor/templates/registry/registry-dpl.yaml
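# The registry pod runs two containers: the Distribution registry itself and registryctl,
# which exposes the controller API on 8080 (REGISTRY_CONTROLLER_URL) used by core and
# jobservice, e.g. for garbage collection. Both mount the same rendered config.yml.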
apiVersion: apps/v1
kind: Deployment
metadata:
  name: "harbor-harbor-harbor-registry"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: registry
spec:
  replicas: 2
  strategy:
    type: Recreate
    rollingUpdate: null
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: registry
  template:
    metadata:
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
        component: registry
      annotations:
        checksum/configmap: 9556d2769d48cf3b1a5b97cb95ab15a184e156c38d4d25f1b5b6290f36e3a592
        checksum/secret: fdbd36eee535adc702ff39bc7f483c5ce5e40ca5cd35bde8b83614383411efe8
        checksum/secret-jobservice: 2c9a6a2532bb1b532b831db3a7e3b562cdd2829abe94188f5eb3a3f8ab4908fc
        checksum/secret-core: 6201925d9501d8469ca1fef56e13a62e76b0fc525761760aa1b1a4488d79a221
    spec:
      securityContext:
        runAsUser: 10000
        fsGroup: 10000
      automountServiceAccountToken: false
      terminationGracePeriodSeconds: 120
      containers:
      - name: registry
        image: goharbor/registry-photon:v2.4.1
        imagePullPolicy: IfNotPresent
        livenessProbe:
          httpGet:
            path: /
            scheme: HTTP
            port: 5000
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          httpGet:
            path: /
            scheme: HTTP
            port: 5000
          initialDelaySeconds: 1
          periodSeconds: 10
        resources:
          requests:
            memory: 64Mi
        args: ["serve", "/etc/registry/config.yml"]
        envFrom:
        - secretRef:
            name: "harbor-harbor-harbor-registry"
        env:
        ports:
        - containerPort: 5000
        - containerPort: 5001
        volumeMounts:
        - name: registry-data
          mountPath: /storage
          subPath:
        - name: registry-htpasswd
          mountPath: /etc/registry/passwd
          subPath: passwd
        - name: registry-config
          mountPath: /etc/registry/config.yml
          subPath: config.yml
      - name: registryctl
        image: goharbor/harbor-registryctl:v2.4.1
        imagePullPolicy: IfNotPresent
        livenessProbe:
          httpGet:
            path: /api/health
            scheme: HTTP
            port: 8080
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          httpGet:
            path: /api/health
            scheme: HTTP
            port: 8080
          initialDelaySeconds: 1
          periodSeconds: 10
        resources:
          requests:
            memory: 64Mi
        envFrom:
        - secretRef:
            name: "harbor-harbor-harbor-registry"
        env:
        - name: CORE_SECRET
          valueFrom:
            secretKeyRef:
              name: harbor-harbor-harbor-core
              key: secret
        - name: JOBSERVICE_SECRET
          valueFrom:
            secretKeyRef:
              name: harbor-harbor-harbor-jobservice
              key: JOBSERVICE_SECRET
        ports:
        - containerPort: 8080
        volumeMounts:
        - name: registry-data
          mountPath: /storage
          subPath:
        - name: registry-config
          mountPath: /etc/registry/config.yml
          subPath: config.yml
        - name: registry-config
          mountPath: /etc/registryctl/config.yml
          subPath: ctl-config.yml
      volumes:
      - name: registry-htpasswd
        secret:
          secretName: harbor-harbor-harbor-registry-htpasswd
          items:
          - key: REGISTRY_HTPASSWD
            path: passwd
      - name: registry-config
        configMap:
          name: "harbor-harbor-harbor-registry"
      - name: registry-data
        emptyDir: {}
      affinity:
        podAntiAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
          - labelSelector:
              matchExpressions:
              - key: component
                operator: In
                values:
                - registry
              - key: app
                operator: In
                values:
                - harbor
            topologyKey: kubernetes.io/hostname
      priorityClassName: system-cluster-critical
---
# Source: harbor/templates/database/database-ss.yaml
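# Postgres keeps its data in a 1Gi PVC; /dev/shm is backed by a memory emptyDir (512Mi)
# so shared memory is not limited to the container runtime default.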
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: "harbor-harbor-harbor-database"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: database
spec:
  replicas: 1
  serviceName: "harbor-harbor-harbor-database"
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: database
  template:
    metadata:
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
        component: database
      annotations:
        checksum/secret: 7a382608359a04f6943a40781d4010c95b076ef1dc524f02dfdbbe1f1d4b0615
    spec:
      securityContext:
        runAsUser: 999
        fsGroup: 999
      automountServiceAccountToken: false
      terminationGracePeriodSeconds: 120
      initContainers:
      # As the data directory was moved to a sub folder to support PSPs, this init container
      # migrates any existing data. See https://github.com/goharbor/harbor-helm/issues/756
      # for more detail. It may be removed after several releases.
      - name: "data-migrator"
        image: goharbor/harbor-db:v2.4.1
        imagePullPolicy: IfNotPresent
        command: ["/bin/sh"]
        args: ["-c", "[ -e /var/lib/postgresql/data/postgresql.conf ] && [ ! -d /var/lib/postgresql/data/pgdata ] && mkdir -m 0700 /var/lib/postgresql/data/pgdata && mv /var/lib/postgresql/data/* /var/lib/postgresql/data/pgdata/ || true"]
        volumeMounts:
        - name: database-data
          mountPath: /var/lib/postgresql/data
          subPath:
      # With "fsGroup" set, each time a volume is mounted Kubernetes recursively chown()s and
      # chmod()s all files and directories inside the volume. With some CSIs (e.g. Ceph) this
      # makes PostgreSQL report "data directory /var/lib/postgresql/data/pgdata has group or
      # world access". This init container corrects the permissions; since "fsGroup" is applied
      # before init containers run, it has enough permission to execute the command.
      - name: "data-permissions-ensurer"
        image: goharbor/harbor-db:v2.4.1
        imagePullPolicy: IfNotPresent
        command: ["/bin/sh"]
        args: ["-c", "chmod -R 700 /var/lib/postgresql/data/pgdata || true"]
        volumeMounts:
        - name: database-data
          mountPath: /var/lib/postgresql/data
          subPath:
      containers:
      - name: database
        image: goharbor/harbor-db:v2.4.1
        imagePullPolicy: IfNotPresent
        livenessProbe:
          exec:
            command:
            - /docker-healthcheck.sh
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          exec:
            command:
            - /docker-healthcheck.sh
          initialDelaySeconds: 1
          periodSeconds: 10
        envFrom:
        - secretRef:
            name: "harbor-harbor-harbor-database"
        env:
        # Put the data into a sub directory to avoid the permission issue in k8s with the
        # restricted PSP enabled. For more detail refer to
        # https://github.com/goharbor/harbor-helm/issues/756
        - name: PGDATA
          value: "/var/lib/postgresql/data/pgdata"
        volumeMounts:
        - name: database-data
          mountPath: /var/lib/postgresql/data
          subPath:
        - name: shm-volume
          mountPath: /dev/shm
      volumes:
      - name: shm-volume
        emptyDir:
          medium: Memory
          sizeLimit: 512Mi
  volumeClaimTemplates:
  - metadata:
      name: "database-data"
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: "1Gi"
---
# Source: harbor/templates/redis/statefulset.yaml
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: harbor-harbor-harbor-redis
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
    component: redis
spec:
  replicas: 1
  serviceName: harbor-harbor-harbor-redis
  selector:
    matchLabels:
      release: harbor-harbor
      app: "harbor"
      component: redis
  template:
    metadata:
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
        component: redis
    spec:
      securityContext:
        runAsUser: 999
        fsGroup: 999
      automountServiceAccountToken: false
      terminationGracePeriodSeconds: 120
      containers:
      - name: redis
        image: goharbor/redis-photon:v2.4.1
        imagePullPolicy: IfNotPresent
        livenessProbe:
          tcpSocket:
            port: 6379
          initialDelaySeconds: 300
          periodSeconds: 10
        readinessProbe:
          tcpSocket:
            port: 6379
          initialDelaySeconds: 1
          periodSeconds: 10
        volumeMounts:
        - name: data
          mountPath: /var/lib/redis
          subPath:
  volumeClaimTemplates:
  - metadata:
      name: data
      labels:
        heritage: Helm
        release: harbor-harbor
        chart: harbor
        app: "harbor"
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: "1Gi"
---
# Source: harbor/templates/ingress/ingress.yaml
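# Both hosts share a single cert-manager certificate from the letsencrypt ClusterIssuer.
# proxy-body-size is set to "0" so nginx does not reject large image layer uploads.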
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: "harbor-harbor-harbor-ingress"
  labels:
    heritage: Helm
    release: harbor-harbor
    chart: harbor
    app: "harbor"
  annotations:
    cert-manager.io/cluster-issuer: letsencrypt
    ingress.kubernetes.io/proxy-body-size: "0"
    ingress.kubernetes.io/ssl-redirect: "true"
    nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
    nginx.ingress.kubernetes.io/proxy-body-size: "0"
    nginx.ingress.kubernetes.io/ssl-redirect: "true"
spec:
  tls:
  - secretName: harbor-harbor-ingress
    hosts:
    - harbor.cluster.fun
    - docker.cluster.fun
  rules:
  - http:
      paths:
      - path: /
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-portal
            port:
              number: 80
      - path: /api/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /service/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /v2
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /chartrepo/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /c/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
    host: harbor.cluster.fun
  - http:
      paths:
      - path: /
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-portal
            port:
              number: 80
      - path: /api/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /service/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /v2
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /chartrepo/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
      - path: /c/
        pathType: Prefix
        backend:
          service:
            name: harbor-harbor-harbor-core
            port:
              number: 80
    host: docker.cluster.fun