From 2144eb6fc8e4d029da8548019321158415b07a41 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 07:39:53 +0000 Subject: [PATCH 1/7] chore(deps): update dependency flux (#648) Co-authored-by: rmvangun <85766511+rmvangun@users.noreply.github.com> Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 7 ++- contexts/.gitignore | 2 +- .../prometheus/flux/helm-release.yaml | 2 +- terraform/cluster/talos/README.md | 5 +- terraform/cluster/talos/main.tf | 50 +++-------------- .../talos/modules/machine/.terraform.lock.hcl | 19 +++++++ .../cluster/talos/modules/machine/main.tf | 30 +++++++++++ .../talos/modules/machine/test.tftest.hcl | 14 +++-- .../talos/modules/machine/variables.tf | 11 ++++ .../cluster/talos/resources/healthcheck.ps1 | 34 ------------ .../cluster/talos/resources/healthcheck.sh | 53 ------------------- terraform/cluster/talos/test.tftest.hcl | 4 +- terraform/cluster/talos/variables.tf | 10 ---- terraform/gitops/flux/variables.tf | 4 +- 14 files changed, 88 insertions(+), 157 deletions(-) delete mode 100644 terraform/cluster/talos/resources/healthcheck.ps1 delete mode 100755 terraform/cluster/talos/resources/healthcheck.sh diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7b5ce54e..348d4dc3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -59,7 +59,12 @@ jobs: - name: Run shellcheck run: | sudo apt-get install -y shellcheck - find . -name "*.sh" -print0 | xargs -0 shellcheck + shell_files=$(find . -name "*.sh" -print) + if [ -n "$shell_files" ]; then + echo "$shell_files" | xargs shellcheck + else + echo "No shell scripts found to check" + fi - name: Setup Terraform uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2 diff --git a/contexts/.gitignore b/contexts/.gitignore index ca8d105b..4d93ab0a 100644 --- a/contexts/.gitignore +++ b/contexts/.gitignore @@ -1 +1 @@ -/_template/ +!/_template/ diff --git a/kustomize/telemetry/resources/prometheus/flux/helm-release.yaml b/kustomize/telemetry/resources/prometheus/flux/helm-release.yaml index 29a281fd..ba8cc08f 100644 --- a/kustomize/telemetry/resources/prometheus/flux/helm-release.yaml +++ b/kustomize/telemetry/resources/prometheus/flux/helm-release.yaml @@ -10,7 +10,7 @@ spec: spec: chart: flux2 # renovate: datasource=helm depName=flux package=flux2 helmRepo=https://fluxcd-community.github.io/helm-charts - version: 2.16.2 + version: 2.16.3 sourceRef: kind: HelmRepository name: fluxcd-community diff --git a/terraform/cluster/talos/README.md b/terraform/cluster/talos/README.md index 38606138..73f53b19 100644 --- a/terraform/cluster/talos/README.md +++ b/terraform/cluster/talos/README.md @@ -11,7 +11,6 @@ | Name | Version | |------|---------| | [local](#provider\_local) | 2.5.3 | -| [null](#provider\_null) | 3.2.4 | | [talos](#provider\_talos) | 0.8.1 | ## Modules @@ -28,7 +27,6 @@ |------|------| | [local_sensitive_file.kubeconfig](https://registry.terraform.io/providers/hashicorp/local/latest/docs/resources/sensitive_file) | resource | | [local_sensitive_file.talosconfig](https://registry.terraform.io/providers/hashicorp/local/latest/docs/resources/sensitive_file) | resource | -| [null_resource.healthcheck](https://registry.terraform.io/providers/hashicorp/null/latest/docs/resources/resource) | resource | | 
[talos_cluster_kubeconfig.this](https://registry.terraform.io/providers/siderolabs/talos/0.8.1/docs/resources/cluster_kubeconfig) | resource | | [talos_machine_secrets.this](https://registry.terraform.io/providers/siderolabs/talos/0.8.1/docs/resources/machine_secrets) | resource | | [talos_client_configuration.this](https://registry.terraform.io/providers/siderolabs/talos/0.8.1/docs/data-sources/client_configuration) | data source | @@ -43,8 +41,7 @@ | [context\_path](#input\_context\_path) | The path to the context folder, where kubeconfig and talosconfig are stored | `string` | `""` | no | | [controlplane\_config\_patches](#input\_controlplane\_config\_patches) | A YAML string of controlplane config patches to apply. Can be an empty string or valid YAML. | `string` | `""` | no | | [controlplanes](#input\_controlplanes) | A list of machine configuration details for control planes. |
list(object({
    hostname = optional(string)
    endpoint = string
    node = string
    disk_selector = optional(object({
      busPath = optional(string)
      modalias = optional(string)
      model = optional(string)
      name = optional(string)
      serial = optional(string)
      size = optional(string)
      type = optional(string)
      uuid = optional(string)
      wwid = optional(string)
    }))
    wipe_disk = optional(bool, true)
    extra_kernel_args = optional(list(string), [])
    config_patches = optional(string, "")
  }))
| `[]` | no | -| [kubernetes\_version](#input\_kubernetes\_version) | The kubernetes version to deploy. | `string` | `"1.33.2"` | no | -| [os\_type](#input\_os\_type) | The operating system type, must be either 'unix' or 'windows' | `string` | `"unix"` | no | +| [kubernetes\_version](#input\_kubernetes\_version) | The kubernetes version to deploy. | `string` | `"1.33.3"` | no | | [talos\_version](#input\_talos\_version) | The talos version to deploy. | `string` | `"1.10.5"` | no | | [worker\_config\_patches](#input\_worker\_config\_patches) | A YAML string of worker config patches to apply. Can be an empty string or valid YAML. | `string` | `""` | no | | [workers](#input\_workers) | A list of machine configuration details |
list(object({
    hostname = optional(string)
    endpoint = string
    node = string
    disk_selector = optional(object({
      busPath = optional(string)
      modalias = optional(string)
      model = optional(string)
      name = optional(string)
      serial = optional(string)
      size = optional(string)
      type = optional(string)
      uuid = optional(string)
      wwid = optional(string)
    }))
    wipe_disk = optional(bool, true)
    extra_kernel_args = optional(list(string), [])
    config_patches = optional(string, "")
  }))
| `[]` | no | diff --git a/terraform/cluster/talos/main.tf b/terraform/cluster/talos/main.tf index b697ec39..98038977 100644 --- a/terraform/cluster/talos/main.tf +++ b/terraform/cluster/talos/main.tf @@ -50,6 +50,8 @@ module "controlplane_bootstrap" { machine_type = "controlplane" endpoint = var.controlplanes[0].endpoint bootstrap = true // Bootstrap the first control plane node + talosconfig_path = local.talosconfig_path + enable_health_check = true config_patches = compact(concat([ var.common_config_patches, var.controlplane_config_patches, @@ -76,6 +78,8 @@ module "controlplanes" { machine_type = "controlplane" endpoint = var.controlplanes[count.index + 1].endpoint bootstrap = false // Do not bootstrap other control plane nodes + talosconfig_path = local.talosconfig_path + enable_health_check = true config_patches = compact(concat([ var.common_config_patches, var.controlplane_config_patches, @@ -105,6 +109,8 @@ module "workers" { talos_version = var.talos_version machine_type = "worker" endpoint = var.workers[count.index].endpoint + talosconfig_path = local.talosconfig_path + enable_health_check = true config_patches = compact(concat([ var.common_config_patches, var.worker_config_patches, @@ -157,48 +163,4 @@ resource "local_sensitive_file" "talosconfig" { } } -#----------------------------------------------------------------------------------------------------------------------- -# Cluster Health -#----------------------------------------------------------------------------------------------------------------------- - -# The following workaround is required until resolution of https://github.com/siderolabs/terraform-provider-talos/issues/221 - -# data "talos_cluster_health" "this" { -# depends_on = [ -# module.controlplane_bootstrap, -# module.controlplanes, -# module.workers -# ] -# client_configuration = talos_machine_secrets.this.client_configuration -# control_plane_nodes = var.controlplanes.*.node -# worker_nodes = var.workers.*.node -# endpoints = var.controlplanes.*.endpoint -# } - -locals { - healthcheck_command = var.os_type == "unix" ? "${path.module}/resources/healthcheck.sh" : "& { & '${path.module}/resources/healthcheck.ps1' }" - healthcheck_interpreter = var.os_type == "unix" ? ["sh", "-c"] : ["powershell", "-Command"] -} - -resource "null_resource" "healthcheck" { - triggers = { - always_run = timestamp() // Ensures the resource runs every time - } - - depends_on = [ - local_sensitive_file.kubeconfig, - local_sensitive_file.talosconfig - ] - - provisioner "local-exec" { - command = local.healthcheck_command - interpreter = local.healthcheck_interpreter - environment = { - KUBECONFIG = local.kubeconfig_path - NODE_COUNT = length(var.controlplanes) + length(var.workers) - TIMEOUT = 300 # 5 minutes - INTERVAL = 5 # 5 seconds - } - } -} diff --git a/terraform/cluster/talos/modules/machine/.terraform.lock.hcl b/terraform/cluster/talos/modules/machine/.terraform.lock.hcl index 515ed004..8c436b2f 100644 --- a/terraform/cluster/talos/modules/machine/.terraform.lock.hcl +++ b/terraform/cluster/talos/modules/machine/.terraform.lock.hcl @@ -1,6 +1,25 @@ # This file is maintained automatically by "terraform init". # Manual edits may be lost in future updates. 
+provider "registry.terraform.io/hashicorp/null" { + version = "3.2.4" + hashes = [ + "h1:L5V05xwp/Gto1leRryuesxjMfgZwjb7oool4WS1UEFQ=", + "zh:59f6b52ab4ff35739647f9509ee6d93d7c032985d9f8c6237d1f8a59471bbbe2", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:795c897119ff082133150121d39ff26cb5f89a730a2c8c26f3a9c1abf81a9c43", + "zh:7b9c7b16f118fbc2b05a983817b8ce2f86df125857966ad356353baf4bff5c0a", + "zh:85e33ab43e0e1726e5f97a874b8e24820b6565ff8076523cc2922ba671492991", + "zh:9d32ac3619cfc93eb3c4f423492a8e0f79db05fec58e449dee9b2d5873d5f69f", + "zh:9e15c3c9dd8e0d1e3731841d44c34571b6c97f5b95e8296a45318b94e5287a6e", + "zh:b4c2ab35d1b7696c30b64bf2c0f3a62329107bd1a9121ce70683dec58af19615", + "zh:c43723e8cc65bcdf5e0c92581dcbbdcbdcf18b8d2037406a5f2033b1e22de442", + "zh:ceb5495d9c31bfb299d246ab333f08c7fb0d67a4f82681fbf47f2a21c3e11ab5", + "zh:e171026b3659305c558d9804062762d168f50ba02b88b231d20ec99578a6233f", + "zh:ed0fe2acdb61330b01841fa790be00ec6beaac91d41f311fb8254f74eb6a711f", + ] +} + provider "registry.terraform.io/siderolabs/talos" { version = "0.8.0" hashes = [ diff --git a/terraform/cluster/talos/modules/machine/main.tf b/terraform/cluster/talos/modules/machine/main.tf index 1bb0a832..f8ab6323 100644 --- a/terraform/cluster/talos/modules/machine/main.tf +++ b/terraform/cluster/talos/modules/machine/main.tf @@ -8,6 +8,9 @@ terraform { talos = { source = "siderolabs/talos" } + null = { + source = "hashicorp/null" + } } } @@ -74,3 +77,30 @@ resource "talos_machine_bootstrap" "bootstrap" { endpoint = var.endpoint client_configuration = var.client_configuration } + +#----------------------------------------------------------------------------------------------------------------------- +# Node Health Check +#----------------------------------------------------------------------------------------------------------------------- + +locals { + # Use hostname if available, otherwise fall back to node address + node_name = var.hostname != null && var.hostname != "" ? var.hostname : var.node +} + +resource "null_resource" "node_healthcheck" { + triggers = { + node_id = var.node + } + + depends_on = [ + talos_machine_configuration_apply.this, + talos_machine_bootstrap.bootstrap + ] + + provisioner "local-exec" { + command = var.enable_health_check ? "windsor check node-health --nodes ${local.node_name} --timeout 5m" : "echo 'Health check disabled for testing'" + environment = var.enable_health_check ? 
{ + TALOSCONFIG = var.talosconfig_path + } : {} + } +} diff --git a/terraform/cluster/talos/modules/machine/test.tftest.hcl b/terraform/cluster/talos/modules/machine/test.tftest.hcl index f5132728..bff71892 100644 --- a/terraform/cluster/talos/modules/machine/test.tftest.hcl +++ b/terraform/cluster/talos/modules/machine/test.tftest.hcl @@ -4,6 +4,10 @@ mock_provider "talos" { mock_resource "talos_machine_bootstrap" {} } +mock_provider "null" { + mock_resource "null_resource" {} +} + variables { machine_type = "controlplane" endpoint = "dummy" @@ -47,10 +51,12 @@ variables { token = "dummy" } } - cluster_name = "dummy" - cluster_endpoint = "https://dummy" - kubernetes_version = "dummy" - talos_version = "1.10.1" + cluster_name = "dummy" + cluster_endpoint = "https://dummy" + kubernetes_version = "dummy" + talos_version = "1.10.1" + talosconfig_path = "/tmp/dummy-talosconfig" + enable_health_check = false } run "machine_config_patch_with_disk_and_hostname" { diff --git a/terraform/cluster/talos/modules/machine/variables.tf b/terraform/cluster/talos/modules/machine/variables.tf index 384bfc92..54ae088a 100644 --- a/terraform/cluster/talos/modules/machine/variables.tf +++ b/terraform/cluster/talos/modules/machine/variables.tf @@ -112,3 +112,14 @@ variable "bootstrap" { type = bool default = false } + +variable "talosconfig_path" { + description = "Path to the talosconfig file for health checking." + type = string +} + +variable "enable_health_check" { + description = "Whether to enable health checking for this node." + type = bool + default = true +} diff --git a/terraform/cluster/talos/resources/healthcheck.ps1 b/terraform/cluster/talos/resources/healthcheck.ps1 deleted file mode 100644 index d80241d0..00000000 --- a/terraform/cluster/talos/resources/healthcheck.ps1 +++ /dev/null @@ -1,34 +0,0 @@ -# Number of nodes to check for readiness -[int]$NODE_COUNT = if ($env:NODE_COUNT -ne $null) { [int]$env:NODE_COUNT } else { (kubectl get nodes --no-headers 2>$null | Where-Object { $_.Trim() -ne "" } | Measure-Object | Select-Object -ExpandProperty Count) } -[int]$TIMEOUT = if ($env:TIMEOUT -ne $null) { [int]$env:TIMEOUT } else { 300 } # Default timeout of 300 seconds -[int]$INTERVAL = if ($env:INTERVAL -ne $null) { [int]$env:INTERVAL } else { 10 } # Default check interval of 10 seconds - -$start_time = Get-Date -$previous_ready_count = 0 - -Write-Host "Waiting for $NODE_COUNT nodes to be ready..." - -while ($true) { - $ready_nodes = kubectl get nodes --no-headers 2>$null | Where-Object { $_ -match '\sReady\s' } | ForEach-Object { $_.Split(' ')[0] } - $ready_count = $ready_nodes.Count - - if ($ready_count -ne $previous_ready_count) { - Write-Host "$ready_count / $NODE_COUNT nodes are ready" - $previous_ready_count = $ready_count - } - - if ($ready_count -eq $NODE_COUNT) { - Write-Host "All nodes are ready" - exit 0 - } - - $current_time = Get-Date - $elapsed_time = ($current_time - $start_time).TotalSeconds - - if ($elapsed_time -ge $TIMEOUT) { - Write-Host "Timeout reached: Not all nodes are ready" - exit 1 - } - - Start-Sleep -Seconds $INTERVAL -} diff --git a/terraform/cluster/talos/resources/healthcheck.sh b/terraform/cluster/talos/resources/healthcheck.sh deleted file mode 100755 index 3860cc1d..00000000 --- a/terraform/cluster/talos/resources/healthcheck.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env sh - -# Set the number of nodes to check for readiness. If not provided, default to the current number of nodes. 
-NODE_COUNT=${NODE_COUNT:-$(kubectl get nodes --no-headers 2>/dev/null | awk 'NF' | wc -l)} -# Set the timeout period in seconds. Default is 300 seconds (5 minutes). -TIMEOUT=${TIMEOUT:-300} -# Set the interval between readiness checks in seconds. Default is 10 seconds. -INTERVAL=${INTERVAL:-10} - -# Record the start time of the script to calculate elapsed time later. -start_time=$(date +%s) -# Initialize the previous ready count to track changes in node readiness. -previous_ready_count=0 - -# Inform the user about the number of nodes expected to be ready. -echo "Waiting for $NODE_COUNT nodes to be ready" - -# Continuously check the readiness of nodes. -while true; do - # Attempt to get the list of nodes that are in the 'Ready' state. - if ready_nodes=$(kubectl get nodes --no-headers 2>/dev/null | awk '$2 == "Ready" {print $1}'); then - # Count the number of nodes that are ready. - ready_count=$(echo "$ready_nodes" | awk 'NF' | wc -l) - else - # If the command fails, assume no nodes are ready. - ready_count=0 - fi - - # If the number of ready nodes has changed, print the current status. - if [ "$ready_count" -ne "$previous_ready_count" ]; then - echo "$ready_count / $NODE_COUNT nodes are ready" - previous_ready_count=$ready_count - fi - - # If all nodes are ready, exit the script successfully. - if [ "$ready_count" -eq "$NODE_COUNT" ]; then - echo "All nodes are ready" - exit 0 - fi - - # Calculate the elapsed time since the script started. - current_time=$(date +%s) - elapsed_time=$((current_time - start_time)) - - # If the elapsed time exceeds the timeout, exit the script with an error. - if [ "$elapsed_time" -ge "$TIMEOUT" ]; then - echo "Timeout reached: Not all nodes are ready" - exit 1 - fi - - # Wait for the specified interval before checking again. - sleep "$INTERVAL" -done diff --git a/terraform/cluster/talos/test.tftest.hcl b/terraform/cluster/talos/test.tftest.hcl index 9f7ab08f..3d5289c5 100644 --- a/terraform/cluster/talos/test.tftest.hcl +++ b/terraform/cluster/talos/test.tftest.hcl @@ -215,12 +215,11 @@ run "no_config_files" { } # Verifies that all input validation rules are enforced simultaneously, ensuring that -# invalid values for os_type, kubernetes_version, talos_version, cluster_name, +# invalid values for kubernetes_version, talos_version, cluster_name, # cluster_endpoint, and YAML configs are properly caught and reported run "multiple_invalid_inputs" { command = plan expect_failures = [ - var.os_type, var.kubernetes_version, var.talos_version, var.cluster_name, @@ -232,7 +231,6 @@ run "multiple_invalid_inputs" { var.workers, ] variables { - os_type = "macos" kubernetes_version = "v1.33" talos_version = "v1.10.1" cluster_name = "" diff --git a/terraform/cluster/talos/variables.tf b/terraform/cluster/talos/variables.tf index 9c1e5b19..c83d9f81 100644 --- a/terraform/cluster/talos/variables.tf +++ b/terraform/cluster/talos/variables.tf @@ -4,16 +4,6 @@ variable "context_path" { default = "" } -variable "os_type" { - description = "The operating system type, must be either 'unix' or 'windows'" - type = string - default = "unix" - validation { - condition = var.os_type == "unix" || var.os_type == "windows" - error_message = "The operating system type must be either 'unix' or 'windows'." - } -} - variable "kubernetes_version" { description = "The kubernetes version to deploy." 
type = string diff --git a/terraform/gitops/flux/variables.tf b/terraform/gitops/flux/variables.tf index 0edbbf3f..c861d908 100644 --- a/terraform/gitops/flux/variables.tf +++ b/terraform/gitops/flux/variables.tf @@ -8,7 +8,7 @@ variable "flux_helm_version" { description = "The version of Flux Helm chart to install" type = string # renovate: datasource=helm depName=flux package=flux2 helmRepo=https://fluxcd-community.github.io/helm-charts - default = "2.16.2" + default = "2.16.3" validation { condition = can(regex("^[0-9]+\\.[0-9]+\\.[0-9]+$", var.flux_helm_version)) @@ -20,7 +20,7 @@ variable "flux_version" { description = "The version of Flux to install" type = string # renovate: datasource=github-releases depName=flux package=fluxcd/flux2 - default = "2.6.3" + default = "2.6.4" validation { condition = can(regex("^[0-9]+\\.[0-9]+\\.[0-9]+$", var.flux_version)) From 990cd1b43d1367f644fa85c578d0001dbef38ac9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 14:56:50 +0000 Subject: [PATCH 2/7] chore(deps): update dependency aws/aws-cli to v2.27.57 (#689) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- aqua.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aqua.yaml b/aqua.yaml index 0c9eecb6..3d6431c2 100644 --- a/aqua.yaml +++ b/aqua.yaml @@ -22,7 +22,7 @@ packages: - name: lima-vm/lima@v1.2.0 - name: docker/cli@v27.4.1 - name: docker/compose@v2.38.2 - - name: aws/aws-cli@2.27.55 + - name: aws/aws-cli@2.27.57 - name: helm/helm@v3.18.4 - name: fluxcd/flux2@v2.6.4 - name: hashicorp/vault@v1.20.0 From 17f43aa1a95c706fa12be33e4a1c0486e9d3477d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 20:56:08 +0000 Subject: [PATCH 3/7] chore(deps): update dependency kubernetes/kubectl to v1.33.3 (#678) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- aqua.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aqua.yaml b/aqua.yaml index 3d6431c2..d42c36c7 100644 --- a/aqua.yaml +++ b/aqua.yaml @@ -15,7 +15,7 @@ packages: - name: siderolabs/talos@v1.10.5 - name: siderolabs/omni/omnictl@v0.52.0 - name: siderolabs/omni/omni@v0.52.0 - - name: kubernetes/kubectl@v1.33.2 + - name: kubernetes/kubectl@v1.33.3 - name: go-task/task@v3.44.0 - name: golang/go@go1.24.5 - name: abiosoft/colima@v0.8.1 From 78d4fceba8862f51ec5da01717fc174e536fd3b7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 21:10:12 +0000 Subject: [PATCH 4/7] chore(deps): update dependency lima-vm/lima to v1.2.1 (#687) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- aqua.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aqua.yaml b/aqua.yaml index d42c36c7..f6e93399 100644 --- a/aqua.yaml +++ b/aqua.yaml @@ -19,7 +19,7 @@ packages: - name: go-task/task@v3.44.0 - name: golang/go@go1.24.5 - name: abiosoft/colima@v0.8.1 - - name: lima-vm/lima@v1.2.0 + - name: lima-vm/lima@v1.2.1 - name: docker/cli@v27.4.1 - name: docker/compose@v2.38.2 - name: aws/aws-cli@2.27.57 From ce9aa2128baa551e1dff43887d6bf277a04e2409 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 23 Jul 2025 21:23:26 +0000 Subject: [PATCH 5/7] chore(deps): update dependency aws/aws-cli to v2.27.58 (#692) Co-authored-by: renovate[bot] 
<29139614+renovate[bot]@users.noreply.github.com> --- aqua.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aqua.yaml b/aqua.yaml index f6e93399..fac9ba01 100644 --- a/aqua.yaml +++ b/aqua.yaml @@ -22,7 +22,7 @@ packages: - name: lima-vm/lima@v1.2.1 - name: docker/cli@v27.4.1 - name: docker/compose@v2.38.2 - - name: aws/aws-cli@2.27.57 + - name: aws/aws-cli@2.27.58 - name: helm/helm@v3.18.4 - name: fluxcd/flux2@v2.6.4 - name: hashicorp/vault@v1.20.0 From a822fe9ad84021e2d0fc2f304ae9a7049d71d001 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 24 Jul 2025 01:02:02 +0000 Subject: [PATCH 6/7] chore(deps): update github/codeql-action action to v3.29.4 (#690) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 348d4dc3..f573e591 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -90,7 +90,7 @@ jobs: output_file_path: console,results.sarif - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2 + uses: github/codeql-action/upload-sarif@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4 with: sarif_file: results.sarif From c2d396c5d3f8cc7035e12e08193f9128e8d465ab Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 24 Jul 2025 01:02:27 +0000 Subject: [PATCH 7/7] chore(deps): update kubectl docker tag to v1.33.3 --- kustomize/csi/cleanup/pvcs/deployment.yaml | 2 +- kustomize/ingress/cleanup/ingresses/deployment.yaml | 2 +- kustomize/ingress/cleanup/loadbalancers/deployment.yaml | 2 +- kustomize/object-store/resources/common/job.yaml | 2 +- .../pki/resources/private-issuer/ca/copy-root-cert-job.yaml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/kustomize/csi/cleanup/pvcs/deployment.yaml b/kustomize/csi/cleanup/pvcs/deployment.yaml index 59d0f14e..903dca40 100644 --- a/kustomize/csi/cleanup/pvcs/deployment.yaml +++ b/kustomize/csi/cleanup/pvcs/deployment.yaml @@ -22,7 +22,7 @@ spec: initContainers: - name: cleanup # renovate: datasource=docker depName=kubectl package=bitnami/kubectl - image: bitnami/kubectl:1.33.2 + image: bitnami/kubectl:1.33.3 env: - name: RESOURCE_WAIT_TIMEOUT value: "300" diff --git a/kustomize/ingress/cleanup/ingresses/deployment.yaml b/kustomize/ingress/cleanup/ingresses/deployment.yaml index de3fa53d..a5907db3 100644 --- a/kustomize/ingress/cleanup/ingresses/deployment.yaml +++ b/kustomize/ingress/cleanup/ingresses/deployment.yaml @@ -22,7 +22,7 @@ spec: initContainers: - name: cleanup # renovate: datasource=docker depName=kubectl package=bitnami/kubectl - image: bitnami/kubectl:1.33.2 + image: bitnami/kubectl:1.33.3 env: - name: RESOURCE_WAIT_TIMEOUT value: "300" diff --git a/kustomize/ingress/cleanup/loadbalancers/deployment.yaml b/kustomize/ingress/cleanup/loadbalancers/deployment.yaml index ecd8063d..6befc1ba 100644 --- a/kustomize/ingress/cleanup/loadbalancers/deployment.yaml +++ b/kustomize/ingress/cleanup/loadbalancers/deployment.yaml @@ -22,7 +22,7 @@ spec: initContainers: - name: cleanup # renovate: datasource=docker depName=kubectl package=bitnami/kubectl - image: bitnami/kubectl:1.33.2 + image: bitnami/kubectl:1.33.3 env: - name: RESOURCE_WAIT_TIMEOUT value: "300" diff --git a/kustomize/object-store/resources/common/job.yaml 
b/kustomize/object-store/resources/common/job.yaml index 95b35034..60e71f94 100644 --- a/kustomize/object-store/resources/common/job.yaml +++ b/kustomize/object-store/resources/common/job.yaml @@ -15,7 +15,7 @@ spec: containers: - name: generate-creds # renovate: datasource=docker depName=kubectl package=bitnami/kubectl - image: bitnami/kubectl:1.33.2 + image: bitnami/kubectl:1.33.3 command: ["/bin/bash", "-c"] args: - | diff --git a/kustomize/pki/resources/private-issuer/ca/copy-root-cert-job.yaml b/kustomize/pki/resources/private-issuer/ca/copy-root-cert-job.yaml index 3ead77c6..06f258e9 100644 --- a/kustomize/pki/resources/private-issuer/ca/copy-root-cert-job.yaml +++ b/kustomize/pki/resources/private-issuer/ca/copy-root-cert-job.yaml @@ -11,7 +11,7 @@ spec: containers: - name: copy-root-cert # renovate: datasource=docker depName=kubectl package=bitnami/kubectl - image: bitnami/kubectl:1.33.2 + image: bitnami/kubectl:1.33.3 command: - /bin/sh - -c