Skip to content

Commit

Permalink
chore: add a way to get provider data in the infra provider
Browse files Browse the repository at this point in the history
MachineRequest is private in the provider context, so the provider data
needs to be exposed through a dedicated accessor method.

Signed-off-by: Artem Chernyshev <artem.chernyshev@talos-systems.com>
  • Loading branch information
Unix4ever committed Oct 2, 2024
1 parent cc71fb6 commit 8334c59
Show file tree
Hide file tree
Showing 13 changed files with 63 additions and 38 deletions.
16 changes: 8 additions & 8 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-07T21:30:27Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: default
concurrency:
Expand Down Expand Up @@ -146,7 +146,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "true"
INTEGRATION_TEST_ARGS: --test.run CleanState/|Auth/|DefaultCluster/
RUN_TALEMU_TESTS: "true"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down Expand Up @@ -270,7 +270,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|EtcdBackupAndRestore
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down Expand Up @@ -348,7 +348,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|RollingUpdateParallelism/|ForcedMachineRemoval/|ReplaceControlPlanes/|ConfigPatching/|KubernetesNodeAudit/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down Expand Up @@ -426,7 +426,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|TalosImageGeneration/|ImmediateClusterDestruction/|DefaultCluster/|EncryptedCluster/|SinglenodeCluster/|Auth/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down Expand Up @@ -504,7 +504,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|ClusterTemplate/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down Expand Up @@ -582,7 +582,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down Expand Up @@ -660,7 +660,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|WorkloadProxy
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-backups-cron.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-06T00:35:46Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: e2e-backups-cron
concurrency:
Expand Down Expand Up @@ -63,7 +63,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|EtcdBackupAndRestore
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-scaling-cron.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-06T00:35:46Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: e2e-scaling-cron
concurrency:
Expand Down Expand Up @@ -63,7 +63,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|RollingUpdateParallelism/|ForcedMachineRemoval/|ReplaceControlPlanes/|ConfigPatching/|KubernetesNodeAudit/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-short-cron.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-06T00:35:46Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: e2e-short-cron
concurrency:
Expand Down Expand Up @@ -63,7 +63,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|TalosImageGeneration/|ImmediateClusterDestruction/|DefaultCluster/|EncryptedCluster/|SinglenodeCluster/|Auth/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-templates-cron.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-06T00:35:46Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: e2e-templates-cron
concurrency:
Expand Down Expand Up @@ -63,7 +63,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|ClusterTemplate/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-upgrades-cron.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-06T00:35:46Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: e2e-upgrades-cron
concurrency:
Expand Down Expand Up @@ -63,7 +63,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/e2e-workload-proxy-cron.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# THIS FILE WAS AUTOMATICALLY GENERATED, PLEASE DO NOT EDIT.
#
# Generated on 2024-09-06T00:35:46Z by kres 8be5fa7.
# Generated on 2024-09-30T14:56:12Z by kres 8be5fa7.

name: e2e-workload-proxy-cron
concurrency:
Expand Down Expand Up @@ -63,7 +63,7 @@ jobs:
INTEGRATION_RUN_E2E_TEST: "false"
INTEGRATION_TEST_ARGS: --test.run CleanState/|WorkloadProxy
RUN_TALEMU_TESTS: "false"
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/
TALEMU_TEST_ARGS: --test.run ImmediateClusterDestruction/|EncryptedCluster/|SinglenodeCluster/|ScaleUpAndDown/|ScaleUpAndDownMachineClassBasedMachineSets/|TalosUpgrades/|KubernetesUpgrades/|MaintenanceDowngrade/|ClusterTemplate/|ScaleUpAndDownAutoProvisionMachineSets/
WITH_DEBUG: "true"
run: |
sudo -E make run-integration-test
Expand Down
38 changes: 22 additions & 16 deletions client/pkg/infra/controllers/provision.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ package controllers

import (
"context"
"slices"

"github.com/cosi-project/runtime/pkg/controller"
"github.com/cosi-project/runtime/pkg/controller/generic"
Expand All @@ -15,6 +14,7 @@ import (
"github.com/cosi-project/runtime/pkg/safe"
"github.com/cosi-project/runtime/pkg/state"
"github.com/siderolabs/gen/optional"
"github.com/siderolabs/gen/xerrors"
"go.uber.org/zap"

"github.com/siderolabs/omni/client/api/omni/specs"
Expand Down Expand Up @@ -184,19 +184,9 @@ func (ctrl *ProvisionController[T]) reconcileRunning(ctx context.Context, r cont

steps := ctrl.provisioner.ProvisionSteps()

initialStep, ok := res.Metadata().Annotations().Get(currentStepAnnotation)
initialStep, _ := res.Metadata().Annotations().Get(currentStepAnnotation)

var initialStepIndex int

if ok {
if index := slices.IndexFunc(steps, func(step provision.Step[T]) bool {
return step.Name() == initialStep
}); index != -1 {
initialStepIndex = index
}
}

for _, step := range steps[initialStepIndex:] {
for _, step := range steps {
if initialStep != "" && step.Name() != initialStep {
continue
}
Expand All @@ -205,23 +195,35 @@ func (ctrl *ProvisionController[T]) reconcileRunning(ctx context.Context, r cont

logger.Info("running provision step", zap.String("step", step.Name()))

res.Metadata().Annotations().Set(currentStepAnnotation, step.Name())
var requeueError error

if err = safe.WriterModify(ctx, r, res.(T), func(st T) error { //nolint:forcetypeassert
return step.Run(ctx, logger, provision.NewContext(
err = step.Run(ctx, logger, provision.NewContext(
machineRequest,
machineRequestStatus,
st,
connectionParams,
ctrl.imageFactory,
))

st.Metadata().Annotations().Set(currentStepAnnotation, step.Name())

if err != nil {
if !xerrors.TypeIs[*controller.RequeueError](err) {
return err
}

requeueError = err
}

return nil
}); err != nil {
logger.Error("machine provision failed", zap.Error(err), zap.String("step", step.Name()))

machineRequestStatus.TypedSpec().Value.Error = err.Error()
machineRequestStatus.TypedSpec().Value.Stage = specs.MachineRequestStatusSpec_FAILED

return nil
return nil //nolint:nilerr
}

if err = safe.WriterModify(ctx, r, machineRequestStatus, func(res *infra.MachineRequestStatus) error {
Expand All @@ -231,6 +233,10 @@ func (ctrl *ProvisionController[T]) reconcileRunning(ctx context.Context, r cont
}); err != nil {
return err
}

if requeueError != nil {
return requeueError
}
}

machineRequestStatus.TypedSpec().Value.Stage = specs.MachineRequestStatusSpec_PROVISIONED
Expand Down
6 changes: 6 additions & 0 deletions client/pkg/infra/provision/context.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import (
"github.com/siderolabs/gen/xslices"
"github.com/siderolabs/image-factory/pkg/schematic"
"go.uber.org/zap"
"gopkg.in/yaml.v3"

"github.com/siderolabs/omni/client/api/omni/specs"
"github.com/siderolabs/omni/client/pkg/omni/resources/infra"
Expand Down Expand Up @@ -122,6 +123,11 @@ func (context *Context[T]) SetMachineInfraID(value string) {
context.MachineRequestStatus.Metadata().Labels().Set(omni.LabelMachineInfraID, value)
}

// UnmarshalProviderData decodes the YAML-encoded provider data carried by the
// underlying machine request into dest. dest must be a pointer suitable for
// yaml.Unmarshal; any decode error is returned unchanged.
func (context *Context[T]) UnmarshalProviderData(dest any) error {
	providerData := context.machineRequest.TypedSpec().Value.ProviderData

	return yaml.Unmarshal([]byte(providerData), dest)
}

// GenerateSchematicID generate the final schematic out of the machine request.
// This method also calls the image factory and uploads the schematic there.
func (context *Context[T]) GenerateSchematicID(ctx context.Context, logger *zap.Logger, opts ...SchematicOption) (string, error) {
Expand Down
5 changes: 5 additions & 0 deletions client/pkg/infra/provision/errors.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,8 @@ func NewRetryError(err error, interval time.Duration) error {
func NewRetryErrorf(interval time.Duration, format string, args ...any) error {
return controller.NewRequeueErrorf(interval, format, args...)
}

// NewRetryInterval returns an error that, when returned from a provisioner
// step, asks the framework to requeue and call the provisioner again after
// the given interval. Unlike NewRetryError, it carries no underlying cause —
// it signals a scheduled retry rather than a failure.
func NewRetryInterval(interval time.Duration) error {
	return controller.NewRequeueInterval(interval)
}
10 changes: 8 additions & 2 deletions frontend/src/components/common/Form/ContentWrapper.vue
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ included in the LICENSE file.
<template>
<div class="flex justify-between py-3 px-3 items-center gap-2" v-if="control.label">
<div class="text-naturals-N11 text-xs flex items-center gap-2">
{{ control.label }}
{{ control.label }}{{ description }}
<tooltip :description="control.errors" v-if="control.errors">
<t-icon icon="warning" class="text-yellow-Y1 w-4 h-4"/>
</tooltip>
Expand All @@ -27,11 +27,17 @@ included in the LICENSE file.
<script setup lang="ts">
import Tooltip from '../Tooltip/Tooltip.vue';
import TIcon from '../Icon/TIcon.vue';
import { computed } from 'vue';

defineProps<{
const props = defineProps<{
control: {
label: string
errors: string
description?: string
},
}>()

const description = computed(() => {
return props.control.description ? ` (${props.control.description})` : "";
})
</script>
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ func NewMachineProvisionController() *MachineProvisionController {
machineRequestSet.TypedSpec().Value.KernelArgs = provision.KernelArgs
machineRequestSet.TypedSpec().Value.MetaValues = provision.MetaValues
machineRequestSet.TypedSpec().Value.TalosVersion = provision.TalosVersion
machineRequestSet.TypedSpec().Value.ProviderData = provision.ProviderData

pressure, err := safe.ReaderGetByID[*omni.MachineRequestSetPressure](ctx, r, machineClass.Metadata().ID())
if err != nil && !state.IsNotFoundError(err) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,7 @@ func (h *machineRequestSetStatusHandler) scaleUp(ctx context.Context, r controll
request.TypedSpec().Value.Extensions = machineRequestSet.TypedSpec().Value.Extensions
request.TypedSpec().Value.KernelArgs = machineRequestSet.TypedSpec().Value.KernelArgs
request.TypedSpec().Value.MetaValues = machineRequestSet.TypedSpec().Value.MetaValues
request.TypedSpec().Value.ProviderData = machineRequestSet.TypedSpec().Value.ProviderData

request.Metadata().Labels().Set(omni.LabelInfraProviderID, machineRequestSet.TypedSpec().Value.ProviderId)
request.Metadata().Labels().Set(omni.LabelMachineRequestSet, machineRequestSet.Metadata().ID())
Expand Down

0 comments on commit 8334c59

Please sign in to comment.