feat(stageExecution): reduce execution context size
- there are a lot of fields duplicated between the stage context and the stage outputs. This commit removes some of that duplication.

- use a config class (TaskConfigurationProperties) instead of the dynamic config service. This makes it easier to load list-typed configs, without having to read them as strings and then split them on a delimiter (see the sketch below).
kirangodishala committed Sep 18, 2024
1 parent 7068d30 commit 1bf33fd
Showing 19 changed files with 3,938 additions and 42 deletions.
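As a rough illustration of the second commit-message bullet, here is a minimal, self-contained Java sketch. The flat key/value map stands in for a dynamic config service, and the key tasks.exclude-keys-from-outputs with the values jobStatus and completionDetails are invented for the example; only TaskConfigurationProperties itself is real.

import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class ListConfigSketch {
  public static void main(String[] args) {
    // Hypothetical flat key/value store standing in for a dynamic config service.
    Map<String, String> dynamicConfig =
        Map.of("tasks.exclude-keys-from-outputs", "jobStatus, completionDetails");

    // Before: a list-valued setting is read as a delimited string and split by hand.
    Set<String> excludeKeys =
        Arrays.stream(dynamicConfig.getOrDefault("tasks.exclude-keys-from-outputs", "").split(","))
            .map(String::trim)
            .filter(s -> !s.isEmpty())
            .collect(Collectors.toSet());

    System.out.println(excludeKeys); // [jobStatus, completionDetails] (set order unspecified)

    // After: a @ConfigurationProperties class such as TaskConfigurationProperties lets
    // Spring Boot bind the same setting straight into a Set<String>; see the Binder sketch
    // after the new config class below.
  }
}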
@@ -26,6 +26,8 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

@@ -91,6 +93,22 @@ default Collection<String> aliases() {
return Collections.emptyList();
}

/**
* Filters certain keys out of a stage's "context.outputs" map. It takes the context.outputs map
* as input, along with a collection of keys to be filtered from it, and returns the outputs map
* without those keys.
*
* @param outputs Map of a stage context's "outputs"
* @param keysToFilter Collection of keys that need to be filtered from outputs
* @return filtered map of stage context's "outputs"
*/
default Map<String, Object> filterContextOutputs(
Map<String, Object> outputs, Collection<String> keysToFilter) {
return outputs.entrySet().stream()
.filter(map -> !keysToFilter.contains(map.getKey()))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}

/** Allows backwards compatibility of a task's "type", even through class renames / refactors. */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
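As a quick illustration (an assumed usage sketch, not code from this commit), the following self-contained Java snippet mirrors the body of the new default filterContextOutputs helper; the output key names are made up for the example.

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class FilterOutputsSketch {
  // Mirrors the new default filterContextOutputs(...) helper shown above.
  static Map<String, Object> filterContextOutputs(
      Map<String, Object> outputs, Collection<String> keysToFilter) {
    return outputs.entrySet().stream()
        .filter(entry -> !keysToFilter.contains(entry.getKey()))
        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
  }

  public static void main(String[] args) {
    // Hypothetical stage outputs; the key names are invented for illustration.
    Map<String, Object> outputs = Map.of(
        "jobStatus", Map.of("location", "default"),
        "propertyFileContents", Map.of("token", "secret"),
        "artifacts", List.of("gcr.io/example/image"));

    // With "propertyFileContents" configured in excludeKeysFromOutputs, only the
    // remaining keys are propagated onward as stage outputs.
    System.out.println(
        filterContextOutputs(outputs, Set.of("propertyFileContents")).keySet());
    // -> [jobStatus, artifacts] (set iteration order is unspecified)
  }
}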
@@ -26,6 +26,8 @@ import com.netflix.spinnaker.orca.api.pipeline.OverridableTimeoutRetryableTask
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution
import com.netflix.spinnaker.orca.api.pipeline.TaskResult
import com.netflix.spinnaker.orca.clouddriver.KatoRestService
import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties
import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties.WaitOnJobCompletionTaskConfig
import com.netflix.spinnaker.orca.clouddriver.utils.CloudProviderAware
import com.netflix.spinnaker.orca.front50.Front50Service
import com.netflix.spinnaker.orca.clouddriver.exception.JobFailedException
@@ -53,6 +55,7 @@ public class WaitOnJobCompletion implements CloudProviderAware, OverridableTimeo
private final RetrySupport retrySupport
private final JobUtils jobUtils
private final ExecutionRepository repository
private final WaitOnJobCompletionTaskConfig configProperties
Front50Service front50Service

static final String REFRESH_TYPE = "Job"
@@ -68,13 +71,17 @@ public class WaitOnJobCompletion implements CloudProviderAware, OverridableTimeo
RetrySupport retrySupport,
JobUtils jobUtils,
@Nullable Front50Service front50Service,
TaskConfigurationProperties configProperties,
ExecutionRepository repository) {
this.katoRestService = katoRestService
this.objectMapper = objectMapper
this.retrySupport = retrySupport
this.jobUtils = jobUtils
this.front50Service = front50Service
this.configProperties = configProperties.getWaitOnJobCompletionTask()
this.repository = repository

log.info("output keys to filter: {}", this.configProperties.getExcludeKeysFromOutputs())
}

@Override
@@ -190,7 +197,14 @@ public class WaitOnJobCompletion implements CloudProviderAware, OverridableTimeo
}
}

TaskResult.builder(status).context(outputs).outputs(outputs).build()
// exclude certain configured keys from being stored in the stage outputs
Map<String, Object> filteredOutputs = filterContextOutputs(outputs, configProperties.getExcludeKeysFromOutputs())
log.info("context outputs will only contain: ${filteredOutputs.keySet()} keys")

TaskResult.builder(status)
.context(outputs)
.outputs(filteredOutputs)
.build()
}

private Boolean applicationExists(String appName) {
@@ -57,7 +57,8 @@
})
@EnableConfigurationProperties({
CloudDriverConfigurationProperties.class,
PollerConfigurationProperties.class
PollerConfigurationProperties.class,
TaskConfigurationProperties.class
})
@Slf4j
public class CloudDriverConfiguration {
@@ -0,0 +1,69 @@
/*
* Copyright 2021 Salesforce.com, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.netflix.spinnaker.orca.clouddriver.config;

import com.netflix.spinnaker.orca.clouddriver.tasks.job.WaitOnJobCompletion;
import com.netflix.spinnaker.orca.clouddriver.tasks.manifest.PromoteManifestKatoOutputsTask;
import com.netflix.spinnaker.orca.clouddriver.tasks.manifest.ResolveDeploySourceManifestTask;
import java.util.Set;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;

@Data
@ConfigurationProperties("tasks.clouddriver")
/** Configuration properties for various Orca tasks in the orca-clouddriver module. */
public class TaskConfigurationProperties {

/** properties that pertain to {@link WaitOnJobCompletion} task. */
private WaitOnJobCompletionTaskConfig waitOnJobCompletionTask =
new WaitOnJobCompletionTaskConfig();

/** properties that pertain to {@link PromoteManifestKatoOutputsTask} task */
private PromoteManifestKatoOutputsTaskConfig promoteManifestKatoOutputsTask =
new PromoteManifestKatoOutputsTaskConfig();

/** properties that pertain to {@link ResolveDeploySourceManifestTask} task */
private ResolveDeploySourceManifestTaskConfig resolveDeploySourceManifestTask =
new ResolveDeploySourceManifestTaskConfig();

@Data
public static class WaitOnJobCompletionTaskConfig {
/**
* Set of keys to be excluded from the "outputs" key in the stage execution context. An empty
* set (the default) means that no keys will be excluded.
*/
private Set<String> excludeKeysFromOutputs = Set.of();
}

@Data
public static class PromoteManifestKatoOutputsTaskConfig {
/**
* Set of keys to be excluded from the "outputs" key in the stage execution context. An empty
* set (the default) means that no keys will be excluded.
*/
private Set<String> excludeKeysFromOutputs = Set.of();
}

@Data
public static class ResolveDeploySourceManifestTaskConfig {
/**
* Set of keys to be excluded from the "outputs" key in the stage execution context. An empty
* set (the default) means that no keys will be excluded.
*/
private Set<String> excludeKeysFromOutputs = Set.of();
}
}
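A minimal sketch of how these properties would bind, assuming standard Spring Boot relaxed binding and the Lombok-generated setters; the excluded keys jobStatus and completionDetails are made-up example values, not defaults shipped with Orca.

import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties;
import java.util.Map;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.MapConfigurationPropertySource;

public class TaskConfigBindingSketch {
  public static void main(String[] args) {
    // Simulates what Spring Boot would bind from application.yml / properties for
    // tasks.clouddriver.wait-on-job-completion-task.exclude-keys-from-outputs.
    MapConfigurationPropertySource source = new MapConfigurationPropertySource(Map.of(
        "tasks.clouddriver.wait-on-job-completion-task.exclude-keys-from-outputs[0]", "jobStatus",
        "tasks.clouddriver.wait-on-job-completion-task.exclude-keys-from-outputs[1]", "completionDetails"));

    TaskConfigurationProperties props = new Binder(source)
        .bind("tasks.clouddriver", TaskConfigurationProperties.class)
        .get();

    // The other nested configs keep their empty-set defaults, so nothing is filtered there.
    System.out.println(props.getWaitOnJobCompletionTask().getExcludeKeysFromOutputs());
    // -> [jobStatus, completionDetails] (set order unspecified)
  }
}

With this in place, operators can trim noisy keys out of a stage's propagated outputs per task type, while the full map is still kept in that stage's own context.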
@@ -24,6 +24,8 @@
import com.netflix.spinnaker.orca.api.pipeline.TaskResult;
import com.netflix.spinnaker.orca.api.pipeline.models.ExecutionStatus;
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties;
import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties.PromoteManifestKatoOutputsTaskConfig;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -34,7 +36,6 @@
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
@@ -50,7 +51,15 @@ public class PromoteManifestKatoOutputsTask implements Task {
private static final String CREATED_ARTIFACTS_KEY = "createdArtifacts";
private static final String ARTIFACTS_KEY = "artifacts";

@Autowired ObjectMapper objectMapper;
private final ObjectMapper objectMapper;
private final PromoteManifestKatoOutputsTaskConfig configProperties;

public PromoteManifestKatoOutputsTask(
ObjectMapper objectMapper, TaskConfigurationProperties configProperties) {
this.objectMapper = objectMapper;
this.configProperties = configProperties.getPromoteManifestKatoOutputsTask();
log.info("output keys to filter: {}", this.configProperties.getExcludeKeysFromOutputs());
}

@Nonnull
@Override
@@ -77,7 +86,15 @@ public TaskResult execute(@Nonnull StageExecution stage) {
addToOutputs(outputs, allResults, CREATED_ARTIFACTS_KEY, ARTIFACTS_KEY);
convertKey(outputs, ARTIFACTS_KEY, artifactListType);

return TaskResult.builder(ExecutionStatus.SUCCEEDED).context(outputs).outputs(outputs).build();
// exclude certain configured keys from being stored in the stage outputs
Map<String, Object> filteredOutputs =
filterContextOutputs(outputs, configProperties.getExcludeKeysFromOutputs());
log.info("context outputs will only contain: {} keys", filteredOutputs.keySet());

return TaskResult.builder(ExecutionStatus.SUCCEEDED)
.context(outputs)
.outputs(filteredOutputs)
.build();
}

private void convertKey(Map<String, Object> outputs, String key, TypeReference tr) {
@@ -22,22 +22,32 @@
import com.netflix.spinnaker.orca.api.pipeline.TaskResult;
import com.netflix.spinnaker.orca.api.pipeline.models.ExecutionStatus;
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution;
import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties;
import com.netflix.spinnaker.orca.clouddriver.config.TaskConfigurationProperties.ResolveDeploySourceManifestTaskConfig;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
@NonnullByDefault
public final class ResolveDeploySourceManifestTask implements Task {
public static final String TASK_NAME = "resolveDeploySourceManifest";
private final Logger log = LoggerFactory.getLogger(getClass());

private final ManifestEvaluator manifestEvaluator;
private final ResolveDeploySourceManifestTaskConfig configProperties;

@Autowired
public ResolveDeploySourceManifestTask(ManifestEvaluator manifestEvaluator) {
public ResolveDeploySourceManifestTask(
ManifestEvaluator manifestEvaluator, TaskConfigurationProperties configProperties) {
this.manifestEvaluator = manifestEvaluator;
this.configProperties = configProperties.getResolveDeploySourceManifestTask();
log.info("output keys to filter: {}", this.configProperties.getExcludeKeysFromOutputs());
}

@Nonnull
@@ -48,7 +58,16 @@ public TaskResult execute(@Nonnull StageExecution stage) {
DeployManifestContext context = stage.mapTo(DeployManifestContext.class);
ManifestEvaluator.Result result = manifestEvaluator.evaluate(stage, context);
ImmutableMap<String, Object> outputs = getOutputs(result);
return TaskResult.builder(ExecutionStatus.SUCCEEDED).context(outputs).outputs(outputs).build();

// exclude certain configured keys from being stored in the stage outputs
Map<String, Object> filteredOutputs =
filterContextOutputs(outputs, configProperties.getExcludeKeysFromOutputs());
log.info("context outputs will only contain: {} keys", filteredOutputs.keySet());

return TaskResult.builder(ExecutionStatus.SUCCEEDED)
.context(outputs)
.outputs(filteredOutputs)
.build();
}

private ImmutableMap<String, Object> getOutputs(ManifestEvaluator.Result result) {