Skip to content

Commit

Permalink
refactor(artifacts): resolve artifacts methods
Browse files Browse the repository at this point in the history
  • Loading branch information
nemesisOsorio committed Oct 25, 2023
1 parent f1d0a77 commit b66b3e0
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 183 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -36,16 +36,13 @@
import com.netflix.spinnaker.orca.pipeline.persistence.ExecutionRepository;
import com.netflix.spinnaker.orca.pipeline.persistence.ExecutionRepository.ExecutionCriteria;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
Expand Down Expand Up @@ -215,20 +212,7 @@ public List<Artifact> getArtifactsForPipelineIdWithoutStageRef(
*/
public void validateArtifactConstraintsFromTrigger(Map<String, Object> pipeline) {
Map<String, Object> trigger = (Map<String, Object>) pipeline.get("trigger");
List<?> expectedArtifactIds =
(List<?>) trigger.getOrDefault("expectedArtifactIds", emptyList());

ImmutableList<ExpectedArtifact> expectedArtifacts =
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
.map(Collection::stream)
.orElse(Stream.empty())
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.filter(
artifact ->
expectedArtifactIds.contains(artifact.getId())
|| artifact.isUseDefaultArtifact()
|| artifact.isUsePriorArtifact())
.collect(toImmutableList());
ImmutableList<ExpectedArtifact> expectedArtifacts = filterExpectedArtifacts(pipeline, trigger);

ImmutableSet<Artifact> receivedArtifacts = concatReceivedArtifacts(pipeline, trigger);

Expand All @@ -237,19 +221,6 @@ public void validateArtifactConstraintsFromTrigger(Map<String, Object> pipeline)
.resolveExpectedArtifacts(expectedArtifacts);
}

// Merges the pipeline-level "receivedArtifacts" list with the trigger's own "artifacts"
// list into a single de-duplicated set, converting each raw (Map) entry into an Artifact
// via Jackson. Either list may be absent; a missing list contributes nothing.
private ImmutableSet<Artifact> concatReceivedArtifacts(
Map<String, Object> pipeline, Map<String, Object> trigger) {
return Stream.concat(
Optional.ofNullable((List<?>) pipeline.get("receivedArtifacts"))
.map(Collection::stream)
.orElse(Stream.empty()),
Optional.ofNullable((List<?>) trigger.get("artifacts"))
.map(Collection::stream)
.orElse(Stream.empty()))
.map(it -> objectMapper.convertValue(it, Artifact.class))
.collect(toImmutableSet());
}

/**
* This will only resolve the expected artifacts that match received artifacts. <br>
* If the expected artifact is not present in received artifacts, it is not resolved. <br>
Expand Down Expand Up @@ -280,86 +251,14 @@ public void resolveArtifactsFromTrigger(Map pipeline) {
/* requireUniqueMatches= */ true)
.resolveExpectedArtifacts(expectedArtifacts, false);

ImmutableSet<Artifact> allArtifacts =
ImmutableSet.<Artifact>builder()
.addAll(receivedArtifacts)
.addAll(resolveResult.getResolvedArtifacts())
.build();

try {
trigger.put(
"artifacts",
objectMapper.readValue(objectMapper.writeValueAsString(allArtifacts), List.class));
trigger.put(
"expectedArtifacts",
objectMapper.readValue(
objectMapper.writeValueAsString(resolveResult.getResolvedExpectedArtifacts()),
List.class));
trigger.put(
"resolvedExpectedArtifacts",
objectMapper.readValue(
objectMapper.writeValueAsString(resolveResult.getResolvedExpectedArtifacts()),
List.class)); // Add the actual expectedArtifacts we included in the ids.
} catch (IOException e) {
throw new ArtifactResolutionException(
"Failed to store artifacts in trigger: " + e.getMessage(), e);
}
}

private List<String> getExpectedArtifactIdsFromMap(Map<String, Object> trigger) {
List<String> expectedArtifactIds = (List<String>) trigger.get("expectedArtifactIds");
return (expectedArtifactIds != null) ? expectedArtifactIds : emptyList();
saveArtifactsInTrigger(trigger, receivedArtifacts, resolveResult);
}

public void resolveArtifacts(Map pipeline) {
Map<String, Object> trigger = (Map<String, Object>) pipeline.get("trigger");
List<?> triggers = Optional.ofNullable((List<?>) pipeline.get("triggers")).orElse(emptyList());
Set<String> expectedArtifactIdsListConcat =
new HashSet<>(getExpectedArtifactIdsFromMap(trigger));

// Due to 8df68b79cf1 getBoundArtifactForStage now does resolution which
// can potentially return null artifact back, which will throw an exception
// for stages that expect a non-null value. Before this commit, when
// getBoundArtifactForStage was called, the method would just retrieve the
// bound artifact from the stage context, and return the appropriate
// artifact back. This change prevents tasks like CreateBakeManifestTask
// from working properly, if null is returned.
//
// reference: https://github.com/spinnaker/orca/pull/4397
triggers.stream()
.map(it -> (Map<String, Object>) it)
// This filter prevents multiple triggers from adding its
// expectedArtifactIds unless it is the expected trigger type that was
// triggered
//
// reference: https://github.com/spinnaker/orca/pull/4322
.filter(it -> trigger.getOrDefault("type", "").equals(it.get("type")))
.map(this::getExpectedArtifactIdsFromMap)
.forEach(expectedArtifactIdsListConcat::addAll);

final List<String> expectedArtifactIds = new ArrayList<>(expectedArtifactIdsListConcat);
ImmutableList<ExpectedArtifact> expectedArtifacts =
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
.map(Collection::stream)
.orElse(Stream.empty())
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.filter(
artifact ->
expectedArtifactIds.contains(artifact.getId())
|| artifact.isUseDefaultArtifact()
|| artifact.isUsePriorArtifact())
.collect(toImmutableList());
ImmutableList<ExpectedArtifact> expectedArtifacts = filterExpectedArtifacts(pipeline, trigger);

ImmutableSet<Artifact> receivedArtifacts =
Stream.concat(
Optional.ofNullable((List<?>) pipeline.get("receivedArtifacts"))
.map(Collection::stream)
.orElse(Stream.empty()),
Optional.ofNullable((List<?>) trigger.get("artifacts"))
.map(Collection::stream)
.orElse(Stream.empty()))
.map(it -> objectMapper.convertValue(it, Artifact.class))
.collect(toImmutableSet());
ImmutableSet<Artifact> receivedArtifacts = concatReceivedArtifacts(pipeline, trigger);

ArtifactResolver.ResolveResult resolveResult =
ArtifactResolver.getInstance(
Expand All @@ -368,6 +267,43 @@ public void resolveArtifacts(Map pipeline) {
/* requireUniqueMatches= */ true)
.resolveExpectedArtifacts(expectedArtifacts);

saveArtifactsInTrigger(trigger, receivedArtifacts, resolveResult);
}

/**
 * Collects the pipeline's declared {@code expectedArtifacts} that are candidates for
 * resolution: artifacts whose id is referenced by the firing trigger — either directly via the
 * trigger's {@code expectedArtifactIds}, or via a pipeline {@code triggers} definition of the
 * same type — plus any artifact configured to fall back to a default or prior artifact.
 *
 * <p>The merge of ids from matching pipeline trigger definitions restores the pre-refactor
 * behavior of {@code resolveArtifacts} (see spinnaker/orca#4322 and #4397): only definitions
 * whose {@code type} equals the firing trigger's type may contribute their ids.
 *
 * <p>NOTE(review): this also widens {@code validateArtifactConstraintsFromTrigger} to consider
 * trigger-definition ids (a superset of what it saw before this refactor) — confirm acceptable.
 *
 * @param pipeline full pipeline configuration map (may lack "expectedArtifacts"/"triggers")
 * @param trigger the firing trigger map from the pipeline
 * @return immutable list of candidate expected artifacts; empty when none are declared
 */
private ImmutableList<ExpectedArtifact> filterExpectedArtifacts(
    Map<String, Object> pipeline, Map<String, Object> trigger) {
  // Ids referenced directly on the firing trigger.
  List<?> triggerIds = (List<?>) trigger.getOrDefault("expectedArtifactIds", emptyList());

  // Merge in ids declared on pipeline trigger definitions whose type matches the firing
  // trigger; other trigger types must not contribute their expectedArtifactIds.
  List<?> expectedArtifactIds =
      Stream.concat(
              triggerIds.stream(),
              Optional.ofNullable((List<?>) pipeline.get("triggers"))
                  .orElse(emptyList())
                  .stream()
                  .map(it -> (Map<String, Object>) it)
                  .filter(it -> trigger.getOrDefault("type", "").equals(it.get("type")))
                  .flatMap(
                      it ->
                          ((List<?>) it.getOrDefault("expectedArtifactIds", emptyList()))
                              .stream()))
          .distinct()
          .collect(Collectors.toList());

  return Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
      .map(Collection::stream)
      .orElse(Stream.empty())
      .map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
      .filter(
          artifact ->
              expectedArtifactIds.contains(artifact.getId())
                  || artifact.isUseDefaultArtifact()
                  || artifact.isUsePriorArtifact())
      .collect(toImmutableList());
}

/**
 * Gathers every artifact attached to this execution: the pipeline-level
 * {@code receivedArtifacts} plus the artifacts carried on the trigger itself.
 * Either list may be absent; a missing list simply contributes nothing.
 */
private ImmutableSet<Artifact> concatReceivedArtifacts(
    Map<String, Object> pipeline, Map<String, Object> trigger) {
  List<?> fromPipeline = (List<?>) pipeline.get("receivedArtifacts");
  List<?> fromTrigger = (List<?>) trigger.get("artifacts");

  Stream<?> pipelineStream = fromPipeline == null ? Stream.empty() : fromPipeline.stream();
  Stream<?> triggerStream = fromTrigger == null ? Stream.empty() : fromTrigger.stream();

  // Deserialize each raw map entry into a typed Artifact and de-duplicate.
  return Stream.concat(pipelineStream, triggerStream)
      .map(raw -> objectMapper.convertValue(raw, Artifact.class))
      .collect(toImmutableSet());
}

private void saveArtifactsInTrigger(
Map<String, Object> trigger,
ImmutableSet<Artifact> receivedArtifacts,
ArtifactResolver.ResolveResult resolveResult) {
ImmutableSet<Artifact> allArtifacts =
ImmutableSet.<Artifact>builder()
.addAll(receivedArtifacts)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ package com.netflix.spinnaker.orca.pipeline.util

import com.fasterxml.jackson.core.type.TypeReference
import com.fasterxml.jackson.databind.ObjectMapper
import com.netflix.spinnaker.kork.artifacts.ArtifactTypes
import com.netflix.spinnaker.kork.artifacts.model.Artifact
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
import com.netflix.spinnaker.orca.api.pipeline.models.ExecutionStatus
Expand Down Expand Up @@ -493,83 +492,6 @@ class ArtifactUtilsSpec extends Specification {
initialArtifacts == finalArtifacts
}

// Regression test for expectedArtifactIds contributed by pipeline "triggers" definitions:
// only the trigger definition whose type matches the firing trigger ("some-type") may add
// its ids, so exactly one expected artifact should resolve (see spinnaker/orca#4322).
// NOTE(review): deleting this test removes coverage for that merge behavior — confirm the
// refactored resolveArtifacts still honors it before dropping this spec.
def "should find artifact if triggers is present in pipeline"() {
given:
def defaultArtifact = Artifact.builder()
.customKind(true)
.build()

def matchArtifact = Artifact.builder()
.name("my-pipeline-artifact")
.type("embedded/base64")
.reference("aGVsbG8gd29ybGQK")
.build()

// Referenced by the matching ("some-type") trigger definition — should resolve.
def expectedArtifact = ExpectedArtifact.builder()
.usePriorArtifact(false)
.useDefaultArtifact(false)
.id("my-id")
.defaultArtifact(defaultArtifact)
.matchArtifact(matchArtifact)
.build()

// Referenced only by the non-matching ("some-other-type") definition — should be ignored.
def expectedArtifact2 = ExpectedArtifact.builder()
.usePriorArtifact(false)
.useDefaultArtifact(false)
.id("my-id-2")
.defaultArtifact(defaultArtifact)
.matchArtifact(matchArtifact)
.build()

def pipeline = [
"id": "abc",
"stages": [
stage {
// NOTE(review): these look like Groovy labeled statements inside the closure (no-ops),
// not map entries — confirm the stage fixture is intentionally inert here.
expectedArtifacts: [expectedArtifact]
inputArtifacts: [
"id": "my-id"
]
}
],
expectedArtifacts: [
expectedArtifact
],
trigger: [
artifacts: [
Artifact.builder()
.type(ArtifactTypes.EMBEDDED_BASE64.getMimeType())
.name(matchArtifact.getName())
.reference(matchArtifact.getReference())
.build()
],
type: "some-type"
],
// Two definitions; only the one whose type equals the firing trigger's type contributes ids.
triggers: [
[
enabled: true,
expectedArtifactIds: [
expectedArtifact.getId()
],
type: "some-type"
],
[
enabled: true,
expectedArtifactIds: [
expectedArtifact2.getId()
],
type: "some-other-type"
]
]
]

def pipelineMap = getObjectMapper().convertValue(pipeline, Map.class)
when:
makeArtifactUtils().resolveArtifacts(pipelineMap)

then:
// Exactly one artifact (expectedArtifact, id "my-id") should have been resolved.
pipelineMap.trigger.resolvedExpectedArtifacts.size() == 1
}

// Deserializes the trigger's "artifacts" entry into a typed List<Artifact>.
private List<Artifact> extractTriggerArtifacts(Map<String, Object> trigger) {
  def artifactListType = new TypeReference<List<Artifact>>() {}
  return objectMapper.convertValue(trigger.artifacts, artifactListType)
}
Expand Down

0 comments on commit b66b3e0

Please sign in to comment.