bazel/ci: Cleanup flags and env vars (envoyproxy#30211)
Signed-off-by: Ryan Northey <ryan@synca.io>
phlax authored Oct 16, 2023
1 parent 1b0b3e2 · commit 18b4f99
Showing 10 changed files with 100 additions and 132 deletions.
106 changes: 63 additions & 43 deletions .azure-pipelines/ci.yml
@@ -176,31 +176,68 @@ steps:
tmpfsDockerDisabled: "${{ parameters.tmpfsDockerDisabled }}"

- script: |
if [[ "${{ parameters.bazelUseBES }}" == 'false' ]]; then
unset GOOGLE_BES_PROJECT_ID
ENVOY_SHARED_TMP_DIR=/tmp/bazel-shared
mkdir -p "$ENVOY_SHARED_TMP_DIR"
BAZEL_BUILD_EXTRA_OPTIONS="${{ parameters.bazelBuildExtraOptions }}"
if [[ "${{ parameters.rbe }}" == "True" ]]; then
# mktemp will create a tempfile with u+rw permission minus umask, it will not be readable by all
# users by default.
GCP_SERVICE_ACCOUNT_KEY_PATH=$(mktemp -p "${ENVOY_SHARED_TMP_DIR}" -t gcp_service_account.XXXXXX.json)
bash -c 'echo "$(GcpServiceAccountKey)"' | base64 --decode > "${GCP_SERVICE_ACCOUNT_KEY_PATH}"
BAZEL_BUILD_EXTRA_OPTIONS+=" ${{ parameters.bazelConfigRBE }} --google_credentials=${GCP_SERVICE_ACCOUNT_KEY_PATH}"
ENVOY_RBE=1
if [[ "${{ parameters.bazelUseBES }}" == "True" ]]; then
BAZEL_BUILD_EXTRA_OPTIONS+=" --config=rbe-google-bes --bes_instance_name=${GOOGLE_BES_PROJECT_ID}"
fi
else
echo "using local build cache."
# Normalize branches - `release/vX.xx`, `vX.xx`, `vX.xx.x` -> `vX.xx`
TARGET_BRANCH=$(echo "${CI_TARGET_BRANCH}" | cut -d/ -f2-)
BRANCH_NAME="$(echo "${TARGET_BRANCH}" | cut -d/ -f2 | cut -d. -f-2)"
if [[ "$BRANCH_NAME" == "merge" ]]; then
# Manually run PR commit - there is no easy way of telling which branch
# it is, so just set it to `main` - otherwise it tries to cache as `branch/merge`
BRANCH_NAME=main
fi
BAZEL_REMOTE_INSTANCE="branch/${BRANCH_NAME}"
echo "instance_name: ${BAZEL_REMOTE_INSTANCE}."
BAZEL_BUILD_EXTRA_OPTIONS+=" --config=ci --config=cache-local --remote_instance_name=${BAZEL_REMOTE_INSTANCE} --remote_timeout=600"
fi
ci/run_envoy_docker.sh 'ci/do_ci.sh fetch-${{ parameters.ciTarget }}'
condition: and(not(canceled()), not(failed()), ne('${{ parameters.cacheName }}', ''), ne(variables.CACHE_RESTORED, 'true'))
if [[ "${{ parameters.cacheTestResults }}" != "True" ]]; then
VERSION_DEV="$(cut -d- -f2 "VERSION.txt")"
# Use uncached test results for non-release scheduledruns.
if [[ $VERSION_DEV == "dev" ]]; then
BAZEL_EXTRA_TEST_OPTIONS+=" --nocache_test_results"
fi
fi
# Any PR or CI run in envoy-presubmit uses the fake SCM hash
if [[ "${{ variables['Build.Reason'] }}" == "PullRequest" || "${{ variables['Build.DefinitionName'] }}" == 'envoy-presubmit' ]]; then
# sha1sum of `ENVOY_PULL_REQUEST`
BAZEL_FAKE_SCM_REVISION=e3b4a6e9570da15ac1caffdded17a8bebdc7dfc9
fi
echo "##vso[task.setvariable variable=BAZEL_BUILD_EXTRA_OPTIONS]${BAZEL_BUILD_EXTRA_OPTIONS}"
echo "##vso[task.setvariable variable=BAZEL_EXTRA_TEST_OPTIONS]${BAZEL_EXTRA_TEST_OPTIONS}"
echo "##vso[task.setvariable variable=BAZEL_FAKE_SCM_REVISION]${BAZEL_FAKE_SCM_REVISION}"
echo "##vso[task.setvariable variable=BAZEL_STARTUP_EXTRA_OPTIONS]${{ parameters.bazelStartupExtraOptions }}"
echo "##vso[task.setvariable variable=CI_TARGET_BRANCH]${CI_TARGET_BRANCH}"
echo "##vso[task.setvariable variable=ENVOY_BUILD_FILTER_EXAMPLE]${{ parameters.envoyBuildFilterExample }}"
echo "##vso[task.setvariable variable=ENVOY_DOCKER_BUILD_DIR]$(Build.StagingDirectory)"
echo "##vso[task.setvariable variable=ENVOY_RBE]${ENVOY_RBE}"
echo "##vso[task.setvariable variable=ENVOY_SHARED_TMP_DIR]${ENVOY_SHARED_TMP_DIR}"
echo "##vso[task.setvariable variable=GCP_SERVICE_ACCOUNT_KEY_PATH]${GCP_SERVICE_ACCOUNT_KEY_PATH}"
echo "##vso[task.setvariable variable=GITHUB_TOKEN]${{ parameters.authGithub }}"
workingDirectory: $(Build.SourcesDirectory)
env:
ENVOY_DOCKER_BUILD_DIR: $(Build.StagingDirectory)
GITHUB_TOKEN: "${{ parameters.authGithub }}"
BAZEL_STARTUP_EXTRA_OPTIONS: "${{ parameters.bazelStartupExtraOptions }}"
${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
CI_TARGET_BRANCH: "origin/$(System.PullRequest.TargetBranch)"
${{ if ne(variables['Build.Reason'], 'PullRequest') }}:
CI_TARGET_BRANCH: "origin/$(Build.SourceBranchName)"
# Any PR or CI run in envoy-presubmit uses the fake SCM hash
${{ if or(eq(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.DefinitionName'], 'envoy-presubmit')) }}:
# sha1sum of `ENVOY_PULL_REQUEST`
BAZEL_FAKE_SCM_REVISION: e3b4a6e9570da15ac1caffdded17a8bebdc7dfc9
${{ if parameters.rbe }}:
GCP_SERVICE_ACCOUNT_KEY: $(GcpServiceAccountKey)
ENVOY_RBE: "1"
BAZEL_BUILD_EXTRA_OPTIONS: "${{ parameters.bazelConfigRBE }} ${{ parameters.bazelBuildExtraOptions }}"
${{ if eq(parameters.rbe, false) }}:
BAZEL_BUILD_EXTRA_OPTIONS: "--config=ci ${{ parameters.bazelBuildExtraOptions }}"
BAZEL_REMOTE_CACHE: $(LocalBuildCache)
displayName: "CI env ${{ parameters.ciTarget }}"

- script: ci/run_envoy_docker.sh 'ci/do_ci.sh fetch-${{ parameters.ciTarget }}'
condition: and(not(canceled()), not(failed()), ne('${{ parameters.cacheName }}', ''), ne(variables.CACHE_RESTORED, 'true'))
workingDirectory: $(Build.SourcesDirectory)
env:
${{ each var in parameters.env }}:
${{ var.key }}: ${{ var.value }}
displayName: "Fetch assets (${{ parameters.ciTarget }})"
@@ -231,34 +268,10 @@ steps:
displayName: "Enable IPv6"
condition: ${{ parameters.managedAgent }}

- script: |
if [[ "${{ parameters.bazelUseBES }}" == 'false' ]]; then
unset GOOGLE_BES_PROJECT_ID
fi
ci/run_envoy_docker.sh 'ci/do_ci.sh ${{ parameters.ciTarget }}'
- script: ci/run_envoy_docker.sh 'ci/do_ci.sh ${{ parameters.ciTarget }}'
workingDirectory: $(Build.SourcesDirectory)
env:
ENVOY_DOCKER_BUILD_DIR: $(Build.StagingDirectory)
ENVOY_BUILD_FILTER_EXAMPLE: ${{ parameters.envoyBuildFilterExample }}
GITHUB_TOKEN: "${{ parameters.authGithub }}"
BAZEL_STARTUP_EXTRA_OPTIONS: "${{ parameters.bazelStartupExtraOptions }}"
${{ if ne(parameters['cacheTestResults'], true) }}:
BAZEL_NO_CACHE_TEST_RESULTS: 1
${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
CI_TARGET_BRANCH: "origin/$(System.PullRequest.TargetBranch)"
${{ if ne(variables['Build.Reason'], 'PullRequest') }}:
CI_TARGET_BRANCH: "origin/$(Build.SourceBranchName)"
# Any PR or CI run in envoy-presubmit uses the fake SCM hash
${{ if or(eq(variables['Build.Reason'], 'PullRequest'), eq(variables['Build.DefinitionName'], 'envoy-presubmit')) }}:
# sha1sum of `ENVOY_PULL_REQUEST`
BAZEL_FAKE_SCM_REVISION: e3b4a6e9570da15ac1caffdded17a8bebdc7dfc9
${{ if parameters.rbe }}:
GCP_SERVICE_ACCOUNT_KEY: $(GcpServiceAccountKey)
ENVOY_RBE: "1"
BAZEL_BUILD_EXTRA_OPTIONS: "${{ parameters.bazelConfigRBE }} ${{ parameters.bazelBuildExtraOptions }}"
${{ if eq(parameters.rbe, false) }}:
BAZEL_BUILD_EXTRA_OPTIONS: "--config=ci ${{ parameters.bazelBuildExtraOptions }}"
BAZEL_REMOTE_CACHE: $(LocalBuildCache)
${{ each var in parameters.env }}:
${{ var.key }}: ${{ var.value }}
displayName: "Run CI script ${{ parameters.ciTarget }}"
@@ -296,6 +309,13 @@ steps:
- ${{ each pair in step }}:
${{ pair.key }}: ${{ pair.value }}

- bash: |
if [[ -n "$GCP_SERVICE_ACCOUNT_KEY_PATH" && -e "$GCP_SERVICE_ACCOUNT_KEY_PATH" ]]; then
echo "Removed key: ${GCP_SERVICE_ACCOUNT_KEY_PATH}"
rm -rf "$GCP_SERVICE_ACCOUNT_KEY_PATH"
fi
condition: not(canceled())

- script: |
set -e
sudo .azure-pipelines/docker/save_cache.sh "$(Build.StagingDirectory)" /mnt/cache/all true true
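Putting the pieces of this file together, the service-account key now has an explicit lifecycle: written once by the "CI env" step into the shared tmp directory, consumed by Bazel via `--google_credentials`, and deleted by the always-run cleanup step above. A condensed, illustrative sketch (the `GCP_SERVICE_ACCOUNT_KEY_B64` variable stands in for the pipeline secret):

    ENVOY_SHARED_TMP_DIR=/tmp/bazel-shared
    mkdir -p "$ENVOY_SHARED_TMP_DIR"

    # mktemp creates the file with u+rw only, so the decoded key is not world-readable.
    GCP_SERVICE_ACCOUNT_KEY_PATH=$(mktemp -p "$ENVOY_SHARED_TMP_DIR" -t gcp_service_account.XXXXXX.json)
    echo "$GCP_SERVICE_ACCOUNT_KEY_B64" | base64 --decode > "$GCP_SERVICE_ACCOUNT_KEY_PATH"

    # Bazel reads the key from disk rather than receiving the raw secret in its environment.
    BAZEL_BUILD_EXTRA_OPTIONS+=" --google_credentials=${GCP_SERVICE_ACCOUNT_KEY_PATH}"

    # Mirrors the cleanup step: remove the key whether or not the job succeeded.
    cleanup_key () {
        if [[ -n "$GCP_SERVICE_ACCOUNT_KEY_PATH" && -e "$GCP_SERVICE_ACCOUNT_KEY_PATH" ]]; then
            rm -rf "$GCP_SERVICE_ACCOUNT_KEY_PATH"
            echo "Removed key: ${GCP_SERVICE_ACCOUNT_KEY_PATH}"
        fi
    }
    trap cleanup_key EXIT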
8 changes: 0 additions & 8 deletions .azure-pipelines/stage/checks.yml
@@ -101,15 +101,7 @@ jobs:
displayName: "Upload $(CI_TARGET) Report to GCS"
condition: and(not(canceled()), or(eq(variables['CI_TARGET'], 'coverage'), eq(variables['CI_TARGET'], 'fuzz_coverage')))
env:
ENVOY_DOCKER_BUILD_DIR: $(Build.StagingDirectory)
ENVOY_RBE: "1"
BAZEL_BUILD_EXTRA_OPTIONS: "--config=ci --config=rbe-google --jobs=$(RbeJobs)"
GCP_SERVICE_ACCOUNT_KEY: ${{ parameters.authGCP }}
GCS_ARTIFACT_BUCKET: ${{ parameters.bucketGCP }}
${{ if eq(variables['Build.Reason'], 'PullRequest') }}:
BAZEL_REMOTE_INSTANCE_BRANCH: "$(System.PullRequest.TargetBranch)"
${{ if ne(variables['Build.Reason'], 'PullRequest') }}:
BAZEL_REMOTE_INSTANCE_BRANCH: "$(Build.SourceBranchName)"

- job: complete
displayName: "Checks complete"
8 changes: 5 additions & 3 deletions .azure-pipelines/stage/macos.yml
@@ -27,9 +27,11 @@ jobs:
- script: ./ci/mac_ci_steps.sh
displayName: "Run Mac CI"
env:
BAZEL_BUILD_EXTRA_OPTIONS: "--remote_download_toplevel --flaky_test_attempts=2"
BAZEL_REMOTE_CACHE: grpcs://remotebuildexecution.googleapis.com
BAZEL_REMOTE_INSTANCE: projects/envoy-ci/instances/default_instance
BAZEL_BUILD_EXTRA_OPTIONS: >-
--remote_download_toplevel
--flaky_test_attempts=2
--remote_cache=grpcs://remotebuildexecution.googleapis.com
--remote_instance_name=projects/envoy-ci/instances/default_instance
GCP_SERVICE_ACCOUNT_KEY: ${{ parameters.authGCP }}
ENVOY_RBE: 1

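The macOS job previously pointed Bazel at the remote cache through the BAZEL_REMOTE_CACHE and BAZEL_REMOTE_INSTANCE environment variables; those are now spelled out as flags inside BAZEL_BUILD_EXTRA_OPTIONS. Once expanded, the invocation is roughly the following (the test target is a placeholder):

    bazel test \
        --remote_download_toplevel \
        --flaky_test_attempts=2 \
        --remote_cache=grpcs://remotebuildexecution.googleapis.com \
        --remote_instance_name=projects/envoy-ci/instances/default_instance \
        //test/common/...   # placeholder target pattern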
15 changes: 3 additions & 12 deletions .azure-pipelines/stage/prechecks.yml
@@ -99,15 +99,15 @@ jobs:
authGPGKey: ${{ parameters.authGPGKey }}
# GNUPGHOME inside the container
pathGPGConfiguredHome: /build/.gnupg
pathGPGHome: /tmp/envoy-docker-build/.gnupg
pathGPGHome: $(Build.StagingDirectory)/.gnupg
- bash: |
set -e
ci/run_envoy_docker.sh "
echo AUTHORITY > /tmp/authority \
&& gpg --clearsign /tmp/authority \
&& cat /tmp/authority.asc \
&& gpg --verify /tmp/authority.asc"
rm -rf /tmp/envoy-docker-build/.gnupg
rm -rf $(Build.StagingDirectory)/.gnupg
displayName: "Ensure container CI can sign with GPG"
condition: and(not(canceled()), eq(variables['CI_TARGET'], 'docs'))
@@ -129,10 +129,6 @@
ci/run_envoy_docker.sh 'ci/do_ci.sh dockerhub-readme'
displayName: "Dockerhub publishing test"
env:
ENVOY_DOCKER_BUILD_DIR: $(Build.StagingDirectory)
ENVOY_RBE: "1"
BAZEL_BUILD_EXTRA_OPTIONS: "--config=remote-ci --config=rbe-google --jobs=$(RbeJobs)"
GCP_SERVICE_ACCOUNT_KEY: ${{ parameters.authGCP }}
GCS_ARTIFACT_BUCKET: ${{ parameters.bucketGCP }}
condition: eq(variables['CI_TARGET'], 'docs')
@@ -155,14 +151,9 @@
condition: and(failed(), eq(variables['CI_TARGET'], 'check_and_fix_proto_format'))

# Publish docs
- script: |
ci/run_envoy_docker.sh 'ci/do_ci.sh docs-upload'
- script: ci/run_envoy_docker.sh 'ci/do_ci.sh docs-upload'
displayName: "Upload Docs to GCS"
env:
ENVOY_DOCKER_BUILD_DIR: $(Build.StagingDirectory)
ENVOY_RBE: "1"
BAZEL_BUILD_EXTRA_OPTIONS: "--config=remote-ci --config=rbe-google --jobs=$(RbeJobs)"
GCP_SERVICE_ACCOUNT_KEY: ${{ parameters.authGCP }}
GCS_ARTIFACT_BUCKET: ${{ parameters.bucketGCP }}
condition: eq(variables['CI_TARGET'], 'docs')

24 changes: 10 additions & 14 deletions .azure-pipelines/stage/publish.yml
@@ -123,10 +123,6 @@
eq(${{ parameters.publishDockerhub }}, 'true'))
displayName: "Publish Dockerhub description and README"
env:
ENVOY_DOCKER_BUILD_DIR: $(Build.StagingDirectory)
ENVOY_RBE: "1"
BAZEL_BUILD_EXTRA_OPTIONS: "--config=remote-ci --config=rbe-google --jobs=$(RbeJobs)"
GCP_SERVICE_ACCOUNT_KEY: ${{ parameters.authGCP }}
GCS_ARTIFACT_BUCKET: ${{ parameters.bucketGCP }}
DOCKERHUB_USERNAME: ${{ parameters.authDockerUser }}
DOCKERHUB_PASSWORD: ${{ parameters.authDockerPassword }}
@@ -277,6 +273,16 @@
pool:
vmImage: $(agentUbuntu)
steps:
- task: DownloadSecureFile@1
name: WorkflowTriggerKey
displayName: 'Download workflow trigger key'
inputs:
secureFile: '${{ parameters.authGithubWorkflow }}'
- bash: |
set -e
KEY="$(cat $(WorkflowTriggerKey.secureFilePath) | base64 -w0)"
echo "##vso[task.setvariable variable=value;isoutput=true]$KEY"
name: key
- template: ../ci.yml
parameters:
ciTarget: verify.trigger
@@ -310,13 +316,3 @@
mkdir -p $(Build.StagingDirectory)/release.signed
mv release.signed.tar.zst $(Build.StagingDirectory)/release.signed
displayName: Fetch signed release
- task: DownloadSecureFile@1
name: WorkflowTriggerKey
displayName: 'Download workflow trigger key'
inputs:
secureFile: '${{ parameters.authGithubWorkflow }}'
- bash: |
set -e
KEY="$(cat $(WorkflowTriggerKey.secureFilePath) | base64 -w0)"
echo "##vso[task.setvariable variable=value;isoutput=true]$KEY"
name: key
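The DownloadSecureFile task plus the `key` bash step replace the old inline handling: the secure file is fetched to an agent-local path, re-encoded as a single base64 line, and exposed as an output variable so later steps can reference it without touching the file again. A rough sketch of the encoding half, assuming a placeholder path in place of $(WorkflowTriggerKey.secureFilePath):

    # Placeholder for the path Azure Pipelines substitutes for the downloaded secure file.
    SECURE_FILE_PATH="/tmp/workflow-trigger.key"

    # -w0 keeps the base64 output on one line so it survives as a pipeline variable.
    KEY="$(base64 -w0 < "$SECURE_FILE_PATH")"

    # isoutput=true publishes the value as an output of the step named `key`,
    # addressable by later steps as key.value.
    echo "##vso[task.setvariable variable=value;isoutput=true]$KEY"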
2 changes: 2 additions & 0 deletions .bazelrc
@@ -235,6 +235,8 @@ build:fuzz-coverage --config=plain-fuzzer
build:fuzz-coverage --run_under=@envoy//bazel/coverage:fuzz_coverage_wrapper.sh
build:fuzz-coverage --test_tag_filters=-nocoverage

build:cache-local --remote_cache=grpc://localhost:9092

# Remote execution: https://docs.bazel.build/versions/master/remote-execution.html
build:rbe-toolchain --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1

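The new `cache-local` entry gives non-RBE CI jobs a named config for the sidecar cache, which the pipeline combines with `--config=ci`, a per-branch instance name, and a longer timeout (see the ci.yml hunk above). Roughly, a local-cache build resolves to something like this (the branch and target are illustrative):

    bazel build \
        --config=ci \
        --config=cache-local \
        --remote_instance_name=branch/main \
        --remote_timeout=600 \
        //source/exe:envoy-static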
8 changes: 0 additions & 8 deletions ci/build_setup.sh
@@ -119,14 +119,6 @@ bazel () {
export _bazel
export -f bazel

if [[ -n "$BAZEL_NO_CACHE_TEST_RESULTS" ]]; then
VERSION_DEV="$(cut -d- -f2 "${ENVOY_SRCDIR}/VERSION.txt")"
# Use uncached test results for non-release commits to a branch.
if [[ $VERSION_DEV == "dev" ]]; then
BAZEL_EXTRA_TEST_OPTIONS+=("--nocache_test_results")
fi
fi

# Use https://docs.bazel.build/versions/master/command-line-reference.html#flag--experimental_repository_cache_hardlinks
# to save disk space.
BAZEL_GLOBAL_OPTIONS=(
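The block removed from ci/build_setup.sh is not dropped outright: the same VERSION.txt check now lives in the "CI env" step of ci.yml, gated on the `cacheTestResults` parameter instead of the BAZEL_NO_CACHE_TEST_RESULTS variable. The check itself is a simple suffix test on the version string; a sketch, assuming it runs from the repository root:

    # VERSION.txt contains e.g. `1.29.0-dev` on main and `1.28.0` on a release.
    VERSION_DEV="$(cut -d- -f2 VERSION.txt)"
    if [[ "$VERSION_DEV" == "dev" ]]; then
        # Development versions skip cached test results so tests actually re-run.
        BAZEL_EXTRA_TEST_OPTIONS+=" --nocache_test_results"
    fi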
8 changes: 3 additions & 5 deletions ci/run_envoy_docker.sh
@@ -95,13 +95,13 @@ VOLUMES=(
-v "${ENVOY_DOCKER_BUILD_DIR}":"${BUILD_DIR_MOUNT_DEST}"
-v "${SOURCE_DIR}":"${SOURCE_DIR_MOUNT_DEST}")

if ! is_windows && [[ -n "$ENVOY_DOCKER_IN_DOCKER" ]]; then
if ! is_windows && [[ -n "$ENVOY_DOCKER_IN_DOCKER" || -n "$ENVOY_SHARED_TMP_DIR" ]]; then
# Create a "shared" directory that has the same path in/outside the container
# This allows the host docker engine to see artefacts using a temporary path created inside the container,
# at the same path.
# For example, a directory created with `mktemp -d --tmpdir /tmp/bazel-shared` can be mounted as a volume
# from within the build container.
SHARED_TMP_DIR=/tmp/bazel-shared
SHARED_TMP_DIR="${ENVOY_SHARED_TMP_DIR:-/tmp/bazel-shared}"
mkdir -p "${SHARED_TMP_DIR}"
chmod +rwx "${SHARED_TMP_DIR}"
VOLUMES+=(-v "${SHARED_TMP_DIR}":"${SHARED_TMP_DIR}")
@@ -111,7 +111,6 @@ if [[ -n "${ENVOY_DOCKER_PULL}" ]]; then
time docker pull "${ENVOY_BUILD_IMAGE}"
fi


# Since we specify an explicit hash, docker-run will pull from the remote repo if missing.
docker run --rm \
"${ENVOY_DOCKER_OPTIONS[@]}" \
@@ -133,10 +132,9 @@ docker run --rm \
-e DOCKERHUB_PASSWORD \
-e ENVOY_STDLIB \
-e BUILD_REASON \
-e BAZEL_NO_CACHE_TEST_RESULTS \
-e BAZEL_REMOTE_INSTANCE \
-e GOOGLE_BES_PROJECT_ID \
-e GCP_SERVICE_ACCOUNT_KEY \
-e GCP_SERVICE_ACCOUNT_KEY_PATH \
-e NUM_CPUS \
-e ENVOY_BRANCH \
-e ENVOY_RBE \
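The run_envoy_docker.sh change generalizes the docker-in-docker shared directory: whenever ENVOY_SHARED_TMP_DIR is set, that directory is mounted at the identical path inside and outside the build container, so the mktemp'd key path produced by the "CI env" step resolves correctly for Bazel running inside the container. A reduced, illustrative sketch of the mount (the image and command are placeholders):

    SHARED_TMP_DIR="${ENVOY_SHARED_TMP_DIR:-/tmp/bazel-shared}"
    mkdir -p "${SHARED_TMP_DIR}"
    chmod +rwx "${SHARED_TMP_DIR}"

    # Same path on both sides of the mount: a file created at this path inside the
    # container is visible at the same path on the host, and vice versa.
    docker run --rm \
        -v "${SHARED_TMP_DIR}:${SHARED_TMP_DIR}" \
        -e GCP_SERVICE_ACCOUNT_KEY_PATH \
        ubuntu:22.04 \
        ls -l "${SHARED_TMP_DIR}"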
33 changes: 9 additions & 24 deletions ci/setup_cache.sh
@@ -14,37 +14,22 @@ if [[ -n "${GCP_SERVICE_ACCOUNT_KEY:0:1}" ]]; then

trap gcp_service_account_cleanup EXIT

echo "Setting GCP_SERVICE_ACCOUNT_KEY is deprecated, please place your decoded GCP key in " \
"an exported/shared tmp directory and add it to BAZEL_BUILD_EXTRA_OPTIONS, eg: " >&2
# shellcheck disable=SC2086
echo "$ export ENVOY_SHARED_TMP_DIR=/tmp/envoy-shared" \
"$ ENVOY_RBE_KEY_PATH=$(mktemp -p \"${ENVOY_SHARED_TMP_DIR}\" -t gcp_service_account.XXXXXX.json)" \
"$ bash -c 'echo \"$(GcpServiceAccountKey)\"' | base64 --decode > \"${ENVOY_RBE_KEY_PATH}\"" \
"$ export BAZEL_BUILD_EXTRA_OPTIONS+=\" --google_credentials=${ENVOY_RBE_KEY_PATH}\"" >&2
bash -c 'echo "${GCP_SERVICE_ACCOUNT_KEY}"' | base64 --decode > "${GCP_SERVICE_ACCOUNT_KEY_FILE}"

export BAZEL_BUILD_EXTRA_OPTIONS+=" --google_credentials=${GCP_SERVICE_ACCOUNT_KEY_FILE}"

if [[ -n "${GOOGLE_BES_PROJECT_ID}" ]]; then
export BAZEL_BUILD_EXTRA_OPTIONS+=" --config=rbe-google-bes --bes_instance_name=${GOOGLE_BES_PROJECT_ID}"
fi

fi

if [[ -n "${BAZEL_REMOTE_CACHE}" ]]; then
echo "Setting BAZEL_REMOTE_CACHE is deprecated, please use BAZEL_BUILD_EXTRA_OPTIONS " \
"or use a user.bazelrc config " >&2
export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_cache=${BAZEL_REMOTE_CACHE}"
echo "Set up bazel remote read/write cache at ${BAZEL_REMOTE_CACHE}."

if [[ -z "${ENVOY_RBE}" ]]; then
export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_timeout=600"
echo "using local build cache."
# Normalize branches - `release/vX.xx`, `vX.xx`, `vX.xx.x` -> `vX.xx`
TARGET_BRANCH="${CI_TARGET_BRANCH}"
if [[ "$TARGET_BRANCH" =~ ^origin/ ]]; then
TARGET_BRANCH=$(echo "$TARGET_BRANCH" | cut -d/ -f2-)
fi
BRANCH_NAME="$(echo "${TARGET_BRANCH}" | cut -d/ -f2 | cut -d. -f-2)"
if [[ "$BRANCH_NAME" == "merge" ]]; then
# Manually run PR commit - there is no easy way of telling which branch
# it is, so just set it to `main` - otherwise it tries to cache as `branch/merge`
BRANCH_NAME=main
fi
BAZEL_REMOTE_INSTANCE="branch/${BRANCH_NAME}"
fi

if [[ -n "${BAZEL_REMOTE_INSTANCE}" ]]; then
export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_instance_name=${BAZEL_REMOTE_INSTANCE}"
echo "instance_name: ${BAZEL_REMOTE_INSTANCE}."
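With the pipeline now assembling the flags itself, ci/setup_cache.sh keeps only deprecated fallbacks: if BAZEL_REMOTE_CACHE (or GCP_SERVICE_ACCOUNT_KEY) is still set it warns, appends the corresponding options, and leaves the instance name to whoever exported BAZEL_REMOTE_INSTANCE. In outline (the cache endpoint is illustrative, normally supplied by the environment):

    BAZEL_REMOTE_CACHE="grpc://localhost:9092"
    if [[ -n "${BAZEL_REMOTE_CACHE}" ]]; then
        echo "Setting BAZEL_REMOTE_CACHE is deprecated, please use BAZEL_BUILD_EXTRA_OPTIONS" >&2
        export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_cache=${BAZEL_REMOTE_CACHE}"
        if [[ -z "${ENVOY_RBE}" ]]; then
            # Local cache only: bump the timeout; the instance name now comes from the pipeline.
            export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_timeout=600"
        fi
    fi
    if [[ -n "${BAZEL_REMOTE_INSTANCE}" ]]; then
        export BAZEL_BUILD_EXTRA_OPTIONS+=" --remote_instance_name=${BAZEL_REMOTE_INSTANCE}"
    fi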
20 changes: 5 additions & 15 deletions ci/upload_gcs_artifact.sh
@@ -7,27 +7,17 @@ if [[ -z "${GCS_ARTIFACT_BUCKET}" ]]; then
exit 1
fi

if [[ -z "${GCP_SERVICE_ACCOUNT_KEY}" ]]; then
echo "GCP key is not set, not uploading artifacts."
exit 1
fi

read -ra BAZEL_STARTUP_OPTIONS <<< "${BAZEL_STARTUP_OPTION_LIST:-}"
read -ra BAZEL_BUILD_OPTIONS <<< "${BAZEL_BUILD_OPTION_LIST:-}"

remove_key () {
rm -rf "$KEYFILE"
}

trap remove_key EXIT

# Fail when service account key is not specified
KEYFILE="$(mktemp)"
bash -c 'echo ${GCP_SERVICE_ACCOUNT_KEY}' | base64 --decode > "$KEYFILE"
if [[ ! -s "${GCP_SERVICE_ACCOUNT_KEY_PATH}" ]]; then
echo "GCP key is not set, not uploading artifacts."
exit 1
fi

cat <<EOF > ~/.boto
[Credentials]
gs_service_key_file=${KEYFILE}
gs_service_key_file=${GCP_SERVICE_ACCOUNT_KEY_PATH}
EOF

SOURCE_DIRECTORY="$1"
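Finally, ci/upload_gcs_artifact.sh no longer decodes the service-account key itself; it simply requires that GCP_SERVICE_ACCOUNT_KEY_PATH point at an existing, non-empty file (`[[ ! -s ... ]]` fails for an unset path, a missing file, or an empty decode) and hands that path to gsutil's boto configuration. A small sketch of the guard, with an illustrative key path:

    GCP_SERVICE_ACCOUNT_KEY_PATH="${GCP_SERVICE_ACCOUNT_KEY_PATH:-/tmp/bazel-shared/gcp_service_account.json}"

    # -s is true only for a file that exists and has a size greater than zero.
    if [[ ! -s "${GCP_SERVICE_ACCOUNT_KEY_PATH}" ]]; then
        echo "GCP key is not set, not uploading artifacts."
        exit 1
    fi

    # Point gsutil at the key file (equivalent to the heredoc in the script).
    printf '[Credentials]\ngs_service_key_file=%s\n' "${GCP_SERVICE_ACCOUNT_KEY_PATH}" > ~/.boto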

