name: manual_trigger_build
on: # yamllint disable-line rule:truthy
  workflow_dispatch:
    inputs:
      build_type:
        description: 'Build type'
        required: false
        default: 'release'
        type: choice
        options:
          - release
          - address
          - thread
          - memory
          - undefined
      arch:
        description: 'Hardware architecture'
        required: true
        default: x64
        type: choice
        options:
          - x64
          - arm
      tag:
        description: 'Docker image tag'
        required: true
        type: string
        default: 'manual'
      build_for_macOS:
        description: 'Build macOS binary and upload it to the S3 bucket'
        required: false
        default: 'false'
        type: choice
        options:
          - true
          - false
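# Example manual trigger (illustrative sketch; assumes the GitHub CLI is available and
# that this file lives at .github/workflows/manual_trigger_build.yml):
#   gh workflow run manual_trigger_build.yml \
#     -f build_type=address -f arch=x64 -f tag=manual -f build_for_macOS=false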
jobs:
  prepare_sanitizer:
    runs-on: ubuntu-latest
    if: github.event.inputs.build_for_macOS == 'false' && github.event.inputs.build_type != 'release'
    outputs:
      command: |
        cd $GITHUB_WORKSPACE
        # git config
        git config user.name "proton-robot"
        git config user.email "proton_robot@timeplus.io"
        # prepare build cache
        aws s3 cp --no-progress s3://tp-internal/proton/ci_cache/cache${SANITIZER:+_}$SANITIZER${ARCH:+_}$ARCH.tar.gz .
        mkdir $GITHUB_WORKSPACE/ccache
        tar -zxf ./cache${SANITIZER:+_}$SANITIZER${ARCH:+_}$ARCH.tar.gz -C $GITHUB_WORKSPACE/ccache
        rm cache${SANITIZER:+_}$SANITIZER${ARCH:+_}$ARCH.tar.gz
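        # Note (illustrative): ${SANITIZER:+_}$SANITIZER and ${ARCH:+_}$ARCH expand to "_<value>"
        # only when the variable is non-empty, so the cache object resolves to names such as
        # cache_address_x64.tar.gz; SANITIZER and ARCH are expected to be set by run_command.yml.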
        # compiling
        ./docker/packager/packager --package-type binary --docker-image-version clang-19 --sanitizer $SANITIZER --proton-build --enable-proton-local --cache ccache --ccache_dir $GITHUB_WORKSPACE/ccache --output-dir $GITHUB_WORKSPACE/output
        # clear compiling footprint
        rm -rf $GITHUB_WORKSPACE/ccache
        rm -rf build_docker
        # build unit test docker image
        cp $GITHUB_WORKSPACE/output/unit_tests_dbms $GITHUB_WORKSPACE/docker/test/proton_unit_test/
        cd $GITHUB_WORKSPACE/docker/test/proton_unit_test/
        docker build . -t ghcr.io/timeplus-io/proton-unit-test:$TAG
        docker push ghcr.io/timeplus-io/proton-unit-test:$TAG
        docker run --name static-server -p 8080:80 -v $GITHUB_WORKSPACE/output:/usr/share/nginx/html:ro -d nginx
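        # The nginx container serves $GITHUB_WORKSPACE/output on localhost:8080 so the docker
        # build below (run with --network host) can fetch the freshly built binary via
        # single_binary_location_url instead of copying it into the build context.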
        cd $GITHUB_WORKSPACE/docker/server
        # prepare files to be copied to the image
        mkdir -p resources/protos/google/protobuf
        cp -r $GITHUB_WORKSPACE/contrib/google-protobuf/src/google/protobuf/*.proto ./resources/protos/google/protobuf/
        rm -rf resources/protos/google/protobuf/unittest_*
        # build docker image
        docker build . --network host --build-arg single_binary_location_url=http://localhost:8080/proton -t ghcr.io/timeplus-io/proton:$TAG
        # push docker image
        docker push ghcr.io/timeplus-io/proton:$TAG
echo "docker push ghcr.io/timeplus-io/proton:$TAG" | ||
    steps:
      - name: display command
        run: |
          echo 'command: ${{ steps.set_command.outputs.command }}'
  prepare_release:
    runs-on: ubuntu-latest
    if: github.event.inputs.build_for_macOS == 'false' && github.event.inputs.build_type == 'release'
    outputs:
      command: |
        cd $GITHUB_WORKSPACE
        # git config
        git config user.name "proton-robot"
        git config user.email "proton_robot@timeplus.io"
        # prepare build cache
        aws s3 cp --no-progress s3://tp-internal/proton/ci_cache/cache${ARCH:+_}$ARCH.tar.gz .
        mkdir $GITHUB_WORKSPACE/ccache
        tar -zxf ./cache${ARCH:+_}$ARCH.tar.gz -C $GITHUB_WORKSPACE/ccache
        rm cache${ARCH:+_}$ARCH.tar.gz
        # compiling
        ./docker/packager/packager --package-type binary --docker-image-version clang-19 --build-type release --proton-build --enable-proton-local --cache ccache --ccache_dir $GITHUB_WORKSPACE/ccache --output-dir $GITHUB_WORKSPACE/output
        # release doesn't build unit_tests_dbms
        # clear compiling footprint
        rm -rf $GITHUB_WORKSPACE/ccache
        rm -rf build_docker
        docker run --name static-server -p 8080:80 -v $GITHUB_WORKSPACE/output:/usr/share/nginx/html:ro -d nginx
        cd $GITHUB_WORKSPACE/docker/server
        # prepare files to be copied to the image
        mkdir -p resources/protos/google/protobuf
        cp -r $GITHUB_WORKSPACE/contrib/google-protobuf/src/google/protobuf/*.proto ./resources/protos/google/protobuf/
        rm -rf resources/protos/google/protobuf/unittest_*
        # build docker image
        docker build . --network host --build-arg single_binary_location_url=http://localhost:8080/proton -t ghcr.io/timeplus-io/proton:$TAG
        # push docker image
        docker push ghcr.io/timeplus-io/proton:$TAG
        echo "docker pull ghcr.io/timeplus-io/proton:$TAG"
    steps:
      - name: display command
        run: |
          echo 'command: ${{ steps.set_command.outputs.command }}'
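  # The prepare_* jobs above only publish the build script as a job output; exactly one of
  # the four build_* jobs below runs it on an EC2 runner via the reusable run_command.yml
  # workflow, selected by the arch and build_type inputs.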
  build_x64:
    needs: prepare_sanitizer
    uses: timeplus-io/proton/.github/workflows/run_command.yml@develop
    if: github.event.inputs.arch == 'x64' && github.event.inputs.build_type != 'release'
    with:
      ec2-instance-type: ${{ vars.X64_INSTANCE_TYPE }}
      ec2-image-id: ${{ vars.X64_AMI }}
      ec2-volume-size: ${{ vars.VOLUME_SIZE }}
      submodules: 'recursive'
      sanitizer: ${{ github.event.inputs.build_type }}
      arch: ${{ vars.X64_ARCH }}
      tag: ${{ github.event.inputs.tag }}
      command: |
        ${{ needs.prepare_sanitizer.outputs.command }}
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_REGION: ${{ secrets.AWS_REGION }}
      AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
      GH_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
  build_x64_release:
    needs: prepare_release
    uses: timeplus-io/proton/.github/workflows/run_command.yml@develop
    if: github.event.inputs.arch == 'x64' && github.event.inputs.build_type == 'release'
    with:
      ec2-instance-type: ${{ vars.X64_INSTANCE_TYPE }}
      ec2-image-id: ${{ vars.X64_AMI }}
      ec2-volume-size: ${{ vars.VOLUME_SIZE }}
      submodules: 'recursive'
      sanitizer: ""
      arch: ${{ vars.X64_ARCH }}
      tag: ${{ github.event.inputs.tag }}
      command: |
        ${{ needs.prepare_release.outputs.command }}
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_REGION: ${{ secrets.AWS_REGION }}
      AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
      GH_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
  build_arm:
    needs: prepare_sanitizer
    uses: timeplus-io/proton/.github/workflows/run_command.yml@develop
    if: github.event.inputs.arch == 'arm' && github.event.inputs.build_type != 'release'
    with:
      ec2-instance-type: ${{ vars.ARM_INSTANCE_TYPE }}
      ec2-image-id: ${{ vars.ARM_AMI }}
      ec2-volume-size: ${{ vars.VOLUME_SIZE }}
      submodules: 'recursive'
      sanitizer: ${{ github.event.inputs.build_type }}
      arch: ${{ vars.ARM_ARCH }}
      tag: ${{ github.event.inputs.tag }}
      command: |
        ${{ needs.prepare_sanitizer.outputs.command }}
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_REGION: ${{ secrets.AWS_REGION }}
      AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
      GH_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
  build_arm_release:
    needs: prepare_release
    uses: timeplus-io/proton/.github/workflows/run_command.yml@develop
    if: github.event.inputs.arch == 'arm' && github.event.inputs.build_type == 'release'
    with:
      ec2-instance-type: ${{ vars.ARM_INSTANCE_TYPE }}
      ec2-image-id: ${{ vars.ARM_AMI }}
      ec2-volume-size: ${{ vars.VOLUME_SIZE }}
      submodules: 'recursive'
      sanitizer: ""
      arch: ${{ vars.ARM_ARCH }}
      tag: ${{ github.event.inputs.tag }}
      command: |
        ${{ needs.prepare_release.outputs.command }}
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_REGION: ${{ secrets.AWS_REGION }}
      AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
      GH_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
  build_native_macOS_arm:
    runs-on: [self-hosted, macOS, ARM64]
    env:
      build_directory: ${{ github.workspace }}/build
      build_type: RelWithDebInfo
    if: github.event.inputs.build_for_macOS == 'true' && github.event.inputs.arch == 'arm'
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.7
        with:
          token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
          fetch-depth: 1
          submodules: true
      - name: Create and Configure Build
        run: |
          mkdir -p ${{ env.build_directory }}
          export CC=$(brew --prefix llvm@19)/bin/clang
          export CXX=$(brew --prefix llvm@19)/bin/clang++
          export PATH=$(brew --prefix llvm@19)/bin:$PATH
          cmake -B ${{ env.build_directory }} -G "Ninja" -DCMAKE_BUILD_TYPE=${{ env.build_type }} -DENABLE_TESTS=OFF -DENABLE_UTILS=OFF -DENABLE_EXAMPLES=OFF -DENABLE_PULSAR=OFF
      - name: Build with Ninja
        run: cmake --build ${{ env.build_directory }}
      - name: Strip and Rename binary
        run: |
          COMMIT_HASH=$(git rev-parse --short HEAD)
          ARCH=$(uname -m)
          ORIGINAL_BINARY=${{ env.build_directory }}/programs/proton
          STRIPPED_BINARY=${{ env.build_directory }}/programs/proton_${COMMIT_HASH}_${ARCH}
          ls -lh $ORIGINAL_BINARY
          /opt/homebrew/opt/llvm@19/bin/llvm-strip $ORIGINAL_BINARY
          mv $ORIGINAL_BINARY $STRIPPED_BINARY
          ls -lh $STRIPPED_BINARY
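          # The stripped binary follows the proton_<short-hash>_<arch> pattern,
          # e.g. proton_1a2b3c4_arm64 on Apple Silicon (hash shown is illustrative).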
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4.0.2
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
      - name: Upload Artifact To S3
        run: |
          COMMIT_HASH=$(git rev-parse --short HEAD)
          ARCH=$(uname -m)
          STRIPPED_BINARY=${{ env.build_directory }}/programs/proton_${COMMIT_HASH}_${ARCH}
          aws s3 cp --no-progress $STRIPPED_BINARY s3://tp-internal/proton/native_build_macOS/
  Build_macOS_X86_64:
    if: github.event.inputs.build_for_macOS == 'true' && github.event.inputs.arch == 'x64'
    uses: timeplus-io/proton-enterprise/.github/workflows/run_command.yml@develop
    with:
      ec2-instance-type: c5.4xlarge
      ec2-image-id: ami-042a37e33a285c22b
      submodules: 'recursive'
      run_mode: 'start'
      command: |
        cd $GITHUB_WORKSPACE
        # git config
        git config user.name "proton-robot"
        git config user.email "proton_robot@timeplus.io"
        mkdir -p $GITHUB_WORKSPACE/ccache
        # Download pre-built v8 binary for cross-compilation
        aws s3 cp --no-progress s3://tp-internal/proton/cross-compile-prebuilt-binary/v8-cmake-x64.tar.gz $GITHUB_WORKSPACE/contrib/v8-cmake/
        tar -zxf $GITHUB_WORKSPACE/contrib/v8-cmake/v8-cmake-x64.tar.gz -C $GITHUB_WORKSPACE/contrib/v8-cmake/
        rm $GITHUB_WORKSPACE/contrib/v8-cmake/v8-cmake-x64.tar.gz
        chmod a+x $GITHUB_WORKSPACE/contrib/v8-cmake/bytecode_builtins_list_generator
        chmod a+x $GITHUB_WORKSPACE/contrib/v8-cmake/mksnapshot
        chmod a+x $GITHUB_WORKSPACE/contrib/v8-cmake/torque
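        # Note: bytecode_builtins_list_generator, mksnapshot and torque are V8 build-time
        # code generators that have to execute on the build host; shipping them prebuilt
        # avoids running target binaries during the Darwin cross-compile (rationale
        # inferred, not stated in the original workflow).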
        # compiling
        ./docker/packager/packager --package-type binary --docker-image-version clang-19 --build-type release --proton-build --disable-python-udf --cache ccache --ccache_dir $GITHUB_WORKSPACE/ccache --output-dir $GITHUB_WORKSPACE/output --compiler clang-19-darwin
        if [ ! -f "$GITHUB_WORKSPACE/output/proton" ]; then
          echo "Compiling proton failed"
          exit 127
        fi
        # get the commit hash and construct the binary name
        COMMIT_HASH=$(git rev-parse --short HEAD)
        PROTON_BINARY=proton-${COMMIT_HASH}-Darwin-x86_64
        echo "PROTON_BINARY=${PROTON_BINARY}" >> $GITHUB_ENV
        # Strip the binary to reduce its size
echo "Stripping the binary..." | ||
ls -lh $GITHUB_WORKSPACE/output/proton | ||
docker run --rm -v $GITHUB_WORKSPACE/output:/work -w /work timeplus/proton-binary-builder:clang-19 /usr/bin/llvm-strip-19 proton | ||
ls -lh $GITHUB_WORKSPACE/output/proton | ||
# Calculate checksum | ||
sha256sum $GITHUB_WORKSPACE/output/proton > $GITHUB_WORKSPACE/output/proton.sha256 | ||
cat $GITHUB_WORKSPACE/output/proton.sha256 | ||
# Rename binary | ||
mv $GITHUB_WORKSPACE/output/proton $GITHUB_WORKSPACE/output/${PROTON_BINARY} | ||
# S3 upload with advanced retry mechanism | ||
max_attempts=5 | ||
attempt=1 | ||
wait_time=15 | ||
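        # With max_attempts=5 and a doubling wait_time, failed attempts back off for
        # 15s, 30s, 60s and 120s before the final try.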
        while [ $attempt -le $max_attempts ]; do
          echo "S3 upload attempt $attempt of $max_attempts"
          if aws s3 cp \
            --no-progress \
            --cli-connect-timeout 30 \
            --cli-read-timeout 600 \
            "$GITHUB_WORKSPACE/output/${PROTON_BINARY}" \
            "s3://tp-internal/proton/cross-compile/macOS-x86/"; then
            echo "Binary upload successful"
            break
          else
            upload_status=$?
            echo "Binary upload failed on attempt $attempt with exit code $upload_status"
            if [ $attempt -eq $max_attempts ]; then
              echo "All binary upload attempts failed"
              exit 1
            fi
            echo "Retrying in $wait_time seconds..."
            sleep $wait_time
            wait_time=$((wait_time * 2)) # exponential backoff
            attempt=$((attempt + 1))
          fi
        done
        echo "Build and upload complete for ${PROTON_BINARY}"
        echo "S3 location: s3://tp-internal/proton/cross-compile/macOS-x86/${PROTON_BINARY}"
    secrets:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_REGION: ${{ secrets.AWS_REGION }}
      AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
      GH_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}