
Commit f7f608e

feat(thirdparty): bump Hadoop to 3.3.6 (#2037)
1 parent 297d605 commit f7f608e

File tree: 17 files changed, +173 -86 lines

.github/actions/rebuild_thirdparty_if_needed/action.yaml

Lines changed: 4 additions & 2 deletions
@@ -42,8 +42,10 @@ runs:
       cmake --build build/ -j $(nproc)
       rm -rf build/Build build/Download/[a-y]* build/Source/[a-g]* build/Source/[i-q]* build/Source/[s-z]*
       find ./ -name '*CMakeFiles*' -type d -exec rm -rf "{}" +
-      ../build_tools/download_hadoop.sh hadoop-bin
-      ../build_tools/download_zk.sh zookeeper-bin
+      ../admin_tools/download_hadoop.sh hadoop-bin
+      ../admin_tools/download_zk.sh zookeeper-bin
       rm -rf hadoop-bin/share/doc
       rm -rf zookeeper-bin/docs
+      mv hadoop-bin ..
+      mv zookeeper-bin ..
     shell: bash

.github/actions/upload_artifact/action.yaml

Lines changed: 0 additions & 2 deletions
@@ -21,8 +21,6 @@ runs:
   steps:
     - name: Tar files
       run: |
-        mv thirdparty/hadoop-bin ./
-        mv thirdparty/zookeeper-bin ./
        rm -rf thirdparty
        # The following operations are tricky, these directories and files don't exist if not build with '--test'.
        # When build binaries for client tests, it's not needed to add '--test'.

.licenserc.yaml

Lines changed: 1 addition & 0 deletions
@@ -68,6 +68,7 @@ header:
     - 'src/replica/duplication/test/log.1.0.handle_real_private_log2'
     - 'src/replica/duplication/test/log.1.0.all_loaded_are_write_empties'
     # Used for patches for thirdparties.
+    - 'thirdparty/fix_hdfs_native_client.patch'
     - 'thirdparty/fix_jemalloc_for_m1_on_macos.patch'
     - 'thirdparty/fix_libevent_for_macos.patch'
     - 'thirdparty/fix_rocksdb-cmake-PORTABLE-option.patch'
File renamed without changes.

build_tools/download_hadoop.sh renamed to admin_tools/download_hadoop.sh

Lines changed: 16 additions & 5 deletions
@@ -19,13 +19,24 @@

 set -e

-CWD=$(cd $(dirname $0) && pwd)
+CWD=$(cd "$(dirname "$0")" && pwd)

 if [ $# -ge 1 ]; then
     HADOOP_BIN_PATH=$1
 fi

-HADOOP_VERSION=2.8.4
-HADOOP_DIR_NAME=hadoop-${HADOOP_VERSION}
-HADOOP_PACKAGE_MD5="b30b409bb69185003b3babd1504ba224"
-${CWD}/download_package.sh ${HADOOP_DIR_NAME} ${HADOOP_PACKAGE_MD5} ${HADOOP_BIN_PATH}
+HADOOP_VERSION="hadoop-3.3.6"
+arch_output=$(arch)
+if [ "$arch_output"x == "aarch64"x ]; then
+    HADOOP_PACKAGE_MD5="369f899194a920e0d1c3c3bc1718b3b5"
+    HADOOP_BASE_NAME=${HADOOP_VERSION}-"$(arch)"
+else
+    if [ "$arch_output"x != "x86_64"x ]; then
+        echo "WARNING: unrecognized CPU architecture '$arch_output', use 'x86_64' as default"
+    fi
+    HADOOP_PACKAGE_MD5="1cbe1214299cd3bd282d33d3934b5cbd"
+    HADOOP_BASE_NAME=${HADOOP_VERSION}
+fi
+
+DOWNLOAD_BASE_URL="https://mirrors.aliyun.com/apache/hadoop/common/${HADOOP_VERSION}/"
+"${CWD}"/download_package.sh "${HADOOP_BASE_NAME}" ${HADOOP_PACKAGE_MD5} "${HADOOP_BIN_PATH}" ${DOWNLOAD_BASE_URL} "${HADOOP_VERSION}"

build_tools/download_package.sh renamed to admin_tools/download_package.sh

Lines changed: 31 additions & 21 deletions
@@ -21,59 +21,69 @@ set -e

 if [ $# -lt 2 ]; then
     echo "Invalid arguments !"
-    echo "USAGE: $0 <DIR_NAME> <PACKAGE_MD5> [TARGET_PATH]"
+    echo "USAGE: $0 <PACKAGE_BASE_NAME> <PACKAGE_MD5> [TARGET_PATH]"
     exit 1
 fi

-DIR_NAME=$1
+PACKAGE_BASE_NAME=$1
 PACKAGE_MD5=$2

 if [ $# -lt 3 ]; then
-    echo "TARGET_PATH is not provided, thus do not try to download ${DIR_NAME}"
+    echo "TARGET_PATH is not provided, thus do not try to download ${PACKAGE_BASE_NAME}"
     exit 0
 fi

 TARGET_PATH=$3
-if [ -d ${TARGET_PATH} ]; then
-    echo "TARGET_PATH ${TARGET_PATH} has existed, thus do not try to download ${DIR_NAME}"
+if [ -d "${TARGET_PATH}" ]; then
+    echo "TARGET_PATH ${TARGET_PATH} has existed, thus do not try to download ${PACKAGE_BASE_NAME}"
     exit 0
 fi

-PACKAGE_NAME=${DIR_NAME}.tar.gz
-if [ ! -f ${PACKAGE_NAME} ]; then
-    echo "Downloading ${DIR_NAME}..."
+DEFAULT_DOWNLOAD_BASE_URL="https://pegasus-thirdparty-package.oss-cn-beijing.aliyuncs.com/"
+if [ $# -ge 4 ]; then
+    DEFAULT_DOWNLOAD_BASE_URL=$4
+fi
+
+DIR_NAME=${PACKAGE_BASE_NAME}
+if [ $# -ge 5 ]; then
+    DIR_NAME=$5
+fi
+
+PACKAGE_NAME=${PACKAGE_BASE_NAME}.tar.gz
+if [ ! -f "${PACKAGE_NAME}" ]; then
+    echo "Downloading ${PACKAGE_NAME} ..."

-    DOWNLOAD_URL="https://pegasus-thirdparty-package.oss-cn-beijing.aliyuncs.com/${PACKAGE_NAME}"
-    if ! wget -T 10 -t 5 ${DOWNLOAD_URL}; then
-        echo "ERROR: download ${DIR_NAME} failed"
+    DOWNLOAD_URL=${DEFAULT_DOWNLOAD_BASE_URL}${PACKAGE_NAME}
+    if ! wget -q -T 10 -t 5 "${DOWNLOAD_URL}"; then
+        echo "ERROR: download ${PACKAGE_NAME} failed"
         exit 1
     fi

-    if [ `md5sum ${PACKAGE_NAME} | awk '{print$1}'` != ${PACKAGE_MD5} ]; then
+    if [ "$(md5sum "${PACKAGE_NAME}" | awk '{print$1}')" != "${PACKAGE_MD5}" ]; then
        echo "Check file ${PACKAGE_NAME} md5sum failed!"
        exit 1
     fi
 fi

-rm -rf ${DIR_NAME}
+rm -rf "${DIR_NAME}"

-echo "Decompressing ${DIR_NAME}..."
-if ! tar xf ${PACKAGE_NAME}; then
-    echo "ERROR: decompress ${DIR_NAME} failed"
-    rm -f ${PACKAGE_NAME}
+echo "Decompressing ${PACKAGE_NAME} ..."
+if ! tar xf "${PACKAGE_NAME}"; then
+    echo "ERROR: decompress ${PACKAGE_NAME} failed"
+    rm -f "${PACKAGE_NAME}"
     exit 1
 fi

-rm -f ${PACKAGE_NAME}
+rm -f "${PACKAGE_NAME}"

-if [ ! -d ${DIR_NAME} ]; then
+if [ ! -d "${DIR_NAME}" ]; then
     echo "ERROR: ${DIR_NAME} does not exist"
     exit 1
 fi

-if [ -d ${TARGET_PATH} ]; then
+if [ -d "${TARGET_PATH}" ]; then
     echo "TARGET_PATH ${TARGET_PATH} has been generated, which means it and ${DIR_NAME} are the same dir thus do not do mv any more"
     exit 0
 fi

-mv ${DIR_NAME} ${TARGET_PATH}
+mv "${DIR_NAME}" "${TARGET_PATH}"
File renamed without changes.
File renamed without changes.
File renamed without changes.

build_tools/pack_server.sh

Lines changed: 32 additions & 22 deletions
@@ -148,30 +148,40 @@ pack_server_lib crypto $separate_servers
 pack_server_lib ssl $separate_servers

 # Pack hadoop-related files.
-# If you want to use hdfs service to backup/restore/bulkload pegasus tables,
-# you need to set env ${HADOOP_HOME}, edit ${HADOOP_HOME}/etc/hadoop/core-site.xml,
-# and specify the keytab file.
-if [ -n "$HADOOP_HOME" ] && [ -n "$keytab_file" ]; then
-    mkdir -p ${pack}/hadoop
-    copy_file $keytab_file ${pack}/hadoop
-    copy_file ${HADOOP_HOME}/etc/hadoop/core-site.xml ${pack}/hadoop
-    if [ -d $HADOOP_HOME/share/hadoop ]; then
-        for f in ${HADOOP_HOME}/share/hadoop/common/lib/*.jar; do
-            copy_file $f ${pack}/hadoop
-        done
-        for f in ${HADOOP_HOME}/share/hadoop/common/*.jar; do
-            copy_file $f ${pack}/hadoop
-        done
-        for f in ${HADOOP_HOME}/share/hadoop/hdfs/lib/*.jar; do
-            copy_file $f ${pack}/hadoop
-        done
-        for f in ${HADOOP_HOME}/share/hadoop/hdfs/*.jar; do
-            copy_file $f ${pack}/hadoop
-        done
+# If you want to use hdfs service to backup/restore/bulkload pegasus tables, you need to
+# set env ${HADOOP_HOME} to the proper directory where contains Hadoop *.jar files.
+if [ -n "$HADOOP_HOME" ]; then
+    # Verify one of the jars.
+    arch_output=$(arch)
+    if [ "$arch_output"x == "aarch64"x ]; then
+        HDFS_JAR_MD5="fcc09dbed936cd8673918774cc3ead6b"
+    else
+        if [ "$arch_output"x != "x86_64"x ]; then
+            echo "WARNING: unrecognized CPU architecture '$arch_output', use 'x86_64' as default"
+        fi
+        HDFS_JAR_MD5="f67f3a5613c885e1622b1056fd94262b"
     fi
+    HDFS_JAR=${HADOOP_HOME}/share/hadoop/hdfs/hadoop-hdfs-3.3.6.jar
+    if [ "$(md5sum "${HDFS_JAR}" | awk '{print$1}')" != "${HDFS_JAR_MD5}" ]; then
+        echo "check file ${HDFS_JAR} md5sum failed!"
+        exit 1
+    fi
+    # Pack the jars.
+    mkdir -p ${pack}/hadoop
+    for f in ${HADOOP_HOME}/share/hadoop/common/lib/*.jar; do
+        copy_file $f ${pack}/hadoop
+    done
+    for f in ${HADOOP_HOME}/share/hadoop/common/*.jar; do
+        copy_file $f ${pack}/hadoop
+    done
+    for f in ${HADOOP_HOME}/share/hadoop/hdfs/lib/*.jar; do
+        copy_file $f ${pack}/hadoop
+    done
+    for f in ${HADOOP_HOME}/share/hadoop/hdfs/*.jar; do
+        copy_file $f ${pack}/hadoop
+    done
 else
-    echo "Couldn't find env ${HADOOP_HOME} or no valid keytab file was specified,
-    hadoop-related files were not packed."
+    echo "Couldn't find env HADOOP_HOME, hadoop-related files were not packed."
 fi

 DISTRIB_ID=$(cat /etc/*-release | grep DISTRIB_ID | awk -F'=' '{print $2}')
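With the keytab requirement dropped, packing HDFS support only needs HADOOP_HOME to point at an unpacked Hadoop 3.3.6 distribution. A minimal sketch (the install path is illustrative, not mandated by the script):

$ export HADOOP_HOME=/opt/hadoop-3.3.6   # illustrative; must contain share/hadoop/**/*.jar
$ ./run.sh pack_server                   # now sources admin_tools/config_hdfs.sh first (see the run.sh diff below)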

build_tools/pack_tools.sh

Lines changed: 2 additions & 5 deletions
@@ -156,13 +156,10 @@ chmod -x ${pack}/lib/*

 mkdir -p ${pack}/admin_tools
 copy_file ./admin_tools/* ${pack}/admin_tools/
+copy_file ./admin_tools/download_*.sh ${pack}/admin_tools/
+copy_file ./admin_tools/*_zk.sh ${pack}/admin_tools/
 chmod +x ${pack}/admin_tools/*.sh

-mkdir -p ${pack}/build_tools
-copy_file ./build_tools/download_*.sh ${pack}/build_tools/
-copy_file ./build_tools/*_zk.sh ${pack}/build_tools/
-chmod +x ${pack}/build_tools/*.sh
-
 mkdir -p ${pack}/src/server
 copy_file ./src/server/*.ini ${pack}/src/server/

docker/thirdparties-bin/Dockerfile

Lines changed: 2 additions & 2 deletions
@@ -35,8 +35,8 @@ RUN git clone --depth=1 --branch=${GITHUB_BRANCH} ${GITHUB_REPOSITORY_URL} \
     && unzip /root/thirdparties-src.zip -d . \
     && cmake -DCMAKE_BUILD_TYPE=Release -DROCKSDB_PORTABLE=${ROCKSDB_PORTABLE} -DUSE_JEMALLOC=${USE_JEMALLOC} -B build/ . \
     && cmake --build build/ -j $(($(nproc)/2+1)) \
-    && ../build_tools/download_hadoop.sh ${HADOOP_BIN_PATH} \
-    && ../build_tools/download_zk.sh ${ZOOKEEPER_BIN_PATH} \
+    && ../admin_tools/download_hadoop.sh ${HADOOP_BIN_PATH} \
+    && ../admin_tools/download_zk.sh ${ZOOKEEPER_BIN_PATH} \
    && zip -r ~/thirdparties-bin.zip output/ build/Source/rocksdb/cmake build/Source/http-parser build/Source/hadoop build/Download/zookeeper ${HADOOP_BIN_PATH} ${ZOOKEEPER_BIN_PATH} \
    && cd ~ \
    && rm -rf incubator-pegasus;
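A sketch of building this image, assuming the variables referenced in the RUN step (GITHUB_BRANCH, ROCKSDB_PORTABLE, USE_JEMALLOC, HADOOP_BIN_PATH, ZOOKEEPER_BIN_PATH) are declared as build ARGs with defaults; the tag and branch value here are illustrative:

$ docker build -f docker/thirdparties-bin/Dockerfile \
      --build-arg GITHUB_BRANCH=master \
      -t pegasus-thirdparties-bin .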

run.sh

Lines changed: 10 additions & 7 deletions
@@ -28,12 +28,13 @@ export REPORT_DIR="$ROOT/test_report"
 export THIRDPARTY_ROOT=${PEGASUS_THIRDPARTY_ROOT:-"$ROOT/thirdparty"}
 ARCH_TYPE=''
 arch_output=$(arch)
-if [ "$arch_output"x == "x86_64"x ]; then
-    ARCH_TYPE="amd64"
-elif [ "$arch_output"x == "aarch64"x ]; then
+if [ "$arch_output"x == "aarch64"x ]; then
     ARCH_TYPE="aarch64"
 else
-    echo "WARNING: unsupported CPU architecture '$arch_output', use 'x86_64' as default"
+    if [ "$arch_output"x != "x86_64"x ]; then
+        echo "WARNING: unrecognized CPU architecture '$arch_output', use 'x86_64' as default"
+    fi
+    ARCH_TYPE="amd64"
 fi
 export LD_LIBRARY_PATH=${JAVA_HOME}/jre/lib/${ARCH_TYPE}:${JAVA_HOME}/jre/lib/${ARCH_TYPE}/server:${BUILD_LATEST_DIR}/output/lib:${THIRDPARTY_ROOT}/output/lib:${LD_LIBRARY_PATH}
 # Disable AddressSanitizerOneDefinitionRuleViolation, see https://github.com/google/sanitizers/issues/1017 for details.

@@ -656,7 +657,7 @@ function run_start_zk()
         fi
     fi

-    INSTALL_DIR="$INSTALL_DIR" PORT="$PORT" $ROOT/build_tools/start_zk.sh
+    INSTALL_DIR="$INSTALL_DIR" PORT="$PORT" $ROOT/admin_tools/start_zk.sh
 }

 #####################

@@ -693,7 +694,7 @@ function run_stop_zk()
         esac
         shift
     done
-    INSTALL_DIR="$INSTALL_DIR" $ROOT/build_tools/stop_zk.sh
+    INSTALL_DIR="$INSTALL_DIR" $ROOT/admin_tools/stop_zk.sh
 }

 #####################

@@ -730,7 +731,7 @@ function run_clear_zk()
         esac
         shift
     done
-    INSTALL_DIR="$INSTALL_DIR" $ROOT/build_tools/clear_zk.sh
+    INSTALL_DIR="$INSTALL_DIR" $ROOT/admin_tools/clear_zk.sh
 }

 #####################

@@ -2105,6 +2106,8 @@ case $cmd in
         ;;
     pack_server)
         shift
+        # source the config_hdfs.sh to get the HADOOP_HOME.
+        source "${ROOT}"/admin_tools/config_hdfs.sh
         PEGASUS_ROOT=$ROOT ./build_tools/pack_server.sh $*
         ;;
     pack_client)
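The ZooKeeper helpers only moved from build_tools/ to admin_tools/, so the existing run.sh subcommands keep working unchanged; a quick smoke sketch, assuming run.sh's usual subcommand dispatch:

$ ./run.sh start_zk    # now dispatches to admin_tools/start_zk.sh
$ ./run.sh stop_zk     # now dispatches to admin_tools/stop_zk.sh
$ ./run.sh clear_zk    # now dispatches to admin_tools/clear_zk.sh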

src/sample/run.sh

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ if [ "$arch_output"x == "x86_64"x ]; then
 elif [ "$arch_output"x == "aarch64"x ]; then
     ARCH_TYPE="aarch64"
 else
-    echo "WARNING: unsupported CPU architecture '$arch_output', use 'x86_64' as default"
+    echo "WARNING: unrecognized CPU architecture '$arch_output', use 'x86_64' as default"
 fi
 export LD_LIBRARY_PATH=${JAVA_HOME}/jre/lib/${ARCH_TYPE}:${JAVA_HOME}/jre/lib/${ARCH_TYPE}/server:${PEGASUS_THIRDPARTY_ROOT}/output/lib:$(pwd)/../../lib:${LD_LIBRARY_PATH}
3939

src/test/function_test/recovery/test_recovery.cpp

Lines changed: 2 additions & 0 deletions
@@ -252,6 +252,8 @@ TEST_F(recovery_test, recovery)
     auto nodes = get_rpc_host_port_list({34801, 34802, 34803});
     ASSERT_EQ(dsn::ERR_OK, ddl_client_->do_recovery(nodes, 30, false, false, std::string()));

+    ASSERT_NO_FATAL_FAILURE(wait_table_healthy(table_name_));
+
     // then wait the apps to ready
     ASSERT_EQ(dsn::ERR_OK,
               ddl_client_->create_app(table_name_, "pegasus", partition_count_, 3, {}, false));

thirdparty/CMakeLists.txt

Lines changed: 40 additions & 19 deletions
@@ -119,17 +119,51 @@ ExternalProject_Add(gperftools
                     DOWNLOAD_NO_PROGRESS true
 )

+ExternalProject_Add(abseil
+                    URL ${OSS_URL_PREFIX}/abseil-20230802.1.zip
+                        https://github.com/abseil/abseil-cpp/archive/refs/tags/20230802.1.zip
+                    URL_MD5 5c6193dbc82834f8e762c6a28c9cc615
+                    CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${TP_OUTPUT}
+                               -DCMAKE_POSITION_INDEPENDENT_CODE=ON
+                               -DABSL_FIND_GOOGLETEST=OFF
+                               -DCMAKE_CXX_STANDARD=17
+                    DOWNLOAD_EXTRACT_TIMESTAMP true
+                    DOWNLOAD_NO_PROGRESS true
+)
+
+ExternalProject_Add(protobuf
+                    URL https://github.com/protocolbuffers/protobuf/archive/refs/tags/v27.0.tar.gz
+                    URL_MD5 c96aaf02c8acea549d65bb7b2d549bf6
+                    CMAKE_ARGS -DCMAKE_BUILD_TYPE=release
+                               -Dprotobuf_BUILD_TESTS=OFF
+                               -Dprotobuf_BUILD_PROTOC_BINARIES=ON
+                               -Dprotobuf_BUILD_LIBUPB=ON
+                               -Dprotobuf_ABSL_PROVIDER=package
+                               -DBUILD_SHARED_LIBS=ON
+                               -DBUILD_SHARED_HDFSPP=ON
+                               -DHDFSPP_LIBRARY_ONLY=ON
+                               -DCMAKE_POSITION_INDEPENDENT_CODE=ON
+                               -DCMAKE_CXX_STANDARD=17
+                               -DABSL_ROOT_DIR=${TP_OUTPUT}
+                               -DCMAKE_INSTALL_PREFIX=${TP_OUTPUT}
+                               -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
+                               -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
+                    DEPENDS abseil
+                    DOWNLOAD_EXTRACT_TIMESTAMP true
+                    DOWNLOAD_NO_PROGRESS true
+)
+
 set(HDFS_CLIENT_DIR "hadoop-hdfs-project/hadoop-hdfs-native-client")
 ExternalProject_Add(hadoop
-                    URL ${OSS_URL_PREFIX}/hadoop-release-2.8.4.tar.gz
-                        https://github.com/apache/hadoop/archive/refs/tags/rel/release-2.8.4.tar.gz
-                    URL_MD5 a1be737d4bff14923689619ab6545a96
-                    PATCH_COMMAND ""
+                    URL https://mirrors.aliyun.com/apache/hadoop/common/hadoop-3.3.6/hadoop-3.3.6-src.tar.gz
+                    URL_MD5 285c07d8ad2c837c8ee04a4fa49c73cd
+                    PATCH_COMMAND patch -p1 < ${TP_DIR}/fix_hdfs_native_client.patch
                     COMMAND cd ${HDFS_CLIENT_DIR} && mvn package -Pdist,native -DskipTests -Dmaven.javadoc.skip=true -Dtar
-                    COMMAND cd ${HDFS_CLIENT_DIR} && cp -R target/hadoop-hdfs-native-client-2.8.4/include/. ${TP_OUTPUT}/include/hdfs && cp -R target/hadoop-hdfs-native-client-2.8.4/lib/native/. ${TP_OUTPUT}/lib
+                    COMMAND cd ${HDFS_CLIENT_DIR} && cp -R target/hadoop-hdfs-native-client-3.3.6/include/. ${TP_OUTPUT}/include/hdfs && cp -R target/hadoop-hdfs-native-client-3.3.6/lib/native/. ${TP_OUTPUT}/lib
                     CONFIGURE_COMMAND ""
                     BUILD_COMMAND ""
                     INSTALL_COMMAND ""
+                    DEPENDS protobuf
                     DOWNLOAD_EXTRACT_TIMESTAMP true
                     DOWNLOAD_NO_PROGRESS true
 )

@@ -305,18 +339,6 @@ ExternalProject_Add(nlohmann_json
                     DOWNLOAD_NO_PROGRESS true
 )

-ExternalProject_Add(abseil
-                    URL ${OSS_URL_PREFIX}/abseil-20230802.1.zip
-                        https://github.com/abseil/abseil-cpp/archive/refs/tags/20230802.1.zip
-                    URL_MD5 5c6193dbc82834f8e762c6a28c9cc615
-                    CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${TP_OUTPUT}
-                               -DCMAKE_POSITION_INDEPENDENT_CODE=ON
-                               -DABSL_FIND_GOOGLETEST=OFF
-                               -DCMAKE_CXX_STANDARD=17
-                    DOWNLOAD_EXTRACT_TIMESTAMP true
-                    DOWNLOAD_NO_PROGRESS true
-)
-
 ExternalProject_Add(s2geometry
                     URL ${OSS_URL_PREFIX}/s2geometry-0.10.0.tar.gz
                         https://github.com/google/s2geometry/archive/refs/tags/v0.10.0.tar.gz

@@ -359,8 +381,7 @@ set(SNAPPY_OPTIONS
     -DSNAPPY_FUZZING_BUILD=OFF
     -DSNAPPY_INSTALL=ON)
 execute_process(COMMAND arch OUTPUT_VARIABLE ARCH_NAME OUTPUT_STRIP_TRAILING_WHITESPACE)
-message(STATUS "ARCH_NAME = ${ARCH_NAME}")
-if (ARCH_NAME EQUAL "x86_64")
+if (ARCH_NAME STREQUAL "x86_64")
     set(SNAPPY_OPTIONS
         ${SNAPPY_OPTIONS}
         -DSNAPPY_REQUIRE_AVX=ON
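End to end, the bumped dependency chain (abseil, then protobuf, then the Hadoop 3.3.6 native client) builds with the same steps the CI action and Dockerfile above use; a minimal sketch from a fresh checkout:

$ cd thirdparty
$ cmake -DCMAKE_BUILD_TYPE=Release -B build/ .
$ cmake --build build/ -j $(nproc)               # hadoop's native client is built via maven inside the ExternalProject
$ ../admin_tools/download_hadoop.sh hadoop-bin   # fetch the prebuilt Hadoop 3.3.6 runtime separately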
