From 0a668aadbb65a9a17c94b524febfbc0ec2da9f4f Mon Sep 17 00:00:00 2001
From: Yancheng Zheng <103552181+anakinxc@users.noreply.github.com>
Date: Wed, 31 Jul 2024 18:07:33 +0800
Subject: [PATCH] Fix build issues (#161)

* Fix macOS build
* cleanup
* fix
* fix
* Fix apsi arm build
* Update yacl
---
 bazel/flatbuffers.BUILD              |  2 ++
 bazel/jsoncpp.BUILD                  |  1 +
 bazel/libzmq.BUILD                   |  1 +
 bazel/log4cplus.BUILD                |  2 ++
 bazel/patches/apsi.patch             | 13 +++++++++++++
 bazel/repositories.bzl               | 12 ++++++++----
 psi/apsi_wrapper/api/sender.cc       |  2 +-
 psi/apsi_wrapper/receiver.cc         | 28 ++++++++++++++--------------
 psi/apsi_wrapper/sender.cc           |  6 +++---
 psi/apsi_wrapper/utils/bucket.cc     |  1 +
 psi/apsi_wrapper/utils/csv_reader.cc |  4 +++-
 psi/apsi_wrapper/utils/sender_db.cc  | 12 ++++++------
 12 files changed, 55 insertions(+), 29 deletions(-)

diff --git a/bazel/flatbuffers.BUILD b/bazel/flatbuffers.BUILD
index c2be94c..a20438e 100644
--- a/bazel/flatbuffers.BUILD
+++ b/bazel/flatbuffers.BUILD
@@ -25,6 +25,8 @@ psi_cmake_external(
     name = "FlatBuffers",
     cache_entries = {
         "FLATBUFFERS_BUILD_TESTS": "OFF",
+        "CMAKE_EXE_LINKER_FLAGS": "-lm",
+        "CMAKE_INSTALL_LIBDIR": "lib",
     },
     lib_source = ":all_srcs",
     out_binaries = ["flatc"],
diff --git a/bazel/jsoncpp.BUILD b/bazel/jsoncpp.BUILD
index 9d63b55..87d99c7 100644
--- a/bazel/jsoncpp.BUILD
+++ b/bazel/jsoncpp.BUILD
@@ -28,6 +28,7 @@ psi_cmake_external(
         "JSONCPP_WITH_POST_BUILD_UNITTEST": "OFF",
         "BUILD_SHARED_LIBS": "OFF",
         "BUILD_OBJECT_LIBS": "OFF",
+        "CMAKE_INSTALL_LIBDIR": "lib",
     },
     lib_source = "@com_github_open_source_parsers_jsoncpp//:all",
     out_static_libs = ["libjsoncpp.a"],
diff --git a/bazel/libzmq.BUILD b/bazel/libzmq.BUILD
index 5cd9b15..d4dafc5 100644
--- a/bazel/libzmq.BUILD
+++ b/bazel/libzmq.BUILD
@@ -25,6 +25,7 @@ psi_cmake_external(
     name = "ZeroMQ",
     cache_entries = {
         "ZMQ_BUILD_TESTS": "OFF",
+        "CMAKE_INSTALL_LIBDIR": "lib",
     },
     lib_source = "@com_github_zeromq_libzmq//:all",
     out_static_libs = ["libzmq.a"],
diff --git a/bazel/log4cplus.BUILD b/bazel/log4cplus.BUILD
index a5047c9..613b9b5 100644
--- a/bazel/log4cplus.BUILD
+++ b/bazel/log4cplus.BUILD
@@ -25,9 +25,11 @@ psi_cmake_external(
     name = "log4cplus",
     cache_entries = {
         "LOG4CPLUS_BUILD_TESTING": "OFF",
+        "LOG4CPLUS_BUILD_LOGGINGSERVER": "OFF",
         "WITH_UNIT_TESTS": "OFF",
         "BUILD_SHARED_LIBS": "False",
         "LOG4CPLUS_ENABLE_DECORATED_LIBRARY_NAME": "OFF",
+        "CMAKE_INSTALL_LIBDIR": "lib",
     },
     lib_source = "@com_github_log4cplus_log4cplus//:all",
     out_static_libs = ["liblog4cplus.a"],
diff --git a/bazel/patches/apsi.patch b/bazel/patches/apsi.patch
index 07faf96..abd35ac 100644
--- a/bazel/patches/apsi.patch
+++ b/bazel/patches/apsi.patch
@@ -398,3 +398,16 @@ index 4c4e116..8eb34fc 100644
 }
 
 union Request { ParmsRequest, OPRFRequest, QueryRequest }
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 78d54a6..46a452e 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -326,7 +326,7 @@ else()
+ endif()
+ 
+ # Use optimized assembly on UNIX
+-if(APSI_USE_ASM AND UNIX AND NOT APPLE AND NOT CYGWIN AND NOT MINGW)
++if(APSI_USE_ASM AND UNIX AND APSI_FOURQ_AMD64 AND NOT APPLE AND NOT CYGWIN AND NOT MINGW)
+     check_language(ASM)
+     if(CMAKE_ASM_COMPILER)
+         enable_language(ASM)
diff --git a/bazel/repositories.bzl b/bazel/repositories.bzl
index 281c493..d706168 100644
--- a/bazel/repositories.bzl
+++ b/bazel/repositories.bzl
@@ -54,10 +54,10 @@ def _yacl():
         http_archive,
         name = "yacl",
         urls = [
-            "https://github.com/secretflow/yacl/archive/refs/tags/0.4.5b3_nightly_20240722.tar.gz",
+            "https://github.com/secretflow/yacl/archive/refs/tags/0.4.5b4_nightly_20240731.tar.gz",
         ],
-        strip_prefix = "yacl-0.4.5b3_nightly_20240722",
-        sha256 = "ccca599e6ded6089c5afbb87c8f5e09383195af256caacd50089f0c7443e8604",
+        strip_prefix = "yacl-0.4.5b4_nightly_20240731",
+        sha256 = "e92484a9a60aaf86130157d9568b2bf7812bac4096cb108d565538268d74ea7e",
     )
 
 def _bazel_platform():
@@ -227,7 +227,11 @@ def _com_google_flatbuffers():
         patches = [
            "@psi//bazel:patches/flatbuffers.patch",
        ],
-        patch_args = ["-p1", "-l"],
+        patch_args = ["-p1"],
+        patch_cmds = [
+            # hack to make sure this file is removed
+            "rm grpc/BUILD.bazel",
+        ],
         build_file = "@psi//bazel:flatbuffers.BUILD",
     )
 
diff --git a/psi/apsi_wrapper/api/sender.cc b/psi/apsi_wrapper/api/sender.cc
index 095f689..7ef7dba 100644
--- a/psi/apsi_wrapper/api/sender.cc
+++ b/psi/apsi_wrapper/api/sender.cc
@@ -180,7 +180,7 @@ std::string Sender::RunQuery(const std::string &query_str) {
 
     for (size_t bundle_idx = 0; bundle_idx < all_powers.size(); bundle_idx++) {
       // Load input^power to all_powers[bundle_idx][exponent]
-      all_powers[bundle_idx][exponent] = move(q.second[bundle_idx]);
+      all_powers[bundle_idx][exponent] = std::move(q.second[bundle_idx]);
     }
   }
 
diff --git a/psi/apsi_wrapper/receiver.cc b/psi/apsi_wrapper/receiver.cc
index ea6e455..8e303cc 100644
--- a/psi/apsi_wrapper/receiver.cc
+++ b/psi/apsi_wrapper/receiver.cc
@@ -54,7 +54,7 @@ bool has_n_zeros(T *ptr, size_t count) {
 }
 }  // namespace
 
-Receiver::Receiver(::apsi::PSIParams params) : params_(move(params)) {
+Receiver::Receiver(::apsi::PSIParams params) : params_(std::move(params)) {
   initialize();
 }
 
@@ -70,7 +70,7 @@ void Receiver::reset_keys() {
   if (get_seal_context()->using_keyswitching()) {
     ::seal::Serializable<::seal::RelinKeys> relin_keys(
         generator.create_relin_keys());
-    relin_keys_.set(move(relin_keys));
+    relin_keys_.set(std::move(relin_keys));
   }
 }
 
@@ -186,7 +186,7 @@ Receiver::ExtractHashes(const ::apsi::OPRFResponse &oprf_response,
   APSI_LOG_INFO("Extracted OPRF hashes for " << oprf_response_item_count
                 << " items");
 
-  return make_pair(move(items), move(label_keys));
+  return make_pair(std::move(items), std::move(label_keys));
 }
 
 unique_ptr<::apsi::network::SenderOperation> Receiver::CreateOPRFRequest(
@@ -327,7 +327,7 @@ Receiver::create_query(const vector<::apsi::HashedItem> &items,
       // Now that we have the algebraized items for this bundle index, we create
      // a PlaintextPowers object that computes all necessary powers of the
      // algebraized items.
-      plain_powers.emplace_back(move(alg_items), params_, pd_);
+      plain_powers.emplace_back(std::move(alg_items), params_, pd_);
     }
   }
 
@@ -350,7 +350,7 @@ Receiver::create_query(const vector<::apsi::HashedItem> &items,
 
       // Move the encrypted data to encrypted_powers
       for (auto &e : encrypted_power) {
-        encrypted_powers[e.first].emplace_back(move(e.second));
+        encrypted_powers[e.first].emplace_back(std::move(e.second));
       }
     }
   }
@@ -359,13 +359,13 @@ Receiver::create_query(const vector<::apsi::HashedItem> &items,
   auto sop_query = make_unique<::apsi::network::SenderOperationQuery>();
   sop_query->compr_mode = ::seal::Serialization::compr_mode_default;
   sop_query->relin_keys = relin_keys_;
-  sop_query->data = move(encrypted_powers);
+  sop_query->data = std::move(encrypted_powers);
   sop_query->bucket_idx = bucket_idx;
-  auto sop = ::apsi::to_request(move(sop_query));
+  auto sop = ::apsi::to_request(std::move(sop_query));
 
   APSI_LOG_INFO("Finished creating encrypted query");
 
-  return {move(sop), itt};
+  return {std::move(sop), itt};
 }
 
 vector<::apsi::receiver::MatchRecord> Receiver::request_query(
@@ -377,8 +377,8 @@ vector<::apsi::receiver::MatchRecord> Receiver::request_query(
 
   // Create query and send to Sender
   auto query = create_query(items, bucket_idx);
-  chl.send(move(query.first));
-  auto itt = move(query.second);
+  chl.send(std::move(query.first));
+  auto itt = std::move(query.second);
 
   // Wait for query response
   ::apsi::QueryResponse response;
@@ -608,11 +608,11 @@ vector<::apsi::receiver::MatchRecord> Receiver::process_result_part(
             encrypted_label, label_keys[item_idx], nonce_byte_count);
 
         // Set the label
-        mr.label.set(move(label));
+        mr.label.set(std::move(label));
       }
 
       // We are done with the MatchRecord, so add it to the mrs vector
-      mrs[item_idx] = move(mr);
+      mrs[item_idx] = std::move(mr);
     });
 
   return mrs;
@@ -639,7 +639,7 @@ vector<::apsi::receiver::MatchRecord> Receiver::process_result(
       ::seal::util::iter(mrs, this_mrs, size_t(0)), mrs.size(), [](auto &&I) {
         if (get<1>(I) && !get<0>(I)) {
           // This match needs to be merged into mrs
-          get<0>(I) = move(get<1>(I));
+          get<0>(I) = std::move(get<1>(I));
         } else if (get<1>(I) && get<0>(I)) {
           // If a positive MatchRecord is already present, then something is
           // seriously wrong
@@ -705,7 +705,7 @@ void Receiver::process_result_worker(
       ::seal::util::iter(mrs, this_mrs, size_t(0)), mrs.size(), [](auto &&I) {
         if (get<1>(I) && !get<0>(I)) {
           // This match needs to be merged into mrs
-          get<0>(I) = move(get<1>(I));
+          get<0>(I) = std::move(get<1>(I));
         } else if (get<1>(I) && get<0>(I)) {
           // If a positive MatchRecord is already present, then something is
           // seriously wrong
diff --git a/psi/apsi_wrapper/sender.cc b/psi/apsi_wrapper/sender.cc
index db0d342..66369c9 100644
--- a/psi/apsi_wrapper/sender.cc
+++ b/psi/apsi_wrapper/sender.cc
@@ -65,7 +65,7 @@ void Sender::RunParams(
       make_unique<::apsi::PSIParams>(sender_db->get_params());
 
   try {
-    send_fun(chl, move(response_params));
+    send_fun(chl, std::move(response_params));
   } catch (const exception &ex) {
     APSI_LOG_ERROR(
         "Failed to send response to parameter request; function threw an "
@@ -223,7 +223,7 @@ void Sender::RunQuery(
       // Load input^power to all_powers[bundle_idx][exponent]
      APSI_LOG_DEBUG("Extracting query ciphertext power " << exponent
                     << " for bundle index " << bundle_idx);
-      all_powers[bundle_idx][exponent] = move(q.second[bundle_idx]);
+      all_powers[bundle_idx][exponent] = std::move(q.second[bundle_idx]);
     }
   }
 
@@ -319,7 +319,7 @@ void Sender::ComputePowers(
           if (relinearize) {
             evaluator->relinearize_inplace(prod, *relin_keys, pool);
           }
-          powers_at_this_bundle_idx[node.power] = move(prod);
+          powers_at_this_bundle_idx[node.power] = std::move(prod);
         }
       });
 
diff --git a/psi/apsi_wrapper/utils/bucket.cc b/psi/apsi_wrapper/utils/bucket.cc
index fbfb9c6..af0d9db 100644
--- a/psi/apsi_wrapper/utils/bucket.cc
+++ b/psi/apsi_wrapper/utils/bucket.cc
@@ -37,6 +37,7 @@ BucketSenderDbSwitcher::BucketSenderDbSwitcher(const std::string& parent_folder,
                                                size_t init_idx)
     : parent_folder_(parent_folder), bucket_cnt_(bucket_cnt) {
   SetBucketIdx(init_idx, true);
+  (void)bucket_cnt_;
 }
 
 void BucketSenderDbSwitcher::SetBucketIdx(size_t idx, bool forced_to_reload) {
diff --git a/psi/apsi_wrapper/utils/csv_reader.cc b/psi/apsi_wrapper/utils/csv_reader.cc
index 3a297e8..5bc6685 100644
--- a/psi/apsi_wrapper/utils/csv_reader.cc
+++ b/psi/apsi_wrapper/utils/csv_reader.cc
@@ -146,7 +146,7 @@ auto CSVReader::read() -> pair> {
     return {UnlabeledData{}, {}};
   } else {
     SPDLOG_INFO("Read csv file {}, row cnt is {}", file_name_, row_cnt);
-    return {move(result), move(orig_items)};
+    return {std::move(result), std::move(orig_items)};
   }
 }
 
@@ -211,6 +211,8 @@ void CSVReader::bucketize(size_t bucket_cnt, const std::string& bucket_folder) {
     }
   }
 
+  (void)row_cnt;
+
   for (const auto& out : bucket_os_vec) {
     out->Flush();
   }
diff --git a/psi/apsi_wrapper/utils/sender_db.cc b/psi/apsi_wrapper/utils/sender_db.cc
index a833dda..42bfd7e 100644
--- a/psi/apsi_wrapper/utils/sender_db.cc
+++ b/psi/apsi_wrapper/utils/sender_db.cc
@@ -53,7 +53,7 @@ shared_ptr<::apsi::sender::SenderDB> try_load_sender_db(
           "PSI parameters were loaded with the SenderDB; "
           "ignoring given PSI parameters");
     }
-    result = make_shared<::apsi::sender::SenderDB>(move(data));
+    result = make_shared<::apsi::sender::SenderDB>(std::move(data));
 
     // Load also the OPRF key
     oprf_key.load(fs);
@@ -86,8 +86,8 @@ shared_ptr<::apsi::sender::SenderDB> try_load_csv_db(
     return nullptr;
   }
 
-  return create_sender_db(*db_data, move(params), oprf_key, nonce_byte_count,
-                          compress);
+  return create_sender_db(*db_data, std::move(params), oprf_key,
+                          nonce_byte_count, compress);
 }
 
 bool try_save_sender_db(const std::string &sdb_out_file,
@@ -131,7 +131,7 @@ unique_ptr load_db(
     return nullptr;
   }
 
-  return make_unique(move(db_data));
+  return make_unique(std::move(db_data));
 }
 
 pair, vector>
@@ -147,8 +147,8 @@ load_db_with_orig_items(const std::string &db_file) {
     return {nullptr, orig_items};
   }
 
-  return {make_unique(move(db_data)),
-          move(orig_items)};
+  return {make_unique(std::move(db_data)),
+          std::move(orig_items)};
 }
 
 shared_ptr<::apsi::sender::SenderDB> create_sender_db(