From 0facbb7c1aea0831a5b0ce159844bdf674f10852 Mon Sep 17 00:00:00 2001
From: vakh
Date: Wed, 10 Aug 2016 19:15:12 -0700
Subject: [PATCH] Verify checksum for downloaded updates and discard the
 update if the checksum fails to match.

BUG=543161

Review-Url: https://codereview.chromium.org/2206733002
Cr-Commit-Position: refs/heads/master@{#411225}
---
 components/safe_browsing_db/BUILD.gn        |   2 +
 components/safe_browsing_db/v4_rice.cc      |  12 +-
 components/safe_browsing_db/v4_store.cc     | 133 +++++++++++----
 components/safe_browsing_db/v4_store.h      |  50 ++++--
 .../safe_browsing_db/v4_store_unittest.cc   | 156 ++++++++++++++----
 tools/metrics/histograms/histograms.xml     |   1 +
 6 files changed, 274 insertions(+), 80 deletions(-)

diff --git a/components/safe_browsing_db/BUILD.gn b/components/safe_browsing_db/BUILD.gn
index 1e11626951d083..50e9688431d8bc 100644
--- a/components/safe_browsing_db/BUILD.gn
+++ b/components/safe_browsing_db/BUILD.gn
@@ -257,6 +257,7 @@ source_set("v4_store") {
     ":v4_protocol_manager_util",
     ":v4_rice",
     "//base",
+    "//crypto",
   ]
 }
 
@@ -304,6 +305,7 @@ source_set("unit_tests") {
     ":v4_update_protocol_manager",
     "//base",
     "//content/test:test_support",
+    "//crypto",
     "//net",
     "//net:test_support",
     "//testing/gtest",
diff --git a/components/safe_browsing_db/v4_rice.cc b/components/safe_browsing_db/v4_rice.cc
index 3016fac87941e3..0e7148048fc379 100644
--- a/components/safe_browsing_db/v4_rice.cc
+++ b/components/safe_browsing_db/v4_rice.cc
@@ -14,7 +14,8 @@ namespace safe_browsing {
 
 namespace {
 
-const unsigned int kMaxBitIndex = 8 * sizeof(uint32_t);
+const int kBitsPerByte = 8;
+const unsigned int kMaxBitIndex = kBitsPerByte * sizeof(uint32_t);
 
 void GetBytesFromUInt32(uint32_t word, char* bytes) {
   const size_t mask = 0xFF;
@@ -264,10 +265,15 @@ void V4RiceDecoder::GetBitsFromCurrentWord(unsigned int num_requested_bits,
 };
 
 std::string V4RiceDecoder::DebugString() const {
+  // Calculates the total number of bits that we have read from the buffer,
+  // excluding those that have been read into current_word_ but not yet
+  // consumed by GetNextBits().
+  unsigned bits_read = (data_byte_index_ - sizeof(uint32_t)) * kBitsPerByte +
+                       current_word_bit_index_;
   return base::StringPrintf(
-      "current_word_: %x; data_byte_index_; %x, "
+      "bits_read: %x; current_word_: %x; data_byte_index_; %x, "
       "current_word_bit_index_: %x; rice_parameter_: %x",
-      current_word_, data_byte_index_, current_word_bit_index_,
+      bits_read, current_word_, data_byte_index_, current_word_bit_index_,
       rice_parameter_);
 }
 
diff --git a/components/safe_browsing_db/v4_store.cc b/components/safe_browsing_db/v4_store.cc
index 69d49a258452f5..45c003f28a2af7 100644
--- a/components/safe_browsing_db/v4_store.cc
+++ b/components/safe_browsing_db/v4_store.cc
@@ -8,10 +8,13 @@
 #include "base/memory/ptr_util.h"
 #include "base/metrics/histogram_macros.h"
 #include "base/metrics/sparse_histogram.h"
+#include "base/stl_util.h"
 #include "base/strings/stringprintf.h"
 #include "components/safe_browsing_db/v4_rice.h"
 #include "components/safe_browsing_db/v4_store.h"
 #include "components/safe_browsing_db/v4_store.pb.h"
+#include "crypto/secure_hash.h"
+#include "crypto/sha2.h"
 
 namespace safe_browsing {
 
@@ -113,28 +116,44 @@ bool V4Store::Reset() {
   return true;
 }
 
-ApplyUpdateResult V4Store::ProcessFullUpdate(
-    std::unique_ptr<ListUpdateResponse> response,
-    const std::unique_ptr<V4Store>& new_store) {
-  HashPrefixMap hash_prefix_map;
-  ApplyUpdateResult apply_update_result =
-      UpdateHashPrefixMapFromAdditions(response->additions(), &hash_prefix_map);
-  if (apply_update_result == APPLY_UPDATE_SUCCESS) {
-    new_store->hash_prefix_map_ = hash_prefix_map;
-    RecordStoreWriteResult(new_store->WriteToDisk(std::move(response)));
+ApplyUpdateResult V4Store::ProcessPartialUpdateAndWriteToDisk(
+    const HashPrefixMap& hash_prefix_map_old,
+    std::unique_ptr<ListUpdateResponse> response) {
+  DCHECK(response->has_response_type());
+  DCHECK_EQ(ListUpdateResponse::PARTIAL_UPDATE, response->response_type());
+
+  ApplyUpdateResult result = ProcessUpdate(hash_prefix_map_old, response);
+  if (result == APPLY_UPDATE_SUCCESS) {
+    // TODO(vakh): Create a ListUpdateResponse containing RICE encoded
+    // hash prefixes and response_type as FULL_UPDATE, and write that to disk.
   }
-  return apply_update_result;
+  return result;
 }
 
-ApplyUpdateResult V4Store::ProcessPartialUpdate(
-    std::unique_ptr<ListUpdateResponse> response,
-    const std::unique_ptr<V4Store>& new_store) {
-  // TODO(vakh):
-  // 1. Done: Merge the old store and the new update in new_store.
-  // 2. Create a ListUpdateResponse containing RICE encoded hash-prefixes and
-  //    response_type as FULL_UPDATE, and write that to disk.
-  // 3. Remove this if condition after completing 1. and 2.
+ApplyUpdateResult V4Store::ProcessFullUpdateAndWriteToDisk(
+    std::unique_ptr<ListUpdateResponse> response) {
+  ApplyUpdateResult result = ProcessFullUpdate(response);
+  if (result == APPLY_UPDATE_SUCCESS) {
+    RecordStoreWriteResult(WriteToDisk(std::move(response)));
+  }
+  return result;
+}
+
+ApplyUpdateResult V4Store::ProcessFullUpdate(
+    const std::unique_ptr<ListUpdateResponse>& response) {
+  DCHECK(response->has_response_type());
+  DCHECK_EQ(ListUpdateResponse::FULL_UPDATE, response->response_type());
+  // TODO(vakh): For a full update, we don't need to process the update in
+  // lexicographical order to store it, but we do need to do that for
+  // calculating the checksum. It might save some CPU cycles to store the
+  // full update as-is and walk the list of hash prefixes in lexicographical
+  // order only for checksum calculation.
+  return ProcessUpdate(HashPrefixMap(), response);
+}
+
+ApplyUpdateResult V4Store::ProcessUpdate(
+    const HashPrefixMap& hash_prefix_map_old,
+    const std::unique_ptr<ListUpdateResponse>& response) {
   const RepeatedField<int32>* raw_removals = nullptr;
   RepeatedField<int32> rice_removals;
   size_t removals_size = response->removals_size();
@@ -167,12 +186,23 @@ ApplyUpdateResult V4Store::ProcessPartialUpdate(
   HashPrefixMap hash_prefix_map;
   ApplyUpdateResult apply_update_result =
       UpdateHashPrefixMapFromAdditions(response->additions(), &hash_prefix_map);
+  if (apply_update_result != APPLY_UPDATE_SUCCESS) {
+    return apply_update_result;
+  }
 
-  if (apply_update_result == APPLY_UPDATE_SUCCESS) {
-    apply_update_result =
-        new_store->MergeUpdate(hash_prefix_map_, hash_prefix_map, raw_removals);
+  std::string expected_checksum;
+  if (response->has_checksum() && response->checksum().has_sha256()) {
+    expected_checksum = response->checksum().sha256();
   }
-  return apply_update_result;
+
+  apply_update_result = MergeUpdate(hash_prefix_map_old, hash_prefix_map,
+                                    raw_removals, expected_checksum);
+  if (apply_update_result != APPLY_UPDATE_SUCCESS) {
+    return apply_update_result;
+  }
+
+  state_ = response->new_client_state();
+  return APPLY_UPDATE_SUCCESS;
 }
 
 void V4Store::ApplyUpdate(
@@ -181,13 +211,14 @@
     UpdatedStoreReadyCallback callback) {
   std::unique_ptr<V4Store> new_store(
       new V4Store(this->task_runner_, this->store_path_));
-  new_store->state_ = response->new_client_state();
 
   ApplyUpdateResult apply_update_result;
   if (response->response_type() == ListUpdateResponse::PARTIAL_UPDATE) {
-    apply_update_result = ProcessPartialUpdate(std::move(response), new_store);
+    apply_update_result = new_store->ProcessPartialUpdateAndWriteToDisk(
+        hash_prefix_map_, std::move(response));
   } else if (response->response_type() == ListUpdateResponse::FULL_UPDATE) {
-    apply_update_result = ProcessFullUpdate(std::move(response), new_store);
+    apply_update_result =
+        new_store->ProcessFullUpdateAndWriteToDisk(std::move(response));
   } else {
     apply_update_result = UNEXPECTED_RESPONSE_TYPE_FAILURE;
     NOTREACHED() << "Unexpected response type: " << response->response_type();
@@ -198,6 +229,8 @@
     callback_task_runner->PostTask(
         FROM_HERE, base::Bind(callback, base::Passed(&new_store)));
   } else {
+    DVLOG(1) << "ApplyUpdate failed: reason: " << apply_update_result
+             << "; store: " << *this;
     // new_store failed updating. Pass a nullptr to the callback.
     callback_task_runner->PostTask(FROM_HERE, base::Bind(callback, nullptr));
   }
@@ -321,10 +354,10 @@ void V4Store::ReserveSpaceInPrefixMap(const HashPrefixMap& other_prefixes_map,
   }
 }
 
-ApplyUpdateResult V4Store::MergeUpdate(
-    const HashPrefixMap& old_prefixes_map,
-    const HashPrefixMap& additions_map,
-    const RepeatedField<int32>* raw_removals) {
+ApplyUpdateResult V4Store::MergeUpdate(const HashPrefixMap& old_prefixes_map,
+                                       const HashPrefixMap& additions_map,
+                                       const RepeatedField<int32>* raw_removals,
+                                       const std::string& expected_checksum) {
   DCHECK(hash_prefix_map_.empty());
   hash_prefix_map_.clear();
   ReserveSpaceInPrefixMap(old_prefixes_map, &hash_prefix_map_);
@@ -347,6 +380,10 @@
   // At least one of the maps still has elements that need to be merged into the
   // new store.
 
+  bool calculate_checksum = !expected_checksum.empty();
+  std::unique_ptr<crypto::SecureHash> checksum_ctx(
+      crypto::SecureHash::Create(crypto::SecureHash::SHA256));
+
   // Keep track of the number of elements picked from the old map. This is used
   // to determine which elements to drop based on the raw_removals. Note that
   // picked is not the same as merged. A picked element isn't merged if its
@@ -380,6 +417,11 @@
           *removals_iter != total_picked_from_old) {
         // Append the smallest hash to the appropriate list.
         hash_prefix_map_[next_smallest_prefix_size] += next_smallest_prefix_old;
+
+        if (calculate_checksum) {
+          checksum_ctx->Update(string_as_array(&next_smallest_prefix_old),
+                               next_smallest_prefix_size);
+        }
       } else {
         // Element not added to new map. Move the removals iterator forward.
         removals_iter++;
@@ -397,6 +439,11 @@
       hash_prefix_map_[next_smallest_prefix_size] +=
           next_smallest_prefix_additions;
 
+      if (calculate_checksum) {
+        checksum_ctx->Update(string_as_array(&next_smallest_prefix_additions),
+                             next_smallest_prefix_size);
+      }
+
       // Update the iterator map, which means that we have merged one hash
       // prefix of size |next_smallest_prefix_size| from the update.
       additions_iterator_map[next_smallest_prefix_size] +=
@@ -409,9 +456,24 @@
     }
   }
 
-  return (!raw_removals || removals_iter == raw_removals->end())
-             ? APPLY_UPDATE_SUCCESS
-             : REMOVALS_INDEX_TOO_LARGE_FAILURE;
+  if (raw_removals && removals_iter != raw_removals->end()) {
+    return REMOVALS_INDEX_TOO_LARGE_FAILURE;
+  }
+
+  if (calculate_checksum) {
+    std::string checksum(crypto::kSHA256Length, 0);
+    checksum_ctx->Finish(string_as_array(&checksum), checksum.size());
+    if (checksum != expected_checksum) {
+      std::string checksum_base64, expected_checksum_base64;
+      base::Base64Encode(checksum, &checksum_base64);
+      base::Base64Encode(expected_checksum, &expected_checksum_base64);
+      DVLOG(1) << "Checksum failed: calculated: " << checksum_base64
+               << "; expected: " << expected_checksum_base64;
+      return CHECKSUM_MISMATCH_FAILURE;
+    }
+  }
+
+  return APPLY_UPDATE_SUCCESS;
 }
 
 StoreReadResult V4Store::ReadFromDisk() {
@@ -450,16 +512,15 @@
     return HASH_PREFIX_INFO_MISSING_FAILURE;
   }
 
-  const ListUpdateResponse& response = file_format.list_update_response();
-  ApplyUpdateResult apply_update_result = UpdateHashPrefixMapFromAdditions(
-      response.additions(), &hash_prefix_map_);
+  std::unique_ptr<ListUpdateResponse> response(new ListUpdateResponse);
+  response->Swap(file_format.mutable_list_update_response());
+  ApplyUpdateResult apply_update_result = ProcessFullUpdate(response);
   RecordApplyUpdateResultWhenReadingFromDisk(apply_update_result);
   if (apply_update_result != APPLY_UPDATE_SUCCESS) {
     hash_prefix_map_.clear();
     return HASH_PREFIX_MAP_GENERATION_FAILURE;
   }
-  state_ = response.new_client_state();
 
   return READ_SUCCESS;
 }
diff --git a/components/safe_browsing_db/v4_store.h b/components/safe_browsing_db/v4_store.h
index 770088b5938a11..48fb3e090e711d 100644
--- a/components/safe_browsing_db/v4_store.h
+++ b/components/safe_browsing_db/v4_store.h
@@ -138,6 +138,10 @@ enum ApplyUpdateResult {
   // Compression type other than RAW and RICE for removals.
   UNEXPECTED_COMPRESSION_TYPE_REMOVALS_FAILURE = 10,
 
+  // The state of the store did not match the expected checksum sent by the
+  // server.
+  CHECKSUM_MISMATCH_FAILURE = 11,
+
   // Memory space for histograms is determined by the max. ALWAYS
   // ADD NEW VALUES BEFORE THIS ONE.
   APPLY_UPDATE_RESULT_MAX
@@ -245,6 +249,7 @@ class V4Store {
                            TestAdditionsWithRiceEncodingFailsWithInvalidInput);
   FRIEND_TEST_ALL_PREFIXES(V4StoreTest, TestAdditionsWithRiceEncodingSucceeds);
   FRIEND_TEST_ALL_PREFIXES(V4StoreTest, TestRemovalsWithRiceEncodingSucceeds);
+  FRIEND_TEST_ALL_PREFIXES(V4StoreTest, TestMergeUpdatesFailsChecksum);
   friend class V4StoreTest;
 
   // If |prefix_size| is within expected range, and |raw_hashes| is not invalid,
@@ -290,24 +295,43 @@ class V4Store {
   // Merges the prefix map from the old store (|old_hash_prefix_map|) and the
   // update (additions_map) to populate the prefix map for the current store.
   // The indices in the |raw_removals| list, which may be NULL, are not merged.
+  // The SHA256 checksum of the final list of hash prefixes, in
+  // lexicographically sorted order, must match |expected_checksum| (if it's
+  // not empty).
   ApplyUpdateResult MergeUpdate(const HashPrefixMap& old_hash_prefix_map,
                                 const HashPrefixMap& additions_map,
                                 const ::google::protobuf::RepeatedField<
-                                    ::google::protobuf::int32>* raw_removals);
-
-  // Processes the FULL_UPDATE |response| from the server and updates the
-  // V4Store in |new_store| and writes it to disk. If processing the |response|
-  // succeeds, it returns APPLY_UPDATE_SUCCESS.
-  ApplyUpdateResult ProcessFullUpdate(
-      std::unique_ptr<ListUpdateResponse> response,
-      const std::unique_ptr<V4Store>& new_store);
+                                    ::google::protobuf::int32>* raw_removals,
+                                const std::string& expected_checksum);
 
-  // Processes the PARTIAL_UPDATE |response| from the server and updates the
-  // V4Store in |new_store|. If processing the |response| succeeds, it returns
+  // Processes the FULL_UPDATE |response| from the server, and writes the
+  // merged V4Store to disk. If processing the |response| succeeds, it returns
   // APPLY_UPDATE_SUCCESS.
-  ApplyUpdateResult ProcessPartialUpdate(
-      std::unique_ptr<ListUpdateResponse> response,
-      const std::unique_ptr<V4Store>& new_store);
+  // This method is only called when we receive a FULL_UPDATE from the server.
+  ApplyUpdateResult ProcessFullUpdateAndWriteToDisk(
+      std::unique_ptr<ListUpdateResponse> response);
+
+  // Processes a FULL_UPDATE |response| and updates the V4Store. If processing
+  // the |response| succeeds, it returns APPLY_UPDATE_SUCCESS.
+  // This method is called when we receive a FULL_UPDATE from the server, and
+  // when we read a store file from disk on startup.
+  ApplyUpdateResult ProcessFullUpdate(
+      const std::unique_ptr<ListUpdateResponse>& response);
+
+  // Merges the hash prefixes in |hash_prefix_map_old| and |response|, updates
+  // the |hash_prefix_map_| and |state_| in the V4Store, and writes the merged
+  // store to disk. If processing succeeds, it returns APPLY_UPDATE_SUCCESS.
+  // This method is only called when we receive a PARTIAL_UPDATE from the
+  // server.
+  ApplyUpdateResult ProcessPartialUpdateAndWriteToDisk(
+      const HashPrefixMap& hash_prefix_map_old,
+      std::unique_ptr<ListUpdateResponse> response);
+
+  // Merges the hash prefixes in |hash_prefix_map_old| and |response|, and
+  // updates the |hash_prefix_map_| and |state_| in the V4Store. If processing
+  // succeeds, it returns APPLY_UPDATE_SUCCESS.
+  ApplyUpdateResult ProcessUpdate(
+      const HashPrefixMap& hash_prefix_map_old,
+      const std::unique_ptr<ListUpdateResponse>& response);
 
   // Reads the state of the store from the file on disk and returns the reason
   // for the failure or reports success.
diff --git a/components/safe_browsing_db/v4_store_unittest.cc b/components/safe_browsing_db/v4_store_unittest.cc index f9bd1ba3d0bd8d..2bd661e77adda7 100644 --- a/components/safe_browsing_db/v4_store_unittest.cc +++ b/components/safe_browsing_db/v4_store_unittest.cc @@ -12,6 +12,7 @@ #include "components/safe_browsing_db/v4_store.h" #include "components/safe_browsing_db/v4_store.pb.h" #include "content/public/test/test_browser_thread_bundle.h" +#include "crypto/sha2.h" #include "testing/platform_test.h" namespace safe_browsing { @@ -122,6 +123,7 @@ TEST_F(V4StoreTest, TestReadFromNoHashPrefixInfoFile) { TEST_F(V4StoreTest, TestReadFromNoHashPrefixesFile) { ListUpdateResponse list_update_response; list_update_response.set_platform_type(LINUX_PLATFORM); + list_update_response.set_response_type(ListUpdateResponse::FULL_UPDATE); WriteFileFormatProtoToFile(0x600D71FE, 9, &list_update_response); EXPECT_EQ(READ_SUCCESS, V4Store(task_runner_, store_path_).ReadFromDisk()); } @@ -229,8 +231,21 @@ TEST_F(V4StoreTest, TestMergeUpdatesWithSameSizesInEachMap) { V4Store::AddUnlumpedHashes(5, "22222bcdef", &prefix_map_additions)); V4Store store(task_runner_, store_path_); - EXPECT_EQ(APPLY_UPDATE_SUCCESS, - store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr)); + // Proof of checksum validity using python: + // >>> import hashlib + // >>> m = hashlib.sha256() + // >>> m.update("----11112222254321abcdabcdebbbbbcdefefgh") + // >>> m.digest() + // "\xbc\xb3\xedk\xe3x\xd1(\xa9\xedz7]" + // "x\x18\xbdn]\xa5\xa8R\xf7\xab\xcf\xc1\xa3\xa3\xc5Z,\xa6o" + std::string expected_checksum = std::string( + "\xBC\xB3\xEDk\xE3x\xD1(\xA9\xEDz7]x\x18\xBDn]" + "\xA5\xA8R\xF7\xAB\xCF\xC1\xA3\xA3\xC5Z,\xA6o", + crypto::kSHA256Length); + EXPECT_EQ(APPLY_UPDATE_SUCCESS, + store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr, + expected_checksum)); + const HashPrefixMap& prefix_map = store.hash_prefix_map_; EXPECT_EQ(2u, prefix_map.size()); @@ -261,8 +276,15 @@ TEST_F(V4StoreTest, TestMergeUpdatesWithDifferentSizesInEachMap) { V4Store::AddUnlumpedHashes(5, "22222bcdef", &prefix_map_additions)); V4Store store(task_runner_, store_path_); + std::string expected_checksum = std::string( + "\xA5\x8B\xCAsD\xC7\xF9\xCE\xD2\xF4\x4=" + "\xB2\"\x82\x1A\xC1\xB8\x1F\x10\r\v\x9A\x93\xFD\xE1\xB8" + "B\x1Eh\xF7\xB4", + crypto::kSHA256Length); EXPECT_EQ(APPLY_UPDATE_SUCCESS, - store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr)); + store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr, + expected_checksum)); + const HashPrefixMap& prefix_map = store.hash_prefix_map_; EXPECT_EQ(2u, prefix_map.size()); @@ -286,8 +308,15 @@ TEST_F(V4StoreTest, TestMergeUpdatesOldMapRunsOutFirst) { V4Store::AddUnlumpedHashes(4, "2222", &prefix_map_additions)); V4Store store(task_runner_, store_path_); + std::string expected_checksum = std::string( + "\x84\x92\xET\xED\xF7\x97" + "C\xCE}\xFF" + "E\x1\xAB-\b>\xDB\x95\b\xD8H\xD5\x1D\xF9]8x\xA4\xD4\xC2\xFA", + crypto::kSHA256Length); EXPECT_EQ(APPLY_UPDATE_SUCCESS, - store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr)); + store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr, + expected_checksum)); + const HashPrefixMap& prefix_map = store.hash_prefix_map_; EXPECT_EQ(1u, prefix_map.size()); @@ -308,8 +337,15 @@ TEST_F(V4StoreTest, TestMergeUpdatesAdditionsMapRunsOutFirst) { V4Store::AddUnlumpedHashes(4, "00001111", &prefix_map_additions)); V4Store store(task_runner_, store_path_); + std::string expected_checksum = std::string( + "\x84\x92\xET\xED\xF7\x97" 
+ "C\xCE}\xFF" + "E\x1\xAB-\b>\xDB\x95\b\xD8H\xD5\x1D\xF9]8x\xA4\xD4\xC2\xFA", + crypto::kSHA256Length); EXPECT_EQ(APPLY_UPDATE_SUCCESS, - store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr)); + store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr, + expected_checksum)); + const HashPrefixMap& prefix_map = store.hash_prefix_map_; EXPECT_EQ(1u, prefix_map.size()); @@ -330,8 +366,10 @@ TEST_F(V4StoreTest, TestMergeUpdatesFailsForRepeatedHashPrefix) { V4Store::AddUnlumpedHashes(4, "2222", &prefix_map_additions)); V4Store store(task_runner_, store_path_); + std::string expected_checksum; EXPECT_EQ(ADDITIONS_HAS_EXISTING_PREFIX_FAILURE, - store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr)); + store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr, + expected_checksum)); } TEST_F(V4StoreTest, TestMergeUpdatesFailsWhenRemovalsIndexTooLarge) { @@ -348,9 +386,10 @@ TEST_F(V4StoreTest, TestMergeUpdatesFailsWhenRemovalsIndexTooLarge) { RepeatedField raw_removals; // old_store: ["2222"] raw_removals.Add(1); - EXPECT_EQ( - REMOVALS_INDEX_TOO_LARGE_FAILURE, - store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals)); + std::string expected_checksum; + EXPECT_EQ(REMOVALS_INDEX_TOO_LARGE_FAILURE, + store.MergeUpdate(prefix_map_old, prefix_map_additions, + &raw_removals, expected_checksum)); } TEST_F(V4StoreTest, TestMergeUpdatesRemovesOnlyElement) { @@ -365,9 +404,16 @@ TEST_F(V4StoreTest, TestMergeUpdatesRemovesOnlyElement) { RepeatedField raw_removals; // old_store: ["2222"] raw_removals.Add(0); // Removes "2222" - EXPECT_EQ( - APPLY_UPDATE_SUCCESS, - store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals)); + std::string expected_checksum = std::string( + "\xE6\xB0\x1\x12\x89\x83\xF0/" + "\xE7\xD2\xE6\xDC\x16\xB9\x8C+\xA2\xB3\x9E\x89<,\x88" + "B3\xA5\xB1" + "D\x9E\x9E'\x14", + crypto::kSHA256Length); + EXPECT_EQ(APPLY_UPDATE_SUCCESS, + store.MergeUpdate(prefix_map_old, prefix_map_additions, + &raw_removals, expected_checksum)); + const HashPrefixMap& prefix_map = store.hash_prefix_map_; // The size is 2 since we reserve space anyway. EXPECT_EQ(2u, prefix_map.size()); @@ -387,9 +433,16 @@ TEST_F(V4StoreTest, TestMergeUpdatesRemovesFirstElement) { RepeatedField raw_removals; // old_store: ["2222", "4444"] raw_removals.Add(0); // Removes "2222" - EXPECT_EQ( - APPLY_UPDATE_SUCCESS, - store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals)); + std::string expected_checksum = std::string( + "\x9D\xF3\xF2\x82\0\x1E{\xDF\xCD\xC0V\xBE\xD6<\x85" + "D7=\xB5v\xAD\b1\xC9\xB3" + "A\xAC" + "b\xF1lf\xA4", + crypto::kSHA256Length); + EXPECT_EQ(APPLY_UPDATE_SUCCESS, + store.MergeUpdate(prefix_map_old, prefix_map_additions, + &raw_removals, expected_checksum)); + const HashPrefixMap& prefix_map = store.hash_prefix_map_; // The size is 2 since we reserve space anyway. 
   EXPECT_EQ(2u, prefix_map.size());
@@ -409,9 +462,15 @@ TEST_F(V4StoreTest, TestMergeUpdatesRemovesMiddleElement) {
   RepeatedField<int32> raw_removals;  // old_store: ["2222", "3333", "4444"]
   raw_removals.Add(1);  // Removes "3333"
-  EXPECT_EQ(
-      APPLY_UPDATE_SUCCESS,
-      store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals));
+  std::string expected_checksum = std::string(
+      "\xFA-A\x15{\x17\0>\xAE"
+      "8\xACigR\xD1\x93<\xB2\xC9\xB5\x81\xC0\xFB\xBB\x2\f\xAFpN\xEA"
+      "44",
+      crypto::kSHA256Length);
+  EXPECT_EQ(APPLY_UPDATE_SUCCESS,
+            store.MergeUpdate(prefix_map_old, prefix_map_additions,
+                              &raw_removals, expected_checksum));
+
   const HashPrefixMap& prefix_map = store.hash_prefix_map_;
   // The size is 2 since we reserve space anyway.
   EXPECT_EQ(2u, prefix_map.size());
@@ -431,9 +490,14 @@ TEST_F(V4StoreTest, TestMergeUpdatesRemovesLastElement) {
   RepeatedField<int32> raw_removals;  // old_store: ["2222", "3333", "4444"]
   raw_removals.Add(2);  // Removes "4444"
-  EXPECT_EQ(
-      APPLY_UPDATE_SUCCESS,
-      store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals));
+  std::string expected_checksum = std::string(
+      "a\xE1\xAD\x96\xFE\xA6"
+      "A\xCA~7W\xF6z\xD8\n\xCA?\x96\x8A\x17U\x5\v\r\x88]\n\xB2JX\xC4S",
+      crypto::kSHA256Length);
+  EXPECT_EQ(APPLY_UPDATE_SUCCESS,
+            store.MergeUpdate(prefix_map_old, prefix_map_additions,
+                              &raw_removals, expected_checksum));
+
   const HashPrefixMap& prefix_map = store.hash_prefix_map_;
   // The size is 2 since we reserve space anyway.
   EXPECT_EQ(2u, prefix_map.size());
@@ -455,9 +519,15 @@ TEST_F(V4StoreTest, TestMergeUpdatesRemovesWhenOldHasDifferentSizes) {
   RepeatedField<int32> raw_removals;
   // old_store: ["2222", "3333", "4444", "aaaaa", "bbbbb"]
   raw_removals.Add(3);  // Removes "aaaaa"
-  EXPECT_EQ(
-      APPLY_UPDATE_SUCCESS,
-      store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals));
+  std::string expected_checksum = std::string(
+      "\xA7OG\x9D\x83.\x9D-f\x8A\xE\x8B\r&\x19"
+      "6\xE3\xF0\xEFTi\xA7\x5\xEA\xF7"
+      "ej,\xA8\x9D\xAD\x91",
+      crypto::kSHA256Length);
+  EXPECT_EQ(APPLY_UPDATE_SUCCESS,
+            store.MergeUpdate(prefix_map_old, prefix_map_additions,
+                              &raw_removals, expected_checksum));
+
   const HashPrefixMap& prefix_map = store.hash_prefix_map_;
   // The size is 2 since we reserve space anyway.
   EXPECT_EQ(2u, prefix_map.size());
@@ -480,9 +550,16 @@ TEST_F(V4StoreTest, TestMergeUpdatesRemovesMultipleAcrossDifferentSizes) {
   // old_store: ["2222", "3333", "33333", "44444", "aaaa", "bbbbb"]
   raw_removals.Add(1);  // Removes "3333"
   raw_removals.Add(3);  // Removes "44444"
-  EXPECT_EQ(
-      APPLY_UPDATE_SUCCESS,
-      store.MergeUpdate(prefix_map_old, prefix_map_additions, &raw_removals));
+  std::string expected_checksum = std::string(
+      "!D\xB7&L\xA7&G0\x85\xB4"
+      "E\xDD\x10\"\x9A\xCA\xF1"
+      "3^\x83w\xBBL\x19n\xAD\xBDM\x9D"
+      "b\x9F",
+      crypto::kSHA256Length);
+  EXPECT_EQ(APPLY_UPDATE_SUCCESS,
+            store.MergeUpdate(prefix_map_old, prefix_map_additions,
+                              &raw_removals, expected_checksum));
+
   const HashPrefixMap& prefix_map = store.hash_prefix_map_;
   // The size is 2 since we reserve space anyway.
   EXPECT_EQ(2u, prefix_map.size());
@@ -667,8 +744,14 @@ TEST_F(V4StoreTest, TestRemovalsWithRiceEncodingSucceeds) {
             V4Store::AddUnlumpedHashes(5, "22222bcdef", &prefix_map_additions));
 
   V4Store store(task_runner_, store_path_);
+  std::string expected_checksum = std::string(
+      "\xA5\x8B\xCAsD\xC7\xF9\xCE\xD2\xF4\x4="
+      "\xB2\"\x82\x1A\xC1\xB8\x1F\x10\r\v\x9A\x93\xFD\xE1\xB8"
+      "B\x1Eh\xF7\xB4",
+      crypto::kSHA256Length);
   EXPECT_EQ(APPLY_UPDATE_SUCCESS,
-            store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr));
+            store.MergeUpdate(prefix_map_old, prefix_map_additions, nullptr,
+                              expected_checksum));
 
   // At this point, the store map looks like this:
   // 4: 1111abcdefgh
   // 5: 22222bcdef
@@ -698,4 +781,21 @@ TEST_F(V4StoreTest, TestRemovalsWithRiceEncodingSucceeds) {
   EXPECT_TRUE(called_back);
 }
 
+TEST_F(V4StoreTest, TestMergeUpdatesFailsChecksum) {
+  // Proof of checksum mismatch using python:
+  // >>> import hashlib
+  // >>> m = hashlib.sha256()
+  // >>> m.update("2222")
+  // >>> m.digest()
+  // "\xed\xee)\xf8\x82T;\x95f
+  // \xb2m\x0e\xe0\xe7\xe9P9\x9b\x1cB"\xf5\xde\x05\xe0d%\xb4\xc9\x95\xe9"
+
+  HashPrefixMap prefix_map_old;
+  EXPECT_EQ(APPLY_UPDATE_SUCCESS,
+            V4Store::AddUnlumpedHashes(4, "2222", &prefix_map_old));
+  EXPECT_EQ(CHECKSUM_MISMATCH_FAILURE,
+            V4Store(task_runner_, store_path_)
+                .MergeUpdate(prefix_map_old, HashPrefixMap(), nullptr, "aawc"));
+}
+
 }  // namespace safe_browsing
diff --git a/tools/metrics/histograms/histograms.xml b/tools/metrics/histograms/histograms.xml
index 4334e5024c216f..9b18c366d45fd4 100644
--- a/tools/metrics/histograms/histograms.xml
+++ b/tools/metrics/histograms/histograms.xml
@@ -90354,6 +90354,7 @@ To add a new entry, add it with any value and run test to compute valid value.
+  <int value="11" label="CHECKSUM_MISMATCH_FAILURE"/>
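
Reviewer note, not part of the patch itself: the invariant the new code enforces is that the SHA-256 digest of the final set of hash prefixes, hashed in lexicographically sorted order, must equal the checksum.sha256 field of the ListUpdateResponse; otherwise the merge is rejected with CHECKSUM_MISMATCH_FAILURE. The short Python sketch below models that rule in the same spirit as the hashlib "proof" comments in v4_store_unittest.cc. The helper name merge_and_verify and its sample data are made up for illustration; the real MergeUpdate streams each prefix into crypto::SecureHash during the merge instead of building one sorted list.

import hashlib

def merge_and_verify(old_prefixes, added_prefixes, removal_indices,
                     expected_checksum):
    # Drop the removed indices from the (lexicographically sorted) old store,
    # merge in the additions, and hash the resulting prefixes in sorted order.
    removals = set(removal_indices)
    kept_old = [p for i, p in enumerate(sorted(old_prefixes))
                if i not in removals]
    merged = sorted(kept_old + list(added_prefixes))
    digest = hashlib.sha256(b"".join(merged)).digest()
    if expected_checksum and digest != expected_checksum:
        return None  # analogous to CHECKSUM_MISMATCH_FAILURE
    return merged    # analogous to APPLY_UPDATE_SUCCESS

# Tiny self-contained example (hypothetical data, not taken from the tests):
old = [b"2222", b"4444"]
added = [b"1111"]
expected = hashlib.sha256(b"111122224444").digest()
assert merge_and_verify(old, added, [], expected) == [b"1111", b"2222", b"4444"]

# A deliberately wrong checksum is rejected, much like the "aawc" value used in
# TestMergeUpdatesFailsChecksum:
assert merge_and_verify(old, added, [], b"aawc" + bytes(28)) is None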