Skip to content

Commit

Permalink
curvefs/metaserver: fixed the case where the length of a saved string exceeds the …
Browse files Browse the repository at this point in the history
…limit size

This caused saving a snapshot to fail. We have now removed the length limit for saved
strings, since those strings already exist in memory, while still retaining the length
limit for loaded strings to prevent reading a corrupted length. (opencurve#768)
  • Loading branch information
Wine93 committed Dec 6, 2021
1 parent a31fcd2 commit a1a3a5b
Show file tree
Hide file tree
Showing 2 changed files with 32 additions and 5 deletions.
8 changes: 3 additions & 5 deletions curvefs/src/metaserver/dumpfile.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ using ::curve::common::CRC32;

const std::string DumpFile::kCurvefs_ = "CURVEFS"; // NOLINT
const uint8_t DumpFile::kVersion_ = 1;
const uint32_t DumpFile::kMaxStringLength_ = 1024 * 1024; // 1MB
const uint32_t DumpFile::kMaxStringLength_ = 1024 * 1024 * 1024; // 1GB

std::ostream& operator<<(std::ostream& os, DUMPFILE_ERROR code) {
static auto code2str = std::map<DUMPFILE_ERROR, std::string> {
Expand Down Expand Up @@ -203,10 +203,6 @@ DUMPFILE_ERROR DumpFile::SaveString(const std::string& str,
off_t* offset,
uint32_t* checkSum) {
size_t length = str.size();
if (length > kMaxStringLength_) {
return DUMPFILE_ERROR::EXCEED_MAX_STRING_LENGTH;
}

auto retCode = Write(str.c_str(), *offset, length);
if (retCode == DUMPFILE_ERROR::OK) {
*offset = (*offset) + length;
Expand Down Expand Up @@ -247,6 +243,8 @@ DUMPFILE_ERROR DumpFile::LoadString(std::string* str,
size_t length,
uint32_t* checkSum) {
if (length > kMaxStringLength_) {
LOG(ERROR) << "The loaded string is too large, size("
<< length << ") > limit(" << kMaxStringLength_ << ")";
return DUMPFILE_ERROR::EXCEED_MAX_STRING_LENGTH;
}

Expand Down
29 changes: 29 additions & 0 deletions curvefs/test/metaserver/dumpfile_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -223,6 +223,35 @@ TEST_F(DumpFileTest, TestSaveBigData) {
ASSERT_EQ(dumpfile_->GetLoadStatus(), DUMPFILE_LOAD_STATUS::COMPLETE);
}

// Verifies the 1 GiB load-side string limit: a value exactly at the limit
// round-trips through save/load, while a value one byte over the limit is
// rejected during load (INVALID_PAIRS).
TEST_F(DumpFileTest, TestLoadLargeValue) {
    Hash hash;
    const int maxValueLength = 1024 * 1024 * 1024;  // 1 GiB, matches kMaxStringLength_
    auto hashIterator = std::make_shared<HashIterator>(&hash);

    // Walk the loaded iterator and check that it yields exactly |npairs|
    // entries, each keyed "key" with a value of |length| bytes.
    auto checkLoading = [](std::shared_ptr<Iterator> iter,
                           const int length,
                           const int npairs) {
        int nseen = 0;
        for (iter->SeekToFirst(); iter->Valid(); iter->Next()) {
            nseen++;
            ASSERT_EQ(iter->Key(), "key");
            ASSERT_EQ(iter->Value().size(), length);
        }
        ASSERT_EQ(nseen, npairs);
    };

    // CASE 1: a value exactly at the limit loads successfully.
    hash["key"] = std::string(maxValueLength, '.');
    ASSERT_EQ(dumpfile_->SaveBackground(hashIterator), DUMPFILE_ERROR::OK);
    checkLoading(dumpfile_->Load(), maxValueLength, 1);
    ASSERT_EQ(dumpfile_->GetLoadStatus(), DUMPFILE_LOAD_STATUS::COMPLETE);

    // CASE 2: one byte over the limit saves fine but fails to load.
    hash["key"] = std::string(maxValueLength + 1, '.');
    ASSERT_EQ(dumpfile_->SaveBackground(hashIterator), DUMPFILE_ERROR::OK);
    checkLoading(dumpfile_->Load(), maxValueLength + 1, 0);
    ASSERT_EQ(dumpfile_->GetLoadStatus(), DUMPFILE_LOAD_STATUS::INVALID_PAIRS);
}

TEST_F(DumpFileTest, TestFileNotOpen) {
Hash hash;
auto hashIterator = std::make_shared<HashIterator>(&hash);
Expand Down

0 comments on commit a1a3a5b

Please sign in to comment.