tools/rados: allow reuse object for write test
Currently, the rados bench write test always creates
new objects for testing. Object creation incurs
non-negligible metadata overhead, especially for
small-write performance. This patch allows objects to
be reused for write testing.

Signed-off-by: Li Wang <[email protected]>
dragonylffly committed Nov 29, 2018
1 parent 100e15f commit 7f141e7
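For context, a typical invocation sequence would look like the following (hypothetical pool name, run name, and durations; the flags themselves come from the usage text changed below). The first run keeps its objects and benchmark metadata with --no-cleanup, and the second run overwrites those same objects instead of creating new ones:

    rados bench -p testpool 60 write --no-cleanup --run-name reuse_test
    rados bench -p testpool 60 write --reuse-bench --run-name reuse_test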
Showing 3 changed files with 30 additions and 19 deletions.
32 changes: 18 additions & 14 deletions src/common/obj_bencher.cc
@@ -239,27 +239,31 @@ int ObjBencher::aio_bench(
uint64_t op_size, uint64_t object_size,
unsigned max_objects,
bool cleanup, bool hints,
- const std::string& run_name, bool no_verify) {
+ const std::string& run_name, bool reuse_bench, bool no_verify) {

if (concurrentios <= 0)
return -EINVAL;

int num_objects = 0;
int r = 0;
- int prevPid = 0;
+ int prev_pid = 0;
std::chrono::duration<double> timePassed;

// default metadata object is used if user does not specify one
const std::string run_name_meta = (run_name.empty() ? BENCH_LASTRUN_METADATA : run_name);

//get data from previous write run, if available
- if (operation != OP_WRITE) {
+ if (operation != OP_WRITE || reuse_bench) {
uint64_t prev_op_size, prev_object_size;
r = fetch_bench_metadata(run_name_meta, &prev_op_size, &prev_object_size,
- &num_objects, &prevPid);
+ &num_objects, &prev_pid);
if (r < 0) {
- if (r == -ENOENT)
-   cerr << "Must write data before running a read benchmark!" << std::endl;
+ if (r == -ENOENT) {
+   if (reuse_bench)
+     cerr << "Must write data before using reuse_bench for a write benchmark!" << std::endl;
+   else
+     cerr << "Must write data before running a read benchmark!" << std::endl;
+ }
return r;
}
object_size = prev_object_size;
@@ -289,21 +293,21 @@ int ObjBencher::aio_bench(
formatter->open_object_section("bench");

if (OP_WRITE == operation) {
- r = write_bench(secondsToRun, concurrentios, run_name_meta, max_objects);
+ r = write_bench(secondsToRun, concurrentios, run_name_meta, max_objects, prev_pid);
if (r != 0) goto out;
}
else if (OP_SEQ_READ == operation) {
- r = seq_read_bench(secondsToRun, num_objects, concurrentios, prevPid, no_verify);
+ r = seq_read_bench(secondsToRun, num_objects, concurrentios, prev_pid, no_verify);
if (r != 0) goto out;
}
else if (OP_RAND_READ == operation) {
- r = rand_read_bench(secondsToRun, num_objects, concurrentios, prevPid, no_verify);
+ r = rand_read_bench(secondsToRun, num_objects, concurrentios, prev_pid, no_verify);
if (r != 0) goto out;
}

if (OP_WRITE == operation && cleanup) {
r = fetch_bench_metadata(run_name_meta, &op_size, &object_size,
- &num_objects, &prevPid);
+ &num_objects, &prev_pid);
if (r < 0) {
if (r == -ENOENT)
cerr << "Should never happen: bench metadata missing for current run!" << std::endl;
@@ -313,7 +317,7 @@
data.start_time = mono_clock::now();
out(cout) << "Cleaning up (deleting benchmark objects)" << std::endl;

- r = clean_up(num_objects, prevPid, concurrentios);
+ r = clean_up(num_objects, prev_pid, concurrentios);
if (r != 0) goto out;

timePassed = mono_clock::now() - data.start_time;
@@ -377,7 +381,7 @@ int ObjBencher::fetch_bench_metadata(const std::string& metadata_file,

int ObjBencher::write_bench(int secondsToRun,
int concurrentios, const string& run_name_meta,
- unsigned max_objects) {
+ unsigned max_objects, int prev_pid) {
if (concurrentios <= 0)
return -EINVAL;

@@ -397,7 +401,7 @@ int ObjBencher::write_bench(int secondsToRun,
}
bufferlist* newContents = 0;

- std::string prefix = generate_object_prefix();
+ std::string prefix = prev_pid ? generate_object_prefix(prev_pid) : generate_object_prefix();
if (!formatter)
out(cout) << "Object prefix: " << prefix << std::endl;
else
@@ -627,7 +631,7 @@ int ObjBencher::write_bench(int secondsToRun,
encode(data.object_size, b_write);
num_objects = (data.finished + writes_per_object - 1) / writes_per_object;
encode(num_objects, b_write);
- encode(getpid(), b_write);
+ encode(prev_pid ? prev_pid : getpid(), b_write);
encode(data.op_size, b_write);

// persist meta-data for further cleanup or read
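As a side note on the mechanism above: a plain write run names its objects after the current process id, while a --reuse-bench run takes the pid recorded in the previous run's metadata object, regenerates the same object prefix, and overwrites the objects that run already created; the same pid is then re-encoded into the metadata so later read or cleanup runs still resolve the reused names. A minimal standalone C++ sketch of that prefix selection follows (not Ceph code; the hostname-and-pid naming scheme inside generate_object_prefix is an assumption here, and only the prev_pid-versus-getpid() choice is taken from the diff above):

    #include <iostream>
    #include <string>
    #include <unistd.h>   // getpid(), gethostname()

    // Illustration only: the real ObjBencher::generate_object_prefix() lives in
    // obj_bencher.cc; the "benchmark_data_<host>_<pid>" format here is assumed.
    static std::string generate_object_prefix(int pid = 0) {
      if (pid == 0)
        pid = getpid();                 // fresh run: name objects after our own pid
      char host[256] = {0};
      gethostname(host, sizeof(host) - 1);
      return std::string("benchmark_data_") + host + "_" + std::to_string(pid);
    }

    int main() {
      // In the patch, prev_pid is non-zero only when --reuse-bench found metadata
      // from an earlier write run; 12345 stands in for that decoded value here.
      int prev_pid = 12345;

      // Fresh write run: a new prefix, so new objects get created.
      std::string fresh = generate_object_prefix();

      // --reuse-bench write run: the previous run's prefix, so existing objects
      // are overwritten instead of created.
      std::string reused = prev_pid ? generate_object_prefix(prev_pid)
                                    : generate_object_prefix();

      std::cout << "fresh:  " << fresh << "\n"
                << "reused: " << reused << "\n";
      return 0;
    }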
6 changes: 3 additions & 3 deletions src/common/obj_bencher.h
@@ -75,9 +75,9 @@ class ObjBencher {
struct bench_data data;

int fetch_bench_metadata(const std::string& metadata_file, uint64_t* op_size,
- uint64_t* object_size, int* num_objects, int* prevPid);
+ uint64_t* object_size, int* num_objects, int* prev_pid);

- int write_bench(int secondsToRun, int concurrentios, const string& run_name_meta, unsigned max_objects);
+ int write_bench(int secondsToRun, int concurrentios, const string& run_name_meta, unsigned max_objects, int prev_pid);
int seq_read_bench(int secondsToRun, int num_objects, int concurrentios, int writePid, bool no_verify=false);
int rand_read_bench(int secondsToRun, int num_objects, int concurrentios, int writePid, bool no_verify=false);

@@ -112,7 +112,7 @@ class ObjBencher {
int aio_bench(
int operation, int secondsToRun,
int concurrentios, uint64_t op_size, uint64_t object_size, unsigned max_objects,
- bool cleanup, bool hints, const std::string& run_name, bool no_verify=false);
+ bool cleanup, bool hints, const std::string& run_name, bool reuse_bench, bool no_verify=false);
int clean_up(const std::string& prefix, int concurrentios, const std::string& run_name);

void set_show_time(bool dt) {
11 changes: 9 additions & 2 deletions src/tools/rados/rados.cc
@@ -103,7 +103,7 @@ void usage(ostream& out)
" rollback <obj-name> <snap-name> roll back object to snap <snap-name>\n"
"\n"
" listsnaps <obj-name> list the snapshots of this object\n"
" bench <seconds> write|seq|rand [-t concurrent_operations] [--no-cleanup] [--run-name run_name] [--no-hints]\n"
" bench <seconds> write|seq|rand [-t concurrent_operations] [--no-cleanup] [--run-name run_name] [--no-hints] [--reuse-bench]\n"
" default is 16 concurrent IOs and 4 MB ops\n"
" default is to clean up after write benchmark\n"
" default run-name is 'benchmark_last_metadata'\n"
@@ -1842,6 +1842,7 @@ static int rados_tool_common(const std::map < std::string, std::string > &opts,
int bench_write_dest = 0;
bool cleanup = true;
bool hints = true; // for rados bench
+ bool reuse_bench = false;
bool no_verify = false;
bool use_striper = false;
bool with_clones = false;
@@ -2028,6 +2029,10 @@ static int rados_tool_common(const std::map < std::string, std::string > &opts,
if (i != opts.end()) {
hints = false;
}
i = opts.find("reuse-bench");
if (i != opts.end()) {
reuse_bench = true;
}
i = opts.find("pretty-format");
if (i != opts.end()) {
pretty_format = true;
@@ -3224,7 +3229,7 @@ static int rados_tool_common(const std::map < std::string, std::string > &opts,
cout << "hints = " << (int)hints << std::endl;
ret = bencher.aio_bench(operation, seconds,
concurrent_ios, op_size, object_size,
- max_objects, cleanup, hints, run_name, no_verify);
+ max_objects, cleanup, hints, run_name, reuse_bench, no_verify);
if (ret != 0)
cerr << "error during benchmark: " << cpp_strerror(ret) << std::endl;
if (formatter && output)
@@ -3915,6 +3920,8 @@ int main(int argc, const char **argv)
opts["no-cleanup"] = "true";
} else if (ceph_argparse_flag(args, i, "--no-hints", (char*)NULL)) {
opts["no-hints"] = "true";
} else if (ceph_argparse_flag(args, i, "--reuse-bench", (char*)NULL)) {
opts["reuse-bench"] = "true";
} else if (ceph_argparse_flag(args, i, "--no-verify", (char*)NULL)) {
opts["no-verify"] = "true";
} else if (ceph_argparse_witharg(args, i, &val, "--run-name", (char*)NULL)) {
