diff --git a/README.md b/README.md
index e69de29..8fd9aca 100644
--- a/README.md
+++ b/README.md
@@ -0,0 +1,2 @@
+194.169.254.38
+194.169.254.11
\ No newline at end of file
diff --git a/definitions.py b/definitions.py
new file mode 100644
index 0000000..3d2d4ca
--- /dev/null
+++ b/definitions.py
@@ -0,0 +1,31 @@
+# "size" is the per-file size in kB (generate_samples.py runs dd with bs=size*1000, count=1);
+# "number" is how many files are generated per directory.
+SAMPLES = {
+    "XXXS": {"size": 0.1, "number": 10000},
+    "XXS": {"size": 10, "number": 10000},
+    "XS": {"size": 100, "number": 10000},
+    "S": {"size": 1000, "number": 1000},
+    "M": {"size": 10000, "number": 500},
+    "ML": {"size": 100000, "number": 100},
+    "L": {"size": 1000000, "number": 20},
+    "XL": {"size": 5000000, "number": 5},
+}
+
+RANDOM_FILES = {
+    "XXS": {"size": 10, "number": 1000},
+    "M": {"size": 10000, "number": 50},
+    "L": {"size": 1000000, "number": 5},
+}
+
+BENCH_DIR_NAMES = list(SAMPLES.keys()) + ["Rand"]
+
+TEST_FILES = {
+    "XXS": {"size": 10, "number": 100},
+    "M": {"size": 10000, "number": 5},
+}
+
+INST_STO_ROOT = "/mnt/inststo/"
+CEPH_ROOT = "/mnt/vol/"
+SERV_URL = "172.172.0.196::share_rsync"
+REMOTE_INST_STO_ROOT = "inststo/"
+REMOTE_CEPH_ROOT = "vol/"
diff --git a/generate_samples.py b/generate_samples.py
index bc356b7..3340b16 100644
--- a/generate_samples.py
+++ b/generate_samples.py
@@ -6,38 +6,18 @@ from uuid import uuid4
 from loguru import logger as log
 from tqdm import tqdm
 
-files = {
-    "XXXS": {"size": 0.1, "number": 10000},
-    "XXS": {"size": 10, "number": 10000},
-    "XS": {"size": 100, "number": 10000},
-    "S": {"size": 1000, "number": 1000},
-    "M": {"size": 10000, "number": 500},
-    "ML": {"size": 100000, "number": 100},
-    "L": {"size": 1000000, "number": 20},
-    "XL": {"size": 5000000, "number": 5},
-}
-
-rand = {
-    "XXS": {"size": 10, "number": 1000},
-    "M": {"size": 10000, "number": 50},
-    "L": {"size": 1000000, "number": 5},
-}
-tests = {
-    "XXS": {"size": 10, "number": 100},
-    "M": {"size": 10000, "number": 5},
-}
-root = "/mnt/inststo/"
+from definitions import INST_STO_ROOT, RANDOM_FILES, SAMPLES, TEST_FILES
 
 if __name__ == "__main__":
-    for path, data in files.items():
+    for path, data in SAMPLES.items():
         log.info(f"processing {path}")
-        os.makedirs(root + path, exist_ok=True)
+        os.makedirs(INST_STO_ROOT + path, exist_ok=True)
         for x in tqdm(range(data["number"])):
             subprocess.run(
                 [
                     "dd",
                     "if=/dev/urandom",
-                    f"of={root+path}/{x}",
+                    f"of={INST_STO_ROOT+path}/{x}",
                     f"bs={int(data['size']*1000)}",
                     "count=1",
                 ],
@@ -45,15 +25,15 @@ if __name__ == "__main__":
                 # shell=True,
             )
 
-    for path, data in rand.items():
+    for path, data in RANDOM_FILES.items():
         log.info(f"processing RANDOM {path}")
-        os.makedirs(root + "Rand", exist_ok=True)
+        os.makedirs(INST_STO_ROOT + "Rand", exist_ok=True)
         for x in tqdm(range(data["number"])):
             subprocess.run(
                 [
                     "dd",
                     "if=/dev/urandom",
-                    f"of={root}Rand/{uuid4()}",
+                    f"of={INST_STO_ROOT}Rand/{uuid4()}",
                     f"bs={int(data['size']*1000)}",
                     "count=1",
                 ],
@@ -61,15 +41,15 @@ if __name__ == "__main__":
                 # shell=True,
             )
 
-    for path, data in tests.items():
+    for path, data in TEST_FILES.items():
         log.info(f"processing tests {path}")
-        os.makedirs(root + "test", exist_ok=True)
+        os.makedirs(INST_STO_ROOT + "test", exist_ok=True)
         for x in tqdm(range(data["number"])):
             subprocess.run(
                 [
                     "dd",
                     "if=/dev/urandom",
-                    f"of={root}test/{uuid4()}",
+                    f"of={INST_STO_ROOT}test/{uuid4()}",
                     f"bs={int(data['size']*1000)}",
                     "count=1",
                 ],
diff --git a/run.py b/run.py
index 44860d8..8176ff7 100644
--- a/run.py
+++ b/run.py
@@ -1,27 +1,70 @@
-"Test connectivity on a small subset of files."
+"Run the bench cases."
 # copyright: Canonical
 import datetime
+import json
 import subprocess
 from pathlib import Path
 
 from loguru import logger as log
 
-SERV_URL = "172.172.0.196::share_rsync"
-TARGET = "/mnt/inststo/test"
+from definitions import BENCH_DIR_NAMES, CEPH_ROOT, INST_STO_ROOT, REMOTE_INST_STO_ROOT, SERV_URL
+
+RESULTS = [["run_num", "test_case", "files_type", "MB/s", "seconds", "size_MB"]]
+
+
+def run_single_bench(
+    runnum,
+    test_case,
+    files_type,
+    source,
+    target,
+):
+    "Rsync one directory, record the throughput, then remove the local copy."
+    log.info(f"Syncing {runnum} {test_case} {files_type}")
+    start = datetime.datetime.now()
+    subprocess.run(
+        [
+            "rsync",
+            "-a",
+            source,
+            target,
+        ],
+        capture_output=True,
+        # shell=True,
+    )
+    time = datetime.datetime.now() - start
+    size = sum(f.stat().st_size for f in Path(target).glob("**/*") if f.is_file())
+    size_MB = size / 1000 / 1000  # st_size is in bytes
+    speed = size_MB / time.total_seconds()
+    log.info(f"{files_type} - {speed} MB/s")
+    subprocess.run(["rm", "-rf", target])
+    RESULTS.append([runnum, test_case, files_type, speed, time.total_seconds(), size_MB])
+
+
 if __name__ == "__main__":
-    for x in range(10):
-        start = datetime.datetime.now()
-        subprocess.run(
-            [
-                "rsync",
-                "-a",
-                f"{SERV_URL}/inststo/test",
-                f"/mnt/inststo",
-            ],
-            capture_output=True,
-            # shell=True,
-        )
-        time = datetime.datetime.now() - start
-        size = sum(f.stat().st_size for f in Path(TARGET).glob("**/*") if f.is_file())
-        log.info(f"{size/time.total_seconds()/1000/1000/8} MB/s")
-        subprocess.run(["rm", "-rf", TARGET])
+    for runnum in range(50):
+        for files_type in BENCH_DIR_NAMES:
+            test_case = "Distant-InsSto to InsSto"
+            source = f"{SERV_URL}/{REMOTE_INST_STO_ROOT}{files_type}"
+            target = INST_STO_ROOT + files_type
+            run_single_bench(
+                runnum,
+                test_case,
+                files_type,
+                source,
+                target,
+            )
+
+            test_case = "Distant-InsSto to CEPH"
+            source = f"{SERV_URL}/{REMOTE_INST_STO_ROOT}{files_type}"
+            target = CEPH_ROOT + files_type
+            run_single_bench(
+                runnum,
+                test_case,
+                files_type,
+                source,
+                target,
+            )
+
+    with open("results.json", "w") as results_file:
+        json.dump(RESULTS, results_file)
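
Note: run.py dumps every row of RESULTS to results.json. A minimal sketch (not part of the diff above) of how those rows could be aggregated into per-case averages, assuming the column order defined in RESULTS:

import json
from collections import defaultdict
from statistics import mean

with open("results.json") as f:
    header, *rows = json.load(f)

# Group throughput readings by (test_case, files_type) and average them.
speeds = defaultdict(list)
for run_num, test_case, files_type, mb_per_s, seconds, size_mb in rows:
    speeds[(test_case, files_type)].append(mb_per_s)

for (test_case, files_type), values in sorted(speeds.items()):
    print(f"{test_case:30} {files_type:5} {mean(values):8.2f} MB/s over {len(values)} runs")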
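
As a rough sanity check on the size_MB column, the expected payload of each sample directory can be derived from the SAMPLES table and the dd invocation in generate_samples.py (bs = size*1000 bytes, count = 1 per file); this sketch is illustrative only:

from definitions import SAMPLES

for name, data in SAMPLES.items():
    per_file_bytes = data["size"] * 1000  # matches the dd bs argument
    total_mb = per_file_bytes * data["number"] / 1_000_000
    print(f"{name:5} {data['number']:6} files x {per_file_bytes:>12.0f} B = {total_mb:12.1f} MB")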