This commit is contained in:
alex
2023-09-06 12:23:40 +02:00
parent 8c87f4049a
commit 6dadb2da03
4 changed files with 96 additions and 49 deletions

View File

@@ -0,0 +1,2 @@
194.169.254.38
194.169.254.11

24
definitions.py Normal file
View File

@@ -0,0 +1,24 @@
# Shared constants for the rsync benchmark scripts (file generation and run).
# "size" is the file size in kB (generator uses bs=int(size*1000) bytes),
# "number" is how many files of that size to create.
SAMPLES = {
    "XXXS": {"size": 0.1, "number": 10000},
    "XXS": {"size": 10, "number": 10000},
    "XS": {"size": 100, "number": 10000},
    "S": {"size": 1000, "number": 1000},
    "M": {"size": 10000, "number": 500},
    "ML": {"size": 100000, "number": 100},
    "L": {"size": 1000000, "number": 20},
    "XL": {"size": 5000000, "number": 5},
}
# Directory names the bench runner iterates: one per size class plus "Rand",
# the directory holding the mixed-size randomly named files.
BENCH_DIR_NAMES = list(SAMPLES.keys()) + ["Rand"]
# Mixed-size files written into the "Rand" directory.
# NOTE: the generator script imports RANDOM_FILES from this module; without
# this definition that import fails (values taken from its former local dict).
RANDOM_FILES = {
    "XXS": {"size": 10, "number": 1000},
    "M": {"size": 10000, "number": 50},
    "L": {"size": 1000000, "number": 5},
}
# Small subset used for quick connectivity tests.
TEST_FILES = {
    "XXS": {"size": 10, "number": 100},
    "M": {"size": 10000, "number": 5},
}
# Local mount points.
INST_STO_ROOT = "/mnt/inststo/"
CEPH_ROOT = "/mnt/vol/"
# rsync daemon endpoint and the remote module-relative roots.
SERV_URL = "172.172.0.196::share_rsync"
REMOTE_INST_STO_ROOT = "inststo/"
REMOTE_CEPH_ROOT = "vol/"

View File

@@ -6,38 +6,18 @@ from uuid import uuid4
from loguru import logger as log
from tqdm import tqdm

from definitions import INST_STO_ROOT, RANDOM_FILES, SAMPLES, TEST_FILES


def _write_random_file(dest, size_kb):
    """Write one file of *size_kb* kB of random bytes at path *dest* via dd."""
    subprocess.run(
        [
            "dd",
            "if=/dev/urandom",
            f"of={dest}",
            f"bs={int(size_kb * 1000)}",
            "count=1",
        ],
        capture_output=True,
        # shell=True,
    )


if __name__ == "__main__":
    # Fixed-size sample sets: one directory per size class, files named 0..n-1.
    for path, data in SAMPLES.items():
        log.info(f"processing {path}")
        os.makedirs(INST_STO_ROOT + path, exist_ok=True)
        for x in tqdm(range(data["number"])):
            _write_random_file(f"{INST_STO_ROOT + path}/{x}", data["size"])
    # Mixed-size files with random (uuid4) names, all in one "Rand" directory.
    # BUG FIX: this loop iterated SAMPLES while the imported RANDOM_FILES was
    # never used; RANDOM_FILES is the intended (much smaller) random set.
    for path, data in RANDOM_FILES.items():
        log.info(f"processing RANDOM {path}")
        os.makedirs(INST_STO_ROOT + "Rand", exist_ok=True)
        for x in tqdm(range(data["number"])):
            _write_random_file(f"{INST_STO_ROOT}Rand/{uuid4()}", data["size"])
    # Small connectivity-test set in a "test" directory.
    for path, data in TEST_FILES.items():
        log.info(f"processing tests {path}")
        os.makedirs(INST_STO_ROOT + "test", exist_ok=True)
        for x in tqdm(range(data["number"])):
            _write_random_file(f"{INST_STO_ROOT}test/{uuid4()}", data["size"])

79
run.py
View File

@@ -1,27 +1,68 @@
"Test connectivity on a small subset of files." "Run the bench cases."
# copyright: Canonical # copyright: Canonical
import datetime import datetime
import json
import subprocess import subprocess
from pathlib import Path from pathlib import Path
from loguru import logger as log from loguru import logger as log
SERV_URL = "172.172.0.196::share_rsync" from definitions import BENCH_DIR_NAMES, CEPH_ROOT, INST_STO_ROOT, REMOTE_INST_STO_ROOT, SERV_URL
TARGET = "/mnt/inststo/test"
RESULTS = [["Run num", "test_case", "files", "MB/s", "time", "full size"]]
def run_single_bench(
    runnum,
    test_case,
    files_type,
    source,
    target,
):
    """Rsync *source* into *target* once and record throughput.

    Appends ``[runnum, test_case, files_type, MB/s, seconds, size_MB]`` to the
    module-level RESULTS table, then deletes *target* so the next run starts
    from a cold destination.
    """
    log.info(f"Syncing {runnum} {test_case} {files_type}")
    start = datetime.datetime.now()
    proc = subprocess.run(
        [
            "rsync",
            "-a",
            f"{source}",
            f"{target}",
        ],
        capture_output=True,
        # shell=True,
    )
    time = datetime.datetime.now() - start
    # A silently failed transfer would be recorded as a near-zero speed;
    # surface it in the log instead of swallowing the captured output.
    if proc.returncode != 0:
        log.warning(f"rsync exited {proc.returncode} for {source}: {proc.stderr}")
    size = sum(f.stat().st_size for f in Path(target).glob("**/*") if f.is_file())
    # st_size is in bytes; the previous "/ 8" treated it as bits and
    # under-reported throughput by a factor of eight.
    size_MB = size / 1000 / 1000
    speed = size_MB / time.total_seconds()
    log.info(f"{files_type} - {speed} MB/s")
    subprocess.run(["rm", "-rf", target])
    RESULTS.append([runnum, test_case, files_type, speed, time.total_seconds(), size_MB])
if __name__ == "__main__":
    # 50 repetitions over every bench directory, two destinations each.
    for runnum in range(50):
        for files_type in BENCH_DIR_NAMES:
            # Case 1: remote instance storage -> local instance storage.
            test_case = "Distant-InsSto to InsSto"
            source = f"{SERV_URL}/{REMOTE_INST_STO_ROOT}/{files_type}"
            target = INST_STO_ROOT + files_type
            run_single_bench(
                runnum,
                test_case,
                files_type,
                source,
                target,
            )

            # Case 2: remote instance storage -> CEPH volume.
            # NOTE(review): source is identical to case 1 — only the target
            # differs; confirm that is intended.
            test_case = "Distant-InsSto to CEPH"
            source = f"{SERV_URL}/{REMOTE_INST_STO_ROOT}/{files_type}"
            target = CEPH_ROOT + files_type
            run_single_bench(
                runnum,
                test_case,
                files_type,
                source,
                target,
            )
    # Use a context manager: the original passed open() straight into
    # json.dump(), leaking the handle and relying on GC to flush/close.
    with open("results.json", "w") as fh:
        json.dump(RESULTS, fh)