updates
This commit is contained in:
24
definitions.py
Normal file
24
definitions.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# Benchmark sample definitions: each label maps to the file size
# (presumably in KB — the generator passes size*1000 to dd's bs= — TODO confirm)
# and the number of files to create under that label's directory.
SAMPLES = {
    "XXXS": {"size": 0.1, "number": 10000},
    "XXS": {"size": 10, "number": 10000},
    "XS": {"size": 100, "number": 10000},
    "S": {"size": 1000, "number": 1000},
    "M": {"size": 10000, "number": 500},
    "ML": {"size": 100000, "number": 100},
    "L": {"size": 1000000, "number": 20},
    "XL": {"size": 5000000, "number": 5},
}

# Mixed-size files that all land in a single "Rand" directory.
# BUG FIX: the generator script does
#   from definitions import ..., RANDOM_FILES, ...
# but this constant was missing, so the import failed. Values mirror the
# former script-local `rand` dict that this module replaced.
RANDOM_FILES = {
    "XXS": {"size": 10, "number": 1000},
    "M": {"size": 10000, "number": 50},
    "L": {"size": 1000000, "number": 5},
}

# One benchmark directory per sample label, plus the mixed "Rand" directory.
BENCH_DIR_NAMES = list(SAMPLES.keys()) + ["Rand"]

# Small subset used for quick connectivity tests.
TEST_FILES = {
    "XXS": {"size": 10, "number": 100},
    "M": {"size": 10000, "number": 5},
}

# Local mount points for the two storage backends under test.
INST_STO_ROOT = "/mnt/inststo/"
CEPH_ROOT = "/mnt/vol/"

# rsync daemon endpoint (host::module) and the share-relative roots there.
SERV_URL = "172.172.0.196::share_rsync"
REMOTE_INST_STO_ROOT = "inststo/"
REMOTE_CEPH_ROOT = "vol/"
"""Generate the benchmark file trees by writing random data with dd."""
import os
import subprocess
from uuid import uuid4

from loguru import logger as log
from tqdm import tqdm

from definitions import INST_STO_ROOT, RANDOM_FILES, SAMPLES, TEST_FILES


def _fill_dir(dir_path, spec, name_for):
    """Create `dir_path` and fill it with `spec["number"]` random files.

    Each file is one dd block of spec["size"]*1000 bytes read from
    /dev/urandom; `name_for(i)` supplies the i-th file's name.
    """
    os.makedirs(dir_path, exist_ok=True)
    for i in tqdm(range(spec["number"])):
        subprocess.run(
            [
                "dd",
                "if=/dev/urandom",
                f"of={dir_path}/{name_for(i)}",
                # size is fractional for the smallest label, hence int() after *1000
                f"bs={int(spec['size'] * 1000)}",
                "count=1",
            ],
        )


if __name__ == "__main__":
    # One directory per sample label, files named by their index.
    for label, spec in SAMPLES.items():
        log.info(f"processing {label}")
        _fill_dir(INST_STO_ROOT + label, spec, lambda i: i)

    # Mixed-size files all dumped into a single Rand directory; uuid names
    # keep the different size groups from overwriting each other.
    # BUG FIX: this loop iterated SAMPLES although RANDOM_FILES was imported
    # for exactly this purpose (it mirrors the old script-local `rand` dict),
    # so the "Rand" directory got the wrong number and mix of files.
    for label, spec in RANDOM_FILES.items():
        log.info(f"processing RANDOM {label}")
        _fill_dir(INST_STO_ROOT + "Rand", spec, lambda i: uuid4())

    # Small connectivity-test subset.
    for label, spec in TEST_FILES.items():
        log.info(f"processing tests {label}")
        _fill_dir(INST_STO_ROOT + "test", spec, lambda i: uuid4())
"""Run the bench cases."""
# copyright: Canonical
import datetime
import json
import subprocess
from pathlib import Path

from loguru import logger as log

from definitions import (
    BENCH_DIR_NAMES,
    CEPH_ROOT,
    INST_STO_ROOT,
    REMOTE_INST_STO_ROOT,
    SERV_URL,
)

# Accumulated result rows; the first row is the header.
RESULTS = [["Run num", "test_case", "files", "MB/s", "time", "full size"]]


def run_single_bench(runnum, test_case, files_type, source, target):
    """rsync `source` into `target`, record throughput, then delete `target`.

    Appends [runnum, test_case, files_type, MB/s, seconds, size in MB]
    to the module-level RESULTS list.
    """
    log.info(f"Syncing {runnum} {test_case} {files_type}")
    start = datetime.datetime.now()
    subprocess.run(
        ["rsync", "-a", source, target],
        capture_output=True,
    )
    elapsed = datetime.datetime.now() - start
    # Total bytes that actually landed in the target tree.
    size = sum(f.stat().st_size for f in Path(target).glob("**/*") if f.is_file())
    # NOTE(review): bytes -> MB should only need /1000/1000; the extra /8
    # looks like a bits/bytes mix-up. Kept to match previously recorded
    # results — confirm before comparing against other tools' numbers.
    size_MB = size / 1000 / 1000 / 8
    speed = size_MB / elapsed.total_seconds()
    log.info(f"{files_type} - {speed} MB/s")
    # Clean up so the next run starts from an empty target.
    subprocess.run(["rm", "-rf", target])
    RESULTS.append(
        [runnum, test_case, files_type, speed, elapsed.total_seconds(), size_MB]
    )


if __name__ == "__main__":
    for runnum in range(50):
        for files_type in BENCH_DIR_NAMES:
            # Remote instance storage -> local instance storage.
            run_single_bench(
                runnum,
                "Distant-InsSto to InsSto",
                files_type,
                f"{SERV_URL}/{REMOTE_INST_STO_ROOT}/{files_type}",
                INST_STO_ROOT + files_type,
            )

            # Remote instance storage -> local CEPH volume.
            run_single_bench(
                runnum,
                "Distant-InsSto to CEPH",
                files_type,
                f"{SERV_URL}/{REMOTE_INST_STO_ROOT}/{files_type}",
                CEPH_ROOT + files_type,
            )

    # BUG FIX: close the results file deterministically instead of leaking
    # the handle from json.dump(RESULTS, open("results.json", "w")).
    with open("results.json", "w") as fp:
        json.dump(RESULTS, fp)
||||||
|
|||||||
Reference in New Issue
Block a user