alex
2023-09-05 16:47:21 +02:00
parent 10f9209c51
commit 8c87f4049a
5 changed files with 220 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
venv
*.pyc

92
create_instances.py Normal file

@@ -0,0 +1,92 @@
# copyright: Canonical
"""Tools for setting up a rsync bench environment."""
import json
import subprocess
def run_osc(cmdlist: list[str]):
"""Launch a nicely formatted command on openstack and retrieve result in json.
Args:
cmdlist (list[str]): Cmd line args
Returns:
_type_: _description_
"""
cmdlist.extend(["--format", "json"])
res = subprocess.run(cmdlist, capture_output=True, text=True)
return json.loads(res.stdout or "[]")
def provision():
"""Provision servers on openstack."""
run_osc(
[
"openstack",
"flavor",
"create",
"--public",
"8c-64r-150s",
"--ram",
"65536",
"--disk",
"150",
"--vcpus 8",
]
)
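    # Bring up two identical servers; each gets a floating IP and an extra data volume.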
for x in range(2):
serv_resp = run_osc(
[
"openstack",
"server",
"create",
"--image",
"ubuntu2204",
"--key-name",
"alexmicouleau",
"--network",
"bench-net",
"--flavor",
"8c-64r-150s",
f"alex_rsync_{x}",
]
)
serv_id = serv_resp["id"]
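        # Allocate a floating IP on ext-net, then attach it to the freshly created server.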
fip_res = run_osc(
[
"openstack",
"floating",
"ip",
"create",
"--description",
f"alex_rsync_{x}",
"ext-net",
]
)
fip_ad = fip_res["floating_ip_address"]
run_osc(
[
"openstack",
"server",
"add",
"floating",
"ip",
f"{serv_id}",
f"{fip_ad}",
]
)
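        # Create a 150 GB volume and attach it to the server to hold the sample data.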
vol_res = run_osc(
[
"openstack",
"volume",
"create",
"--size",
"150",
f"alex_rsync{x}",
]
)
vol_id = vol_res["id"]
        run_osc(["openstack", "server", "add", "volume", serv_id, vol_id])

78
generate_samples.py Normal file

@@ -0,0 +1,78 @@
# copyright: Canonical
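"""Generate corpora of sample files of various sizes for the rsync benchmark."""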
import os
import subprocess
from uuid import uuid4
from loguru import logger as log
from tqdm import tqdm
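# Size classes: "size" is the per-file size in kB (dd block size = size * 1000 bytes),
# "number" is how many files to generate in that class.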
files = {
"XXXS": {"size": 0.1, "number": 10000},
"XXS": {"size": 10, "number": 10000},
"XS": {"size": 100, "number": 10000},
"S": {"size": 1000, "number": 1000},
"M": {"size": 10000, "number": 500},
"ML": {"size": 100000, "number": 100},
"L": {"size": 1000000, "number": 20},
"XL": {"size": 5000000, "number": 5},
}
rand = {
"XXS": {"size": 10, "number": 1000},
"M": {"size": 10000, "number": 50},
"L": {"size": 1000000, "number": 5},
}
tests = {
"XXS": {"size": 10, "number": 100},
"M": {"size": 10000, "number": 5},
}
root = "/mnt/inststo/"
if __name__ == "__main__":
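    # Fixed corpus: one directory per size class, files named 0..N-1.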
for path, data in files.items():
log.info(f"processing {path}")
os.makedirs(root + path, exist_ok=True)
for x in tqdm(range(data["number"])):
subprocess.run(
[
"dd",
"if=/dev/urandom",
f"of={root+path}/{x}",
f"bs={int(data['size']*1000)}",
"count=1",
],
capture_output=True,
# shell=True,
)
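    # Random corpus: uniquely named files of several sizes, all placed in a single Rand/ directory.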
for path, data in rand.items():
log.info(f"processing RANDOM {path}")
os.makedirs(root + "Rand", exist_ok=True)
for x in tqdm(range(data["number"])):
subprocess.run(
[
"dd",
"if=/dev/urandom",
f"of={root}Rand/{uuid4()}",
f"bs={int(data['size']*1000)}",
"count=1",
],
capture_output=True,
# shell=True,
)
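    # Small "test" corpus consumed by run.py for the connectivity check.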
for path, data in tests.items():
log.info(f"processing tests {path}")
os.makedirs(root + "test", exist_ok=True)
for x in tqdm(range(data["number"])):
subprocess.run(
[
"dd",
"if=/dev/urandom",
f"of={root}test/{uuid4()}",
f"bs={int(data['size']*1000)}",
"count=1",
],
capture_output=True,
# shell=True,
)

21
requirements.txt Normal file

@@ -0,0 +1,21 @@
asttokens==2.4.0
backcall==0.2.0
decorator==5.1.1
exceptiongroup==1.1.3
executing==1.2.0
ipython==8.15.0
jedi==0.19.0
loguru==0.7.1
matplotlib-inline==0.1.6
parso==0.8.3
pexpect==4.8.0
pickleshare==0.7.5
prompt-toolkit==3.0.39
ptyprocess==0.7.0
pure-eval==0.2.2
Pygments==2.16.1
six==1.16.0
stack-data==0.6.2
tqdm==4.66.1
traitlets==5.9.0
wcwidth==0.2.6

27
run.py Normal file

@@ -0,0 +1,27 @@
"Test connectivity on a small subset of files."
# copyright: Canonical
import datetime
import subprocess
from pathlib import Path
from loguru import logger as log
SERV_URL = "172.172.0.196::share_rsync"
TARGET = "/mnt/inststo/test"
if __name__ == "__main__":
for x in range(10):
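        # Pull the "test" corpus from the rsync daemon and time the transfer.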
start = datetime.datetime.now()
subprocess.run(
[
"rsync",
"-a",
f"{SERV_URL}/inststo/test",
f"/mnt/inststo",
],
capture_output=True,
# shell=True,
)
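        # Throughput = bytes received / elapsed seconds; wipe the target so every run starts cold.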
time = datetime.datetime.now() - start
size = sum(f.stat().st_size for f in Path(TARGET).glob("**/*") if f.is_file())
log.info(f"{size/time.total_seconds()/1000/1000/8} MB/s")
subprocess.run(["rm", "-rf", TARGET])