Adding blob granule client test target and local cluster setup

Repository: https://github.com/apple/foundationdb.git
Parent:     5df3bac110
Commit:     8fa25aa013
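The change has two parts: CMake wiring that registers a blob-granule variant of the C API test target (fdb_c_api_tests_bg) and teaches add_fdbclient_test to pass a --bg-enabled flag to the local test cluster, and Python TestRunner changes that give the local cluster an extra blob_worker process, a knob_bg_url pointing at a local file store, a database configured with blob_granules_enabled:=1, and a blobrange start over the full keyspace.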
@@ -253,6 +253,21 @@ endif()
       ${CMAKE_SOURCE_DIR}/bindings/c/test/apitester/tests
   )
 
+  add_fdbclient_test(
+    NAME fdb_c_api_tests_bg
+    DISABLE_LOG_DUMP
+    API_TEST_BG_ENABLED
+    COMMAND ${CMAKE_SOURCE_DIR}/bindings/c/test/apitester/run_c_api_tests.py
+            --cluster-file
+            @CLUSTER_FILE@
+            --tester-binary
+            $<TARGET_FILE:fdb_c_api_tester>
+            --external-client-library
+            ${CMAKE_CURRENT_BINARY_DIR}/libfdb_c_external.so
+            --test-dir
+            ${CMAKE_SOURCE_DIR}/bindings/c/test/apitester/tests
+  )
+
   if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" AND NOT USE_SANITIZER)
     add_test(NAME fdb_c_upgrade_single_threaded_630api
       COMMAND ${CMAKE_SOURCE_DIR}/tests/TestRunner/upgrade_test.py
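The new target ultimately just runs run_c_api_tests.py against a freshly started local cluster, and is then available as a ctest entry named fdb_c_api_tests_bg. A minimal Python sketch of the equivalent manual invocation, with placeholder source/build paths (the real values come from ${CMAKE_SOURCE_DIR}, ${CMAKE_CURRENT_BINARY_DIR}, @CLUSTER_FILE@ and $<TARGET_FILE:fdb_c_api_tester> at configure time):

    import subprocess

    # Placeholder locations -- not taken from the commit.
    SRC = "/path/to/foundationdb"
    BUILD = "/path/to/build"
    CLUSTER_FILE = "/path/to/fdb.cluster"  # stands in for @CLUSTER_FILE@

    cmd = [
        "python3", f"{SRC}/bindings/c/test/apitester/run_c_api_tests.py",
        "--cluster-file", CLUSTER_FILE,
        "--tester-binary", f"{BUILD}/bin/fdb_c_api_tester",  # assumed location of $<TARGET_FILE:fdb_c_api_tester>
        "--external-client-library", f"{BUILD}/bindings/c/libfdb_c_external.so",
        "--test-dir", f"{SRC}/bindings/c/test/apitester/tests",
    ]
    subprocess.run(cmd, check=True)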
@@ -405,6 +405,7 @@ endfunction()
 # Creates a single cluster before running the specified command (usually a ctest test)
 function(add_fdbclient_test)
   set(options DISABLED ENABLED DISABLE_LOG_DUMP)
+  set(options DISABLED ENABLED API_TEST_BG_ENABLED)
   set(oneValueArgs NAME PROCESS_NUMBER TEST_TIMEOUT WORKING_DIRECTORY)
   set(multiValueArgs COMMAND)
   cmake_parse_arguments(T "${options}" "${oneValueArgs}" "${multiValueArgs}" "${ARGN}")
@@ -431,6 +432,9 @@ function(add_fdbclient_test)
   if(T_DISABLE_LOG_DUMP)
       list(APPEND TMP_CLUSTER_CMD --disable-log-dump)
   endif()
+  if(T_API_TEST_BG_ENABLED)
+      list(APPEND TMP_CLUSTER_CMD --bg-enabled)
+  endif()
   message(STATUS "Adding Client test ${T_NAME}")
   add_test(NAME "${T_NAME}"
     WORKING_DIRECTORY ${T_WORKING_DIRECTORY}
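Within add_fdbclient_test, cmake_parse_arguments exposes the new API_TEST_BG_ENABLED option as T_API_TEST_BG_ENABLED; the branch above forwards it to the temp-cluster command as --bg-enabled, which the Python changes below consume. The remaining hunks appear to touch the TestRunner cluster scripts: the fdbmonitor config template, the LocalCluster constructor and its config/database setup, and the TempCluster wrapper and its CLI.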
@@ -66,6 +66,7 @@ public-address = {ip_address}:$ID
 listen-address = public
 datadir = {datadir}/$ID
 logdir = {logdir}
+knob_bg_url=file://{datadir}/fdbblob/
 # logsize = 10MiB
 # maxlogssize = 100MiB
 # machine-id =
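The new knob points blob granule storage at a file:// URL under the cluster's data directory. A tiny sketch of how the template line renders (the datadir value is illustrative, not from the commit):

    template = "knob_bg_url=file://{datadir}/fdbblob/\n"
    print(template.format(datadir="/tmp/fdb_test_cluster/data"), end="")
    # -> knob_bg_url=file:///tmp/fdb_test_cluster/data/fdbblob/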
@@ -82,7 +83,7 @@ logdir = {logdir}
 """
 
     def __init__(self, basedir: str, fdbserver_binary: str, fdbmonitor_binary: str, fdbcli_binary: str,
-                 process_number: int, create_config=True, port=None, ip_address=None):
+                 process_number: int, create_config=True, port=None, ip_address=None, bg_enabled: bool=False):
         self.basedir = Path(basedir)
         self.etc = self.basedir.joinpath('etc')
         self.log = self.basedir.joinpath('log')
@@ -100,6 +101,11 @@ logdir = {logdir}
         self.process_number = process_number
         self.ip_address = '127.0.0.1' if ip_address is None else ip_address
         self.first_port = port
+        self.bg_enabled = bg_enabled
+        if (bg_enabled):
+            # add extra process for blob_worker
+            self.process_number += 1
+
         if (self.first_port is not None):
             self.last_used_port = int(self.first_port)-1
             self.server_ports = [self.__next_port()
@@ -111,6 +117,7 @@ logdir = {logdir}
         self.process = None
         self.fdbmonitor_logfile = None
         self.use_legacy_conf_syntax = False
+
         if create_config:
             self.create_cluster_file()
             self.save_config()
@@ -143,6 +150,9 @@ logdir = {logdir}
             for port in self.server_ports:
                 f.write('[fdbserver.{server_port}]\n'.format(
                     server_port=port))
+            if (self.bg_enabled):
+                # make last process a blob_worker class
+                f.write('class = blob_worker')
             f.flush()
             os.fsync(f.fileno())
 
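With bg_enabled set, the constructor reserves one extra process and save_config tags the last [fdbserver.<port>] section with the blob_worker class. An illustrative sketch of the generated sections (ports are made up):

    # Mirrors the write loop above with example ports; the last process
    # is the extra one added for the blob worker.
    server_ports = [4500, 4501, 4502]
    bg_enabled = True

    conf = ""
    for port in server_ports:
        conf += '[fdbserver.{server_port}]\n'.format(server_port=port)
    if bg_enabled:
        # the class override lands under the last section header, as in the commit
        conf += 'class = blob_worker'
    print(conf)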
@@ -202,12 +212,21 @@ logdir = {logdir}
         db_config = 'configure new single {}'.format(storage)
         if (enable_tenants):
             db_config += " tenant_mode=optional_experimental"
+        if (self.bg_enabled):
+            db_config += " blob_granules_enabled:=1"
         args = [self.fdbcli_binary, '-C',
                 self.cluster_file, '--exec', db_config]
 
         res = subprocess.run(args, env=self.process_env())
         assert res.returncode == 0, "Create database failed with {}".format(
             res.returncode)
+
+        if (self.bg_enabled):
+            bg_args = [self.fdbcli_binary, '-C',
+                       self.cluster_file, '--exec', 'blobrange start \\x00 \\xff']
+            bg_res = subprocess.run(bg_args, env=self.process_env())
+            assert bg_res.returncode == 0, "Start blob granules failed with {}".format(bg_res.returncode)
+
     def get_status(self):
         args = [self.fdbcli_binary, '-C', self.cluster_file, '--exec',
                 'status json']
@@ -11,7 +11,7 @@ from pathlib import Path
 
 
 class TempCluster:
-    def __init__(self, build_dir: str, process_number: int = 1, port: str = None):
+    def __init__(self, build_dir: str, process_number: int = 1, port: str = None, bg_enabled: bool = False):
         self.build_dir = Path(build_dir).resolve()
         assert self.build_dir.exists(), "{} does not exist".format(build_dir)
         assert self.build_dir.is_dir(), "{} is not a directory".format(build_dir)
@@ -27,6 +27,7 @@ class TempCluster:
             self.build_dir.joinpath("bin", "fdbcli"),
             process_number,
             port=port,
+            bg_enabled=bg_enabled
         )
         self.log = self.cluster.log
         self.etc = self.cluster.etc
@@ -88,9 +89,14 @@ if __name__ == "__main__":
         help='Do not dump cluster log on error',
         action="store_true"
     )
+    parser.add_argument(
+        '--bg-enabled',
+        help='Enable blob granules',
+        action="store_true"
+    )
     args = parser.parse_args()
     errcode = 1
-    with TempCluster(args.build_dir, args.process_number) as cluster:
+    with TempCluster(args.build_dir, args.process_number, bg_enabled=args.bg_enabled) as cluster:
         print("log-dir: {}".format(cluster.log))
         print("etc-dir: {}".format(cluster.etc))
         print("data-dir: {}".format(cluster.data))
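Putting it together, a temporary cluster with blob granules can be brought up either through the new --bg-enabled CLI flag or directly from Python. A hedged usage sketch (the module name and build path are assumptions, not visible in this diff):

    from tmp_cluster import TempCluster  # module name assumed

    with TempCluster("/path/to/build", process_number=1, bg_enabled=True) as cluster:
        print("log-dir: {}".format(cluster.log))
        print("etc-dir: {}".format(cluster.etc))
        print("data-dir: {}".format(cluster.data))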