@@ -226,6 +226,132 @@ def run_coordinator_tests_dockerhub():
     return run_coordinator


+def test_self_contained_coordinator_dockerhub_preload():
+    try:
+        if run_coordinator_tests_dockerhub():
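+            # connect to the datasink Redis (DATASINK_PORT env var, default 6379) and start from a clean keyspace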
+            db_port = int(os.getenv("DATASINK_PORT", "6379"))
+            conn = redis.StrictRedis(port=db_port)
+            conn.ping()
+            conn.flushall()
+
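+            # request a new benchmark run of the prebuilt redis:7.4.0 Docker image (dockerhub flow)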
+            id = "dockerhub"
+            redis_version = "7.4.0"
+            run_image = f"redis:{redis_version}"
+            build_arch = "amd64"
+            testDetails = {}
+            build_os = "test_build_os"
+            build_stream_fields, result = generate_benchmark_stream_request(
+                id,
+                conn,
+                run_image,
+                build_arch,
+                testDetails,
+                build_os,
+            )
+            build_stream_fields["mnt_point"] = ""
+            if result is True:
+                benchmark_stream_id = conn.xadd(
+                    STREAM_KEYNAME_NEW_BUILD_EVENTS, build_stream_fields
+                )
+                logging.info(
+                    "successfully requested a new run {}. Stream id: {}".format(
+                        build_stream_fields, benchmark_stream_id
+                    )
+                )
+
+                build_variant_name = "gcc:8.5.0-amd64-debian-buster-default"
+                expected_datapoint_ts = None
+
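+                # the build event should be on the new-build-events stream before the coordinator consumes it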
+                assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
+                assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
+                running_platform = "fco-ThinkPad-T490"
+
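+                # prepare the consumer group, docker client, topologies and test-suite inputs for the coordinator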
+                build_runners_consumer_group_create(conn, running_platform, "0")
+                datasink_conn = redis.StrictRedis(port=db_port)
+                docker_client = docker.from_env()
+                home = str(Path.home())
+                stream_id = ">"
+                topologies_map = get_topologies(
+                    "./redis_benchmarks_specification/setups/topologies/topologies.yml"
+                )
+                # use a benchmark spec with a smaller CPU limit for the client, since GitHub runners
+                # only have 2 cores and we need 1 core for the DB and another for the client
+                testsuite_spec_files = [
+                    "./utils/tests/test_data/test-suites/generic-touch.yml"
+                ]
+                defaults_filename = "./utils/tests/test_data/test-suites/defaults.yml"
+                (
+                    _,
+                    _,
+                    default_metrics,
+                    _,
+                    _,
+                    _,
+                ) = get_defaults(defaults_filename)
+
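+                # consume a single stream entry and run the test suite end to end inside docker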
+                (
+                    result,
+                    stream_id,
+                    number_processed_streams,
+                    num_process_test_suites,
+                ) = self_contained_coordinator_blocking_read(
+                    conn,
+                    True,
+                    docker_client,
+                    home,
+                    stream_id,
+                    datasink_conn,
+                    testsuite_spec_files,
+                    topologies_map,
+                    running_platform,
+                    False,
+                    [],
+                    "",
+                    0,
+                    6399,
+                    1,
+                    False,
+                    5,
+                    default_metrics,
+                    "amd64",
+                    None,
+                    0,
+                    10000,
+                    "unstable",
+                    "",
+                    True,
+                    False,
+                )
+
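+                # exactly one stream entry and one test suite should have been processed,
+                # and by.version time series should now exist in the datasink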
+                assert result == True
+                assert number_processed_streams == 1
+                assert num_process_test_suites == 1
+                by_version_key = f"ci.benchmarks.redislabs/ci/redis/redis/memtier_benchmark-1Mkeys-generic-touch-pipeline-10/by.version/{redis_version}/benchmark_end/oss-standalone/memory_maxmemory"
+                assert datasink_conn.exists(by_version_key)
+                rts = datasink_conn.ts()
+                # check we have by version metrics
+                assert "version" in rts.info(by_version_key).labels
+                assert redis_version == rts.info(by_version_key).labels["version"]
+
+                # get all keys
+                all_keys = datasink_conn.keys("*")
+                by_hash_keys = []
+                for key in all_keys:
+                    if "/by.hash/" in key.decode():
+                        by_hash_keys.append(key)
+
+                # ensure we have by hash keys
+                assert len(by_hash_keys) > 0
+                for hash_key in by_hash_keys:
+                    # ensure we have both version and hash info on the key
+                    assert "version" in rts.info(hash_key).labels
+                    assert "hash" in rts.info(hash_key).labels
+                    assert redis_version == rts.info(hash_key).labels["version"]
+
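+    # when no local Redis is reachable the ConnectionError is swallowed and the test is effectively skipped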
+    except redis.exceptions.ConnectionError:
+        pass
+
+
 def test_self_contained_coordinator_dockerhub():
     try:
         if run_coordinator_tests_dockerhub():