13
13
import random
14
14
import time
15
15
import urllib .parse
16
+ from contextlib import contextmanager
16
17
from hashlib import sha256
17
18
from pathlib import Path
18
19
from typing import TYPE_CHECKING
27
28
from conda_libmamba_solver import shards , shards_cache
28
29
from conda_libmamba_solver .index import LibMambaIndexHelper
29
30
from conda_libmamba_solver .shards import (
30
- RepodataDict ,
31
31
ShardLike ,
32
- Shards ,
32
+ ShardsIndex ,
33
33
fetch_shards ,
34
34
shard_mentioned_packages ,
35
35
)
36
- from conda_libmamba_solver .shards_subset import build_repodata_subset
36
+ from conda_libmamba_solver .shards_subset import Node , build_repodata_subset , fetch_channels
37
37
from tests .channel_testing .helpers import _dummy_http_server
38
38
39
39
if TYPE_CHECKING :
@@ -157,19 +157,10 @@ def test_fetch_shards(conda_no_token: None):
157
157
158
158
channels .append (Channel ("conda-forge-sharded" ))
159
159
160
- channel_data : dict [str , ShardLike ] = {}
161
- for channel in channels :
162
- for channel_url in Channel (channel ).urls (True , context .subdirs ):
163
- subdir_data = SubdirData (Channel (channel_url ))
164
- found = fetch_shards (subdir_data )
165
- if not found :
166
- repodata_json , _ = subdir_data .repo_fetch .fetch_latest_parsed ()
167
- repodata_json = RepodataDict (repodata_json ) # type: ignore
168
- found = ShardLike (repodata_json , channel_url )
169
- channel_data [channel_url ] = found
160
+ channel_data = fetch_channels (channels )
170
161
171
162
# at least one should be real shards, not repodata.json presented as shards.
172
- assert any (isinstance (channel , Shards ) for channel in channel_data .values ())
163
+ assert any (isinstance (channel , ShardsIndex ) for channel in channel_data .values ())
173
164
174
165
175
166
def test_shard_cache (tmp_path : Path ):
@@ -317,6 +308,9 @@ def test_shardlike():
317
308
318
309
319
310
def test_shardlike_repr ():
311
+ """
312
+ Code coverage for ShardLike.__repr__()
313
+ """
320
314
shardlike = ShardLike (
321
315
{
322
316
"packages" : {},
@@ -325,7 +319,7 @@ def test_shardlike_repr():
325
319
},
326
320
"https://conda.anaconda.org/" ,
327
321
)
328
- cls , url , * rest = repr (shardlike ).split ()
322
+ cls , url , * _ = repr (shardlike ).split ()
329
323
assert "ShardLike" in cls
330
324
assert shardlike .url == url
331
325
@@ -361,7 +355,8 @@ def test_shardlike_repr():
361
355
362
356
def test_traverse_shards_3 (conda_no_token : None , tmp_path ):
363
357
"""
364
- Another go at the dependency traversal algorithm.
358
+ Build repodata subset using the third attempt at a dependency traversal
359
+ algorithm.
365
360
"""
366
361
367
362
logging .basicConfig (level = logging .INFO )
@@ -390,6 +385,9 @@ def test_traverse_shards_3(conda_no_token: None, tmp_path):
390
385
391
386
392
387
def test_shards_indexhelper (conda_no_token ):
388
+ """
389
+ Load LibMambaIndexHelper with parameters that will enable sharded repodata.
390
+ """
393
391
channels = [* context .default_channels , Channel ("conda-forge-sharded" )]
394
392
395
393
class fake_in_state :
@@ -407,3 +405,68 @@ class fake_in_state:
407
405
)
408
406
409
407
print (helper .repos )
408
+
409
+
410
@contextmanager
def _timer(name: str):
    """
    Print how long the enclosed block took, in seconds.

    :param name: label used in the printed timing line.

    The elapsed time is reported in a ``finally`` clause so that failing
    test sections (an exception raised inside the ``with`` body) still get
    timing output — without it, code after a bare ``yield`` never runs when
    the body raises.
    """
    # monotonic_ns is immune to wall-clock adjustments during the measurement
    begin = time.monotonic_ns()
    try:
        yield
    finally:
        end = time.monotonic_ns()
        print(f"{name} took {(end - begin) / 1e9:0.6f}s")
416
+
417
+
418
def test_parallel_fetcherator(conda_no_token: None):
    """
    Fetch the per-channel shard indexes, then retrieve the shards for a fixed
    set of root packages through the shared shard cache, timing each phase.
    """
    channels = [*context.default_channels, Channel("conda-forge-sharded")]
    root_names = [
        "ca-certificates",
        "icu",
        "expat",
        "libexpat",
        "libffi",
        "libmpdec",
        "libzlib",
        "openssl",
        "python",
        "readline",
        "liblzma",
        "xz",
        "libsqlite",
        "tk",
        "ncurses",
        "zlib",
        "pip",
        "twine",
        "python_abi",
        "tzdata",
    ]
    roots = [Node(distance=0, package=name, visited=False) for name in root_names]

    with _timer("repodata.json/shards index fetch"):
        channel_data = fetch_channels(channels)

    with _timer("Shard fetch"):
        # Only truly sharded channels participate; plain repodata.json
        # channels wrapped as ShardLike are ignored here.
        sharded = [
            channel
            for channel in channel_data.values()
            if isinstance(channel, ShardsIndex)
        ]
        assert sharded, "No sharded repodata found"

        # (index, package, shard URL) for every root present in each index,
        # iterating indexes in the outer loop and roots in the inner loop.
        wanted = [
            (shard, root.package, shard.shard_url(root.package))
            for shard in sharded
            for root in roots
            if root.package in shard
        ]

        print(len(wanted), "shards to fetch")

        shared_shard_cache = sharded[0].shards_cache
        from_cache = shared_shard_cache.retrieve_multiple(
            [shard_url for *_, shard_url in wanted]
        )

        for url, maybe_shard in from_cache.items():
            if maybe_shard is not None:
                print(f"Cache hit for {url}")

        # Attach each cache-hit Shard to its owning index's visited mapping.
        for shard, package, shard_url in wanted:
            if cached := from_cache.get(shard_url):
                shard.visited[package] = cached

    # XXX don't call everything Shard/Shards
0 commit comments