Skip to content
Merged
Show file tree
Hide file tree
Changes from 12 commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
7e399fd
Document important PackagesList methods
AbrilRBS Aug 26, 2025
7ad1c0a
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Aug 27, 2025
23538dc
wip
memsharded Aug 28, 2025
cc59fb5
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Sep 2, 2025
211f7e7
wip
memsharded Sep 2, 2025
90e5588
proposal draft
memsharded Sep 2, 2025
2ddef51
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Sep 4, 2025
eb30098
wip
memsharded Sep 4, 2025
2c423f8
merged develop2
memsharded Sep 5, 2025
38af848
dirty, but to see if tests pass
memsharded Sep 5, 2025
75e5601
fix test
memsharded Sep 5, 2025
02508f8
remove print
memsharded Sep 5, 2025
fe8b6f5
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Sep 9, 2025
300f442
review
memsharded Sep 9, 2025
2430ad8
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Sep 10, 2025
fbf24ec
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Sep 11, 2025
1f777b7
walk()->items() + accessor
memsharded Sep 11, 2025
6526420
remove private ._data access
memsharded Sep 11, 2025
4f524ae
wip
memsharded Sep 14, 2025
de79873
wip
memsharded Sep 14, 2025
3540999
wip
memsharded Sep 14, 2025
b2cfd62
wip
memsharded Sep 14, 2025
71e45b9
Some last changes to the documentation
AbrilRBS Sep 17, 2025
3ff9ff8
Merge branch 'develop2' into ar/packagelists-prefs
AbrilRBS Sep 17, 2025
9726b40
Update conan/api/model/list.py
memsharded Sep 17, 2025
e8c6222
Update conan/api/model/list.py
memsharded Sep 17, 2025
4704ec7
Update test/integration/command/upload/test_upload_bundle.py
memsharded Sep 17, 2025
62e6553
Merge branch 'develop2' into ar/packagelists-prefs
memsharded Sep 22, 2025
8011560
review, normalized package list name
memsharded Sep 22, 2025
843ef22
Remove last pkglist in help strings
AbrilRBS Sep 22, 2025
75c083c
Update conan/cli/commands/audit.py
memsharded Sep 22, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 46 additions & 17 deletions conan/api/model/list.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import copy
import fnmatch
import json
import os
from json import JSONDecodeError

from conan.api.model import RecipeReference, PkgReference
from conan.api.output import ConanOutput
from conan.errors import ConanException
from conan.internal.errors import NotFoundException
from conan.internal.model.version_range import VersionRange
Expand Down Expand Up @@ -104,7 +106,7 @@ def load_graph(graphfile, graph_recipes=None, graph_binaries=None, context=None)
)

mpkglist = MultiPackagesList._define_graph(graph, graph_recipes, graph_binaries,
context=base_context)
context=base_context)
if context == "build-only":
host = MultiPackagesList._define_graph(graph, graph_recipes, graph_binaries,
context="host")
Expand Down Expand Up @@ -193,7 +195,10 @@ def _define_graph(graph, graph_recipes=None, graph_binaries=None, context=None):
class PackagesList:
""" A collection of recipes, revisions and packages."""
def __init__(self):
self.recipes = {}
self._data = {}

def __bool__(self):
    # Truthiness mirrors the underlying recipe dict: an empty
    # PackagesList is falsy, any stored recipe makes it truthy.
    return len(self._data) != 0

def merge(self, other):
def recursive_dict_update(d, u): # TODO: repeated from conandata.py
Expand All @@ -203,40 +208,40 @@ def recursive_dict_update(d, u): # TODO: repeated from conandata.py
else:
d[k] = v
return d
recursive_dict_update(self.recipes, other.recipes)
recursive_dict_update(self._data, other._data)

def keep_outer(self, other):
if not self.recipes:
if not self._data:
return

for ref, info in other.recipes.items():
if self.recipes.get(ref, {}) == info:
self.recipes.pop(ref)
for ref, info in other._data.items():
if self._data.get(ref, {}) == info:
self._data.pop(ref)

def split(self):
"""
Returns a list of PackageList, splitted one per reference.
This can be useful to parallelize things like upload, parallelizing per-reference
"""
result = []
for r, content in self.recipes.items():
for r, content in self._data.items():
subpkglist = PackagesList()
subpkglist.recipes[r] = content
subpkglist._data[r] = content
result.append(subpkglist)
return result

def only_recipes(self) -> None:
""" Filter out all the packages and package revisions, keep only the recipes and
recipe revisions in self.recipes.
recipe revisions in self._data.
"""
for ref, ref_dict in self.recipes.items():
for ref, ref_dict in self._data.items():
for rrev_dict in ref_dict.get("revisions", {}).values():
rrev_dict.pop("packages", None)

def add_refs(self, refs):
# RREVS alreday come in ASCENDING order, so upload does older revisions first
for ref in refs:
ref_dict = self.recipes.setdefault(str(ref), {})
ref_dict = self._data.setdefault(str(ref), {})
if ref.revision:
revs_dict = ref_dict.setdefault("revisions", {})
rev_dict = revs_dict.setdefault(ref.revision, {})
Expand All @@ -245,7 +250,7 @@ def add_refs(self, refs):

def add_prefs(self, rrev, prefs):
# Prevs already come in ASCENDING order, so upload does older revisions first
revs_dict = self.recipes[str(rrev)]["revisions"]
revs_dict = self._data[str(rrev)]["revisions"]
rev_dict = revs_dict[rrev.revision]
packages_dict = rev_dict.setdefault("packages", {})

Expand All @@ -259,15 +264,18 @@ def add_prefs(self, rrev, prefs):

def add_configurations(self, confs):
for pref, conf in confs.items():
rev_dict = self.recipes[str(pref.ref)]["revisions"][pref.ref.revision]
rev_dict = self._data[str(pref.ref)]["revisions"][pref.ref.revision]
try:
rev_dict["packages"][pref.package_id]["info"] = conf
except KeyError: # If package_id does not exist, do nothing, only add to existing prefs
pass

def refs(self):
kk
ConanOutput().warning("PackageLists.refs() non-public, non-documented method will be "
"removed, use .items() instead", warn_tag="deprecated")
result = {}
for ref, ref_dict in self.recipes.items():
for ref, ref_dict in self._data.items():
for rrev, rrev_dict in ref_dict.get("revisions", {}).items():
t = rrev_dict.get("timestamp")
recipe = RecipeReference.loads(f"{ref}#{rrev}") # TODO: optimize this
Expand All @@ -276,8 +284,29 @@ def refs(self):
result[recipe] = rrev_dict
return result

def walk(self):
    """ Iterate over the whole package list.

    Yields one ``(recipe, rrev_dict, packages)`` triple per recipe revision:

    - ``recipe``: a ``RecipeReference`` (with its timestamp set when present).
    - ``rrev_dict``: the raw bundle dict for that recipe revision.
    - ``packages``: dict mapping each ``PkgReference`` (one per package
      revision) to its bundle dict.
    """
    for ref, ref_dict in self._data.items():
        for rrev, rrev_dict in ref_dict.get("revisions", {}).items():
            recipe = RecipeReference.loads(f"{ref}#{rrev}")  # TODO: optimize this
            t = rrev_dict.get("timestamp")
            if t is not None:
                recipe.timestamp = t
            packages = {}
            for package_id, pkg_bundle in rrev_dict.get("packages", {}).items():
                prevs = pkg_bundle.get("revisions", {})
                for prev, prev_bundle in prevs.items():
                    # NOTE(review): pop() mutates the stored bundle — the
                    # "timestamp" key is removed from self._data as a side
                    # effect of iterating. Presumably intentional (it moves
                    # onto the PkgReference below), but confirm callers do
                    # not rely on re-walking and seeing the timestamp again.
                    t = prev_bundle.pop("timestamp", None)
                    pref = PkgReference(recipe, package_id, prev, t)
                    packages[pref] = prev_bundle
            yield recipe, rrev_dict, packages

@staticmethod
def prefs(ref, recipe_bundle):
""" Get all the package references for a given recipe reference given a bundle."""
kk
ConanOutput().warning("PackageLists.prefs() non-public, non-documented method will be "
"removed, use .items() instead", warn_tag="deprecated")
result = {}
for package_id, pkg_bundle in recipe_bundle.get("packages", {}).items():
prevs = pkg_bundle.get("revisions", {})
Expand All @@ -289,13 +318,13 @@ def prefs(ref, recipe_bundle):

def serialize(self):
""" Serialize the instance to a dictionary."""
return self.recipes.copy()
return copy.deepcopy(self._data)

@staticmethod
def deserialize(data):
""" Loads the data from a serialized dictionary."""
result = PackagesList()
result.recipes = data
result._data = copy.deepcopy(data)
return result


Expand Down
12 changes: 6 additions & 6 deletions conan/api/subapi/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,14 +109,14 @@ def clean(self, package_list, source=True, build=True, download=True, temp=True,
for f in backup_files:
remove(f)

for ref, ref_bundle in package_list.refs().items():
for ref, _, packages in package_list.walk():
ConanOutput(ref.repr_notime()).verbose("Cleaning recipe cache contents")
ref_layout = cache.recipe_layout(ref)
if source:
rmdir(ref_layout.source())
if download:
rmdir(ref_layout.download_export())
for pref, _ in package_list.prefs(ref, ref_bundle).items():
for pref, _ in packages.items():
ConanOutput(pref).verbose("Cleaning package cache contents")
pref_layout = cache.pkg_layout(pref)
if build:
Expand All @@ -135,7 +135,7 @@ def save(self, package_list, tgz_path, no_source=False):
compresslevel = global_conf.get("core.gzip:compresslevel", check_type=int)
tar_files: dict[str, str] = {} # {path_in_tar: abs_path}

for ref, ref_bundle in package_list.refs().items():
for ref, ref_bundle, packages in package_list.walk():
ref_layout = cache.recipe_layout(ref)
recipe_folder = os.path.relpath(ref_layout.base_folder, cache_folder)
recipe_folder = recipe_folder.replace("\\", "/") # make win paths portable
Expand All @@ -152,7 +152,7 @@ def save(self, package_list, tgz_path, no_source=False):
if os.path.exists(path):
tar_files[f"{recipe_folder}/{DOWNLOAD_EXPORT_FOLDER}/{METADATA}"] = path

for pref, pref_bundle in package_list.prefs(ref, ref_bundle).items():
for pref, pref_bundle in packages.items():
pref_layout = cache.pkg_layout(pref)
pkg_folder = pref_layout.package()
folder = os.path.relpath(pkg_folder, cache_folder)
Expand Down Expand Up @@ -194,7 +194,7 @@ def restore(self, path):
# After unzipping the files, we need to update the DB that references these files
out = ConanOutput()
package_list = PackagesList.deserialize(json.loads(pkglist))
for ref, ref_bundle in package_list.refs().items():
for ref, ref_bundle, packages in package_list.walk():
ref.timestamp = revision_timestamp_now()
ref_bundle["timestamp"] = ref.timestamp
try:
Expand All @@ -207,7 +207,7 @@ def restore(self, path):
# In the case of recipes, they are always "in place", so just checking it
assert rel_path == recipe_folder, f"{rel_path}!={recipe_folder}"
out.info(f"Restore: {ref} in {recipe_folder}")
for pref, pref_bundle in package_list.prefs(ref, ref_bundle).items():
for pref, pref_bundle in packages.items():
pref.timestamp = revision_timestamp_now()
pref_bundle["timestamp"] = pref.timestamp
try:
Expand Down
14 changes: 7 additions & 7 deletions conan/api/subapi/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,19 +83,19 @@ def download_full(self, package_list: PackagesList, remote: Remote,
"""Download the recipes and packages specified in the ``package_list`` from the remote,
parallelized based on ``core.download:parallel``"""
def _download_pkglist(pkglist):
for ref, recipe_bundle in pkglist.refs().items():
for ref, ref_dict, packages in pkglist.walk():
self.recipe(ref, remote, metadata)
recipe_bundle.pop("files", None)
recipe_bundle.pop("upload-urls", None)
for pref, pref_bundle in pkglist.prefs(ref, recipe_bundle).items():
ref_dict.pop("files", None)
ref_dict.pop("upload-urls", None)
for pref, pkg_dict in packages.items():
self.package(pref, remote, metadata)
pref_bundle.pop("files", None)
pref_bundle.pop("upload-urls", None)
pkg_dict.pop("files", None)
pkg_dict.pop("upload-urls", None)

t = time.time()
parallel = self._conan_api.config.get("core.download:parallel", default=1, check_type=int)
thread_pool = ThreadPool(parallel) if parallel > 1 else None
if not thread_pool or len(package_list.refs()) <= 1:
if not thread_pool or len(package_list._data) <= 1:
_download_pkglist(package_list)
else:
ConanOutput().subtitle(f"Downloading with {parallel} parallel threads")
Expand Down
12 changes: 6 additions & 6 deletions conan/api/subapi/list.py
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ def explain_missing_binaries(self, ref, conaninfo, remotes):
pkglist.add_prefs(ref, [pref])
pkglist.add_configurations({pref: candidate.binary_config})
# Add the diff data
rev_dict = pkglist.recipes[str(pref.ref)]["revisions"][pref.ref.revision]
rev_dict = pkglist._data[str(pref.ref)]["revisions"][pref.ref.revision]
rev_dict["packages"][pref.package_id]["diff"] = candidate.serialize()
remote = candidate.remote.name if candidate.remote else "Local Cache"
rev_dict["packages"][pref.package_id]["remote"] = remote
Expand All @@ -319,7 +319,7 @@ def find_remotes(self, package_list, remotes):
result = MultiPackagesList()
for r in remotes:
result_pkg_list = PackagesList()
for ref, recipe_bundle in package_list.refs().items():
for ref, recipe_bundle, packages in package_list.walk():
ref_no_rev = copy.copy(ref) # TODO: Improve ugly API
ref_no_rev.revision = None
try:
Expand All @@ -329,7 +329,7 @@ def find_remotes(self, package_list, remotes):
if ref not in revs: # not found
continue
result_pkg_list.add_refs([ref])
for pref, pref_bundle in package_list.prefs(ref, recipe_bundle).items():
for pref, pref_bundle in packages.items():
pref_no_rev = copy.copy(pref) # TODO: Improve ugly API
pref_no_rev.revision = None
try:
Expand All @@ -340,7 +340,7 @@ def find_remotes(self, package_list, remotes):
result_pkg_list.add_prefs(ref, [pref])
info = recipe_bundle["packages"][pref.package_id]["info"]
result_pkg_list.add_configurations({pref: info})
if result_pkg_list.recipes:
if result_pkg_list:
result.add(r.name, result_pkg_list)
return result

Expand Down Expand Up @@ -371,9 +371,9 @@ def outdated(self, deps_graph, remotes):
remote_ref_list = self.select(ref_pattern, package_query=None, remote=remote)
except NotFoundException:
continue
if not remote_ref_list.recipes:
if not remote_ref_list:
continue
str_latest_ref = list(remote_ref_list.recipes.keys())[-1]
str_latest_ref = list(remote_ref_list._data.keys())[-1]
recipe_ref = RecipeReference.loads(str_latest_ref)
if (node_info["latest_remote"] is None
or node_info["latest_remote"]["ref"] < recipe_ref):
Expand Down
6 changes: 3 additions & 3 deletions conan/api/subapi/upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,14 +38,14 @@ def check_upstream(self, package_list: PackagesList, remote: Remote, enabled_rem
A ``force_upload`` key will be added to the entries that will be uploaded.
"""
app = ConanApp(self._conan_api)
for ref, bundle in package_list.refs().items():
for ref, ref_info, _ in package_list.walk():
layout = app.cache.recipe_layout(ref)
conanfile_path = layout.conanfile()
conanfile = app.loader.load_basic(conanfile_path, remotes=enabled_remotes)
if conanfile.upload_policy == "skip":
ConanOutput().info(f"{ref}: Skipping upload of binaries, "
"because upload_policy='skip'")
bundle["packages"] = {}
ref_info["packages"] = {}

UploadUpstreamChecker(app).check(package_list, remote, force)

Expand Down Expand Up @@ -130,7 +130,7 @@ def _upload_pkglist(pkglist, subtitle=lambda _: None):
ConanOutput().title(f"Uploading to remote {remote.name}")
parallel = self._conan_api.config.get("core.upload:parallel", default=1, check_type=int)
thread_pool = ThreadPool(parallel) if parallel > 1 else None
if not thread_pool or len(package_list.recipes) <= 1:
if not thread_pool or len(package_list._data) <= 1:
_upload_pkglist(package_list, subtitle=ConanOutput().subtitle)
else:
ConanOutput().subtitle(f"Uploading with {parallel} parallel threads")
Expand Down
2 changes: 1 addition & 1 deletion conan/cli/commands/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def download(conan_api: ConanAPI, parser, *args):
ref_pattern = ListPattern(args.pattern, package_id="*", only_recipe=args.only_recipe)
package_list = conan_api.list.select(ref_pattern, args.package_query, remote)

if package_list.recipes:
if package_list:
conan_api.download.download_full(package_list, remote, args.metadata)
else:
ConanOutput().warning(f"No packages were downloaded because the package list is empty.")
Expand Down
Loading
Loading