Thanks for this, Dave. My comments are inline below. NB I have just done a code review, and haven't actually run the code.
On 11 March 2016 at 16:29, Dave Lerner <[email protected]> wrote:
> For customised image package removal change behavior.
> From:
>     Only display the immediate dependents of the requested package
>     to remove, not the full dependent list, that is dependents of
>     dependents ...
>     Do not remove the displayed dependents, just notify the user
>     of the list.
> To:
>     Display the complete dependent tree, traversing all reverse
>     dependencies starting from the package to be removed and then it's
>     dependents.
>     Change the modal dialog to note that all of these dependents will
>     be removed automatically.
>
> [YOCTO #9121]
>
> Signed-off-by: Dave Lerner <[email protected]>
> ---
>  bitbake/lib/toaster/toastergui/views.py | 98 +++++++++++++++++++++++++++++----
>  1 file changed, 86 insertions(+), 12 deletions(-)
>
> diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py
> index 85ca9be..4442d33 100755
> --- a/bitbake/lib/toaster/toastergui/views.py
> +++ b/bitbake/lib/toaster/toastergui/views.py
> @@ -2538,6 +2538,67 @@ if True:
>
>          return response
>
> +    def _traverse_dependents(next_package_id, list, all_current_packages, tree_level):
> +        """
> +        Recurse through dependents (reverse dependency) tree up to a
> +        depth of MAX_DEPENDENCY_TREE_DEPTH just in case their are circular
> +        dependencies. A large system has 500 packages, exceeding a
> +        dependency chain that long is unlikely.
> +        Don't scan for dependencies for a package already in list.
> +        Return the updated list with new dependencies.
> +        """
> +        MAX_DEPENDENCY_TREE_DEPTH = 500
> +        if tree_level >= MAX_DEPENDENCY_TREE_DEPTH:
> +            logger.warning(
> +                "The number of dependents in the dependency tree"
> +                "for this package exceeds " + MAX_DEPENDENDENCY_TREE_DEPTH +
> +                " and the remaining dependents will not be removed")
> +            return list, True

This makes me a bit nervous, but I'm not sure if there's a better way to
deal with it. Could we reduce the maximum depth of the tree, e.g. set it
to the smaller of 500 or len(all_current_packages)? (I'm not sure what
all_current_packages has in it, but if it means all packages in the
image, then the max depth of a genuine dependency chain would be equal
to the number of packages - 1.) There's a rough sketch of what I mean
below the end of the quoted function.

> +
> +        package = CustomImagePackage.objects.get(id=next_package_id)
> +        dependents = \
> +            package.package_dependencies_target.annotate(
> +                name=F('package__name'),
> +                pk=F('package__pk'),
> +                size=F('package__size'),
> +            ).values("name", "pk", "size").exclude(
> +                ~Q(pk__in=all_current_packages)
> +            )
> +
> +        truncated = False
> +        for pkg in dependents:
> +            if pkg in list:
> +                # already seen, skip dependent search
> +                continue
> +
> +            list.append(pkg)
> +            extension, truncated = _traverse_dependents(

I was slightly confused by the use of extension, then realised that it's
not actually used for anything. Maybe a comment there to that effect?

I'm also not 100% sure about the structure of this, as the method is
being used in two modes, each of which only cares about one of the
return values; and the value we're actually interested in is an argument
which is being updated in place. Though I can't think of a better
alternative right now.

> +                pkg["pk"], list, all_current_packages, tree_level+1)
> +            if truncated:
> +                break
> +
> +        return list, truncated
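To make the depth-cap idea concrete, this is roughly what I mean (an
untested sketch, and I'm assuming all_current_packages holds the pks of
every package currently in the image):

    MAX_DEPENDENCY_TREE_DEPTH = 500
    # A genuine (non-circular) dependency chain can't be longer than the
    # number of packages in the image, so cap the recursion at that when
    # it's smaller than the absolute limit.
    max_depth = min(MAX_DEPENDENCY_TREE_DEPTH, len(all_current_packages))
    if tree_level >= max_depth:
        logger.warning("Dependency tree for this package exceeds %s levels; "
                       "remaining dependents will not be removed", max_depth)
        return list, True

(If all_current_packages is a queryset rather than a plain list, len()
will evaluate it; .count() would avoid that.)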
> +
> +    def _get_all_dependents(package_id, all_current_packages, flat):
> +        """
> +        Returns the full list of dependents, also known as reverse
> +        dependencies, for package_id, recursing through dependency
> +        relationships.
> +        Returns either list of dictionary items or a list of
> +        CustomImagePackage model objects depending on arg flat=False
> +        """
> +        import itertools

My preference would be for all imports to be at the top of the file.

> +        list = []
> +        list, truncated = _traverse_dependents(
> +            package_id, list, all_current_packages, 0)

Rather than pass 0 and [] here, I would change the signature of
_traverse_dependents to:

    def _traverse_dependents(next_package_id, all_current_packages,
                             list=[], tree_level=0):

Then call it here with:

    list, truncated = _traverse_dependents(
        package_id, all_current_packages)

> +
> +        # sort and remove duplicates
> +        sortedlist = sorted(list, key=lambda x: x["name"])
> +        list = [x[0] for x in itertools.groupby(sortedlist)]

(This is the line you marked as unnecessary, as the list already contains
unique elements.)

> +
> +        if flat:
> +            list = [CustomImagePackage.objects.get(id=entry["pk"]) for entry in list]

Could this be done with one query, e.g.

    if flat:
        ids = [entry['pk'] for entry in list]
        list = list(CustomImagePackage.objects.filter(id__in=ids))

? Otherwise we run a query for each entry in the list.

By the way, is the cast to list necessary, as the queryset is iterable
and countable?

I suggest changing the variable name "list" to something else, too.

> +        return list
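Pulling those last few points together, the end of _get_all_dependents
could look something like this (an untested sketch; I've used dependents
as a stand-in for the renamed list variable -- note that while the local
is still called list, the list(...) cast in my snippet above wouldn't
actually work, because the name shadows the builtin):

    if flat:
        ids = [entry["pk"] for entry in dependents]
        # One query instead of one query per dependent. The resulting
        # queryset is iterable and countable, so callers may not need a
        # real list at all.
        dependents = CustomImagePackage.objects.filter(id__in=ids)

    return dependents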
>
>      @xhr_response
>      def xhr_customrecipe_packages(request, recipe_id, package_id):
> @@ -2606,15 +2667,10 @@ if True:
>                  )
>
>                  # Reverse dependencies which are needed by packages that are
> -                # in the image
> -                reverse_deps = package.package_dependencies_target.annotate(
> -                    name=F('package__name'),
> -                    pk=F('package__pk'),
> -                    size=F('package__size'),
> -                ).values("name", "pk", "size").exclude(
> -                    ~Q(pk__in=all_current_packages)
> -                )
> -
> +                # in the image. Recursive search providing all dependents,
> +                # not just immediate dependents.
> +                reverse_deps = _get_all_dependents(
> +                    package_id, all_current_packages, flat=False)
>                  total_size_deps = 0
>                  total_size_reverse_deps = 0
>
> @@ -2658,6 +2714,11 @@ if True:
>
>                  else:
>                      recipe.appends_set.add(package)
> +                    # Make sure that package is not in the excludes set
> +                    try:
> +                        recipe.excludes_set.remove(package)
> +                    except:
> +                        pass
>                      # Add the dependencies we think will be added to the recipe
>                      # as a result of appending this package.
>                      # TODO this should recurse down the entire deps tree
> @@ -2668,11 +2729,12 @@ if True:
>
>                          recipe.includes_set.add(cust_package)
>                          try:
> -                            # when adding the pre-requisite package make sure it's not in the
> -                            # excluded list from a prior removal.
> +                            # When adding the pre-requisite package, make
> +                            # sure it's not in the excluded list from a
> +                            # prior removal.
>                              recipe.excludes_set.remove(cust_package)
>                          except Package.DoesNotExist:
> -                            # Don't care if the package had never been excluded
> +                            # Don't care if the package had never been
> +                            # excluded
>                              pass
>                  except:
>                      logger.warning("Could not add package's suggested"
> @@ -2688,6 +2750,18 @@ if True:
>                      recipe.excludes_set.add(package)
>                  else:
>                      recipe.appends_set.remove(package)
> +                all_current_packages = recipe.get_all_packages()
> +                reverse_deps = _get_all_dependents(
> +                    package.pk, all_current_packages, flat=True)
> +                for r in reverse_deps:
> +                    try:
> +                        if r.id in included_packages:
> +                            recipe.excludes_set.add(r)
> +                        else:
> +                            recipe.appends_set.remove(r)
> +                    except:
> +                        pass
> +
>                  return {"error": "ok"}
>              except CustomImageRecipe.DoesNotExist:
>                  return {"error": "Tried to remove package that wasn't present"}
> --
> 1.9.1
>
> --
> _______________________________________________
> toaster mailing list
> [email protected]
> https://lists.yoctoproject.org/listinfo/toaster
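One more thought on the _traverse_dependents signature I suggested above:
a default of list=[] is shared between calls in Python (the usual mutable
default argument gotcha), so it would be safer to default to None and
build the list inside the function. Roughly (untested, and renaming list
as suggested -- dependents is just my stand-in name):

    def _traverse_dependents(next_package_id, all_current_packages,
                             dependents=None, tree_level=0):
        # Create a fresh list per top-level call rather than sharing a
        # mutable default between calls.
        if dependents is None:
            dependents = []
        ...
        return dependents, truncated

The call in _get_all_dependents can then stay as simple as:

    dependents, truncated = _traverse_dependents(package_id, all_current_packages)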
--
Elliot Smith
Software Engineer
Intel Open Source Technology Centre

--
_______________________________________________
toaster mailing list
[email protected]
https://lists.yoctoproject.org/listinfo/toaster
