diff --git a/tests/unit/admin/views/test_users.py b/tests/unit/admin/views/test_users.py index d19499edf1e7..1ba37b2aa872 100644 --- a/tests/unit/admin/views/test_users.py +++ b/tests/unit/admin/views/test_users.py @@ -18,6 +18,7 @@ from webob.multidict import MultiDict from warehouse.admin.views import users as views +from warehouse.packaging.models import Project from ....common.db.accounts import User, UserFactory, EmailFactory from ....common.db.packaging import ( @@ -178,6 +179,7 @@ class TestUserDelete: def test_deletes_user(self, db_request, monkeypatch): user = UserFactory.create() project = ProjectFactory.create() + another_project = ProjectFactory.create() journal = JournalEntryFactory(submitted_by=user) RoleFactory(project=project, user=user, role_name='Owner') deleted_user = UserFactory.create(username="deleted-user") @@ -188,17 +190,12 @@ def test_deletes_user(self, db_request, monkeypatch): db_request.user = UserFactory.create() db_request.remote_addr = '10.10.10.10' - remove_project = pretend.call_recorder(lambda *a, **kw: None) - monkeypatch.setattr(views, 'remove_project', remove_project) - result = views.user_delete(db_request) db_request.db.flush() assert not db_request.db.query(User).get(user.id) - assert remove_project.calls == [ - pretend.call(project, db_request, flash=False), - ] + assert db_request.db.query(Project).all() == [another_project] assert db_request.route_path.calls == [pretend.call('admin.user.list')] assert result.status_code == 303 assert result.location == '/foobar' @@ -213,15 +210,12 @@ def test_deletes_user_bad_confirm(self, db_request, monkeypatch): db_request.params = {'username': 'wrong'} db_request.route_path = pretend.call_recorder(lambda a, **k: '/foobar') - remove_project = pretend.call_recorder(lambda *a, **kw: None) - monkeypatch.setattr(views, 'remove_project', remove_project) - result = views.user_delete(db_request) db_request.db.flush() assert db_request.db.query(User).get(user.id) - assert remove_project.calls 
== [] + assert db_request.db.query(Project).all() == [project] assert db_request.route_path.calls == [ pretend.call('admin.user.detail', user_id=user.id), ] diff --git a/tests/unit/cache/origin/test_init.py b/tests/unit/cache/origin/test_init.py index 97c306612ab3..bf4b68171631 100644 --- a/tests/unit/cache/origin/test_init.py +++ b/tests/unit/cache/origin/test_init.py @@ -204,32 +204,55 @@ class TestKeyMaker: def test_both_cache_and_purge(self): key_maker = origin.key_maker_factory( cache_keys=["foo", "foo/{obj.attr}"], - purge_keys=["bar", "bar/{obj.attr}"], - ) - assert key_maker(pretend.stub(attr="bar")) == origin.CacheKeys( - cache=["foo", "foo/bar"], - purge=["bar", "bar/bar"], + purge_keys=[ + origin.key_factory("bar"), + origin.key_factory("bar/{obj.attr}"), + ], ) + cache_keys = key_maker(pretend.stub(attr="bar")) + + assert isinstance(cache_keys, origin.CacheKeys) + assert cache_keys.cache == ["foo", "foo/bar"] + assert list(cache_keys.purge) == ["bar", "bar/bar"] def test_only_cache(self): key_maker = origin.key_maker_factory( cache_keys=["foo", "foo/{obj.attr}"], purge_keys=None, ) - assert key_maker(pretend.stub(attr="bar")) == origin.CacheKeys( - cache=["foo", "foo/bar"], - purge=[], - ) + cache_keys = key_maker(pretend.stub(attr="bar")) + + assert isinstance(cache_keys, origin.CacheKeys) + assert cache_keys.cache == ["foo", "foo/bar"] + assert list(cache_keys.purge) == [] def test_only_purge(self): key_maker = origin.key_maker_factory( cache_keys=None, - purge_keys=["bar", "bar/{obj.attr}"], + purge_keys=[ + origin.key_factory("bar"), + origin.key_factory("bar/{obj.attr}"), + ], ) - assert key_maker(pretend.stub(attr="bar")) == origin.CacheKeys( - cache=[], - purge=["bar", "bar/bar"], + cache_keys = key_maker(pretend.stub(attr="bar")) + + assert isinstance(cache_keys, origin.CacheKeys) + assert cache_keys.cache == [] + assert list(cache_keys.purge) == ["bar", "bar/bar"] + + def test_iterate_on(self): + key_maker = origin.key_maker_factory( + 
cache_keys=['foo'], # Intentionally does not support `iterate_on` + purge_keys=[ + origin.key_factory('bar'), + origin.key_factory('bar/{itr}', iterate_on='iterate_me'), + ], ) + cache_keys = key_maker(pretend.stub(iterate_me=['biz', 'baz'])) + + assert isinstance(cache_keys, origin.CacheKeys) + assert cache_keys.cache == ['foo'] + assert list(cache_keys.purge) == ['bar', 'bar/biz', 'bar/baz'] def test_register_origin_keys(monkeypatch): diff --git a/tests/unit/packaging/test_init.py b/tests/unit/packaging/test_init.py index dd7eca2c5e6f..9a897c014a90 100644 --- a/tests/unit/packaging/test_init.py +++ b/tests/unit/packaging/test_init.py @@ -17,7 +17,7 @@ from warehouse import packaging from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage -from warehouse.packaging.models import Project, Release +from warehouse.packaging.models import Project, Release, User from warehouse.packaging.tasks import compute_trending @@ -33,6 +33,11 @@ def test_includme(monkeypatch, with_trending): packaging, "RedisDownloadStatService", download_stat_service_cls, ) + def key_factory(keystring, iterate_on=None): + return pretend.call(keystring, iterate_on=iterate_on) + + monkeypatch.setattr(packaging, 'key_factory', key_factory) + config = pretend.stub( maybe_dotted=lambda dotted: storage_class, register_service=pretend.call_recorder( @@ -72,14 +77,30 @@ def test_includme(monkeypatch, with_trending): pretend.call( Project, cache_keys=["project/{obj.normalized_name}"], - purge_keys=["project/{obj.normalized_name}", "all-projects"], + purge_keys=[ + key_factory("project/{obj.normalized_name}"), + key_factory("user/{itr.username}", iterate_on='users'), + key_factory("all-projects"), + ], ), pretend.call( Release, cache_keys=["project/{obj.project.normalized_name}"], purge_keys=[ - "project/{obj.project.normalized_name}", - "all-projects", + key_factory("project/{obj.project.normalized_name}"), + key_factory("user/{itr.username}", iterate_on='project.users'), + 
key_factory("all-projects"), + ], + ), + pretend.call( + User, + cache_keys=["user/{obj.username}"], + purge_keys=[ + key_factory("user/{obj.username}"), + key_factory( + "project/{itr.normalized_name}", + iterate_on='projects', + ), ], ), ] diff --git a/warehouse/admin/views/users.py b/warehouse/admin/views/users.py index 7c68fed1c336..265ce1a491d5 100644 --- a/warehouse/admin/views/users.py +++ b/warehouse/admin/views/users.py @@ -23,9 +23,8 @@ from warehouse import forms from warehouse.accounts.models import User, Email -from warehouse.packaging.models import JournalEntry, Role +from warehouse.packaging.models import JournalEntry, Project, Role from warehouse.utils.paginate import paginate_url_factory -from warehouse.utils.project import remove_project @view_config( @@ -139,16 +138,24 @@ def user_delete(request): user = request.db.query(User).get(request.matchdict['user_id']) if user.username != request.params.get('username'): - print(user.username) - print(request.params.get('username')) request.session.flash(f'Wrong confirmation input.', queue='error') return HTTPSeeOther( request.route_path('admin.user.detail', user_id=user.id) ) - # Delete projects one by one so they are purged from the cache - for project in user.projects: - remove_project(project, request, flash=False) + # Delete all the user's projects + projects = ( + request.db.query(Project) + .filter( + Project.name.in_( + request.db.query(Project.name) + .join(Role.project) + .filter(Role.user == user) + .subquery() + ) + ) + ) + projects.delete(synchronize_session=False) # Update all journals to point to `deleted-user` instead deleted_user = ( diff --git a/warehouse/cache/origin/__init__.py b/warehouse/cache/origin/__init__.py index 97005c3b4e13..5a71e51f32d2 100644 --- a/warehouse/cache/origin/__init__.py +++ b/warehouse/cache/origin/__init__.py @@ -12,6 +12,8 @@ import collections import functools +import operator +from itertools import chain from warehouse import db from 
warehouse.cache.origin.interfaces import IOriginCache @@ -87,6 +89,18 @@ def wrapped(context, request): CacheKeys = collections.namedtuple("CacheKeys", ["cache", "purge"]) +def key_factory(keystring, iterate_on=None): + + def generate_key(obj): + if iterate_on: + for itr in operator.attrgetter(iterate_on)(obj): + yield keystring.format(itr=itr, obj=obj) + else: + yield keystring.format(obj=obj) + + return generate_key + + def key_maker_factory(cache_keys, purge_keys): if cache_keys is None: cache_keys = [] @@ -96,8 +110,14 @@ def key_maker(obj): return CacheKeys( + # Note: this does not support setting the `cache` argument via + # multiple `key_factories` as we do with `purge` because there is + # a limit to how many surrogate keys we can attach to a single HTTP + # response, and being able to use `iterate_on` would allow this + # size to be unbounded. + # ref: https://github.com/pypa/warehouse/pull/3189 cache=[k.format(obj=obj) for k in cache_keys], - purge=[k.format(obj=obj) for k in purge_keys], + purge=chain.from_iterable(key(obj) for key in purge_keys), ) return key_maker diff --git a/warehouse/packaging/__init__.py b/warehouse/packaging/__init__.py index ef5bb611d9a8..3db703299059 100644 --- a/warehouse/packaging/__init__.py +++ b/warehouse/packaging/__init__.py @@ -12,6 +12,8 @@ from celery.schedules import crontab +from warehouse.accounts.models import User +from warehouse.cache.origin import key_factory from warehouse.packaging.interfaces import IDownloadStatService, IFileStorage from warehouse.packaging.services import RedisDownloadStatService from warehouse.packaging.models import Project, Release @@ -39,12 +41,28 @@ def includeme(config): config.register_origin_cache_keys( Project, cache_keys=["project/{obj.normalized_name}"], - purge_keys=["project/{obj.normalized_name}", "all-projects"], + purge_keys=[ + key_factory("project/{obj.normalized_name}"), + key_factory("user/{itr.username}", 
iterate_on='users'), + key_factory("all-projects"), + ], ) config.register_origin_cache_keys( Release, cache_keys=["project/{obj.project.normalized_name}"], - purge_keys=["project/{obj.project.normalized_name}", "all-projects"], + purge_keys=[ + key_factory("project/{obj.project.normalized_name}"), + key_factory("user/{itr.username}", iterate_on='project.users'), + key_factory("all-projects"), + ], + ) + config.register_origin_cache_keys( + User, + cache_keys=["user/{obj.username}"], + purge_keys=[ + key_factory("user/{obj.username}"), + key_factory("project/{itr.normalized_name}", iterate_on='projects') + ], ) # Add a periodic task to compute trending once a day, assuming we have