diff --git a/admin/nodes/views.py b/admin/nodes/views.py
index af2e4726b43..f5b3f1d6df0 100644
--- a/admin/nodes/views.py
+++ b/admin/nodes/views.py
@@ -53,6 +53,7 @@
from scripts.approve_registrations import approve_past_pendings
from website import settings, search
+from website.archiver.tasks import force_archive
class NodeMixin(PermissionRequiredMixin):
@@ -705,6 +706,12 @@ class NodeReindexShare(NodeMixin, View):
def post(self, request, *args, **kwargs):
node = self.get_object()
update_share(node)
+ messages.success(
+ request,
+ 'Reindex request has been sent to SHARE. '
+ 'Changes typically appear in OSF Search within about 5 minutes, '
+ 'subject to background queue load and SHARE availability.'
+ )
update_admin_log(
user_id=self.request.user.id,
object_id=node._id,
@@ -830,7 +837,7 @@ class CheckArchiveStatusRegistrationsView(NodeMixin, View):
def get(self, request, *args, **kwargs):
# Prevents circular imports that cause admin app to hang at startup
- from osf.management.commands.force_archive import check
+ from osf.management.commands.force_archive import check, DEFAULT_PERMISSIBLE_ADDONS
registration = self.get_object()
@@ -838,8 +845,11 @@ def get(self, request, *args, **kwargs):
messages.success(request, f"Registration {registration._id} is archived.")
return redirect(self.get_success_url())
+ addons = set(registration.registered_from.get_addon_names())
+ addons.update(DEFAULT_PERMISSIBLE_ADDONS)
+
try:
- archive_status = check(registration)
+ archive_status = check(registration, permissible_addons=addons)
messages.success(request, archive_status)
except RegistrationStuckError as exc:
messages.error(request, str(exc))
@@ -860,7 +870,7 @@ class ForceArchiveRegistrationsView(NodeMixin, View):
def post(self, request, *args, **kwargs):
# Prevents circular imports that cause admin app to hang at startup
- from osf.management.commands.force_archive import verify, archive, DEFAULT_PERMISSIBLE_ADDONS
+ from osf.management.commands.force_archive import verify, DEFAULT_PERMISSIBLE_ADDONS
registration = self.get_object()
force_archive_params = request.POST
@@ -885,18 +895,14 @@ def post(self, request, *args, **kwargs):
if dry_mode:
messages.success(request, f"Registration {registration._id} can be archived.")
else:
- try:
- archive(
- registration,
- permissible_addons=addons,
- allow_unconfigured=allow_unconfigured,
- skip_collisions=skip_collision,
- delete_collisions=delete_collision,
- )
- messages.success(request, 'Registration archive process has finished.')
- except Exception as exc:
- messages.error(request, f'This registration cannot be archived due to {exc.__class__.__name__}: {str(exc)}. '
- f'If the problem persists get a developer to fix it.')
+ force_archive_task = force_archive.delay(
+ str(registration._id),
+ permissible_addons=list(addons),
+ allow_unconfigured=allow_unconfigured,
+ skip_collisions=skip_collision,
+ delete_collisions=delete_collision,
+ )
+ messages.success(request, f'Registration archive process has started. Task id: {force_archive_task.id}.')
return redirect(self.get_success_url())
diff --git a/admin/preprints/views.py b/admin/preprints/views.py
index cccc54fe1e9..909006ea91d 100644
--- a/admin/preprints/views.py
+++ b/admin/preprints/views.py
@@ -184,6 +184,12 @@ class PreprintReindexShare(PreprintMixin, View):
def post(self, request, *args, **kwargs):
preprint = self.get_object()
update_share(preprint)
+ messages.success(
+ request,
+ 'Reindex request has been sent to SHARE. '
+ 'Changes typically appear in OSF Search within about 5 minutes, '
+ 'subject to background queue load and SHARE availability.'
+ )
update_admin_log(
user_id=self.request.user.id,
object_id=preprint._id,
diff --git a/admin/templates/users/reindex_user_share.html b/admin/templates/users/reindex_user_share.html
new file mode 100644
index 00000000000..9a0f6c93564
--- /dev/null
+++ b/admin/templates/users/reindex_user_share.html
@@ -0,0 +1,21 @@
+SHARE Reindex User Content
+
diff --git a/admin/templates/users/user.html b/admin/templates/users/user.html
index 36997faf61d..96cda4689aa 100644
--- a/admin/templates/users/user.html
+++ b/admin/templates/users/user.html
@@ -37,6 +37,7 @@
{% include "users/disable_user.html" with user=user %}
{% include "users/mark_spam.html" with user=user %}
{% include "users/reindex_user_elastic.html" with user=user %}
+ {% include "users/reindex_user_share.html" with user=user %}
diff --git a/admin/users/urls.py b/admin/users/urls.py
index 309ba6bcd35..3c87ab1e332 100644
--- a/admin/users/urls.py
+++ b/admin/users/urls.py
@@ -26,6 +26,8 @@
     re_path(r'^(?P<guid>[a-z0-9]+)/get_reset_password/$', views.GetPasswordResetLink.as_view(), name='get-reset-password'),
     re_path(r'^(?P<guid>[a-z0-9]+)/reindex_elastic_user/$', views.UserReindexElastic.as_view(),
             name='reindex-elastic-user'),
+    re_path(r'^(?P<guid>[a-z0-9]+)/reindex_share_user/$', views.UserShareReindex.as_view(),
+            name='reindex-share-user'),
     re_path(r'^(?P<guid>[a-z0-9]+)/merge_accounts/$', views.UserMergeAccounts.as_view(), name='merge-accounts'),
     re_path(r'^(?P<guid>[a-z0-9]+)/draft_registrations/$', views.UserDraftRegistrationsList.as_view(), name='draft-registrations'),
]
diff --git a/admin/users/views.py b/admin/users/views.py
index 1771c0fea0d..1584c78158e 100644
--- a/admin/users/views.py
+++ b/admin/users/views.py
@@ -38,6 +38,7 @@
CONFIRM_HAM,
UNFLAG_SPAM,
REINDEX_ELASTIC,
+ REINDEX_SHARE,
)
from admin.users.forms import (
@@ -560,6 +561,44 @@ def post(self, request, *args, **kwargs):
return redirect(self.get_success_url())
+class UserShareReindex(UserMixin, View):
+ permission_required = 'osf.change_osfuser'
+
+ def post(self, request, *args, **kwargs):
+ from api.share.utils import update_share
+ user = self.get_object()
+
+ nodes_count = user.contributed.count()
+ preprints_count = user.preprints.filter(deleted=None).count()
+
+ for node in user.contributed:
+ try:
+ update_share(node)
+ except Exception as e:
+ messages.error(request, f'Failed to SHARE reindex node {node._id}: {e}')
+
+ for preprint in user.preprints.filter(deleted=None):
+ try:
+ update_share(preprint)
+ except Exception as e:
+ messages.error(request, f'Failed to SHARE reindex preprint {preprint._id}: {e}')
+
+        messages.success(
+            request,
+            f'Triggered SHARE reindexing for {nodes_count} nodes and {preprints_count} preprints.'
+        )
+
+ update_admin_log(
+ user_id=self.request.user.id,
+ object_id=user._id,
+ object_repr='User',
+ message=f'SHARE reindexed all content for user {user._id}',
+ action_flag=REINDEX_SHARE
+ )
+
+ return redirect(self.get_success_url())
+
+
class UserDraftRegistrationsList(UserMixin, ListView):
template_name = 'users/draft-registrations.html'
permission_required = 'osf.view_draftregistration'
diff --git a/admin_tests/nodes/test_views.py b/admin_tests/nodes/test_views.py
index a42f3be9ae1..7de8bdb755c 100644
--- a/admin_tests/nodes/test_views.py
+++ b/admin_tests/nodes/test_views.py
@@ -303,6 +303,7 @@ class TestNodeReindex(AdminTestCase):
def setUp(self):
super().setUp()
self.request = RequestFactory().post('/fake_path')
+ patch_messages(self.request)
self.user = AuthUserFactory()
self.node = ProjectFactory(creator=self.user)
diff --git a/admin_tests/preprints/test_views.py b/admin_tests/preprints/test_views.py
index 57b03815eb4..2cdcda136d1 100644
--- a/admin_tests/preprints/test_views.py
+++ b/admin_tests/preprints/test_views.py
@@ -364,6 +364,7 @@ def test_reindex_preprint_share(self, preprint, req, mock_update_share):
preprint.provider.save()
count = AdminLogEntry.objects.count()
+ patch_messages(req)
view = views.PreprintReindexShare()
view = setup_log_view(view, req, guid=preprint._id)
mock_update_share.reset_mock()
diff --git a/api/base/utils.py b/api/base/utils.py
index 9e5f6e1a445..31bc35fd8d4 100644
--- a/api/base/utils.py
+++ b/api/base/utils.py
@@ -148,6 +148,8 @@ def get_object_or_error(model_or_qs, query_or_pk=None, request=None, display_nam
# users who are unconfirmed or unregistered, but not users who have been
# disabled.
if model_cls is OSFUser and obj.is_disabled:
+ if getattr(obj, 'gdpr_deleted', False):
+ raise NotFound
raise UserGone(user=obj)
if check_deleted and (model_cls is not OSFUser and not getattr(obj, 'is_active', True) or getattr(obj, 'is_deleted', False) or getattr(obj, 'deleted', False)):
if display_name is None:
diff --git a/api/base/views.py b/api/base/views.py
index a2e59cf7b0e..e0a253cd65e 100644
--- a/api/base/views.py
+++ b/api/base/views.py
@@ -38,7 +38,7 @@
from api.nodes.permissions import ExcludeWithdrawals
from api.users.serializers import UserSerializer
from framework.auth.oauth_scopes import CoreScopes
-from osf.models import Contributor, MaintenanceState, BaseFileNode
+from osf.models import Contributor, MaintenanceState, BaseFileNode, AbstractNode
from osf.utils.permissions import API_CONTRIBUTOR_PERMISSIONS, READ, WRITE, ADMIN
from waffle.models import Flag, Switch, Sample
from waffle import sample_is_active
@@ -600,7 +600,7 @@ def get_queryset(self):
)
-class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView):
+class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
@@ -618,11 +618,9 @@ class BaseLinkedList(JSONAPIBaseView, generics.ListAPIView):
view_name = None
ordering = ('-modified',)
+ model_class = AbstractNode
- # TODO: This class no longer exists
- # model_class = Pointer
-
- def get_queryset(self):
+ def get_default_queryset(self):
auth = get_user_auth(self.request)
from api.resources import annotations as resource_annotations
@@ -639,6 +637,9 @@ def get_queryset(self):
.order_by('-modified')
)
+ def get_queryset(self):
+ return self.get_queryset_from_request()
+
class WaterButlerMixin:
diff --git a/api/collections/views.py b/api/collections/views.py
index fb8d808f888..907a6fee571 100644
--- a/api/collections/views.py
+++ b/api/collections/views.py
@@ -569,6 +569,7 @@ class LinkedNodesList(JSONAPIBaseView, generics.ListAPIView, CollectionMixin, No
view_name = 'linked-nodes'
ordering = ('-modified',)
+ model_class = Node
def get_default_queryset(self):
auth = get_user_auth(self.request)
@@ -589,7 +590,7 @@ def get_parser_context(self, http_request):
return res
-class LinkedRegistrationsList(JSONAPIBaseView, generics.ListAPIView, CollectionMixin):
+class LinkedRegistrationsList(JSONAPIBaseView, generics.ListAPIView, CollectionMixin, ListFilterMixin):
"""List of registrations linked to this node. *Read-only*.
Linked registrations are the registration nodes pointed to by node links.
@@ -667,8 +668,9 @@ class LinkedRegistrationsList(JSONAPIBaseView, generics.ListAPIView, CollectionM
required_write_scopes = [CoreScopes.COLLECTED_META_WRITE]
ordering = ('-modified',)
+ model_class = Registration
- def get_queryset(self):
+ def get_default_queryset(self):
auth = get_user_auth(self.request)
return Registration.objects.filter(
guids__in=self.get_collection().active_guids.all(),
@@ -680,6 +682,9 @@ def get_queryset(self):
'-modified',
)
+ def get_queryset(self):
+ return self.get_queryset_from_request()
+
# overrides APIView
def get_parser_context(self, http_request):
"""
diff --git a/api/institutions/views.py b/api/institutions/views.py
index 60f8d31b88f..a3c0f93d0c8 100644
--- a/api/institutions/views.py
+++ b/api/institutions/views.py
@@ -338,7 +338,8 @@ class InstitutionNodesRelationship(JSONAPIBaseView, generics.RetrieveDestroyAPIV
}
Success: 204
- This requires write permissions in the nodes requested.
+ This requires write permissions in the nodes requested. If the user has admin permissions on the node,
+ the institution does not need to be affiliated in their account.
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
@@ -367,16 +368,19 @@ def get_object(self):
def perform_destroy(self, instance):
data = self.request.data['data']
user = self.request.user
+ inst = instance['self']
ids = [datum['id'] for datum in data]
nodes = []
for id_ in ids:
node = Node.load(id_)
if not node.has_permission(user, osf_permissions.WRITE):
raise exceptions.PermissionDenied(detail=f'Write permission on node {id_} required')
+ if not user.is_affiliated_with_institution(inst) and not node.has_permission(user, osf_permissions.ADMIN):
+ raise exceptions.PermissionDenied(detail=f'User needs to be affiliated with {inst.name}')
nodes.append(node)
for node in nodes:
- node.remove_affiliated_institution(inst=instance['self'], user=user)
+ node.remove_affiliated_institution(inst=inst, user=user)
node.save()
def create(self, *args, **kwargs):
diff --git a/api/nodes/views.py b/api/nodes/views.py
index ae94133e5ca..3908d5406a3 100644
--- a/api/nodes/views.py
+++ b/api/nodes/views.py
@@ -547,13 +547,17 @@ def get_serializer_context(self):
return context
def perform_destroy(self, instance):
- node = self.get_resource()
+ node: Node = self.get_resource()
auth = get_user_auth(self.request)
if node.visible_contributors.count() == 1 and instance.visible:
raise ValidationError('Must have at least one visible contributor')
removed = node.remove_contributor(instance, auth)
if not removed:
raise ValidationError('Must have at least one registered admin contributor')
+ propagate = self.request.query_params.get('propagate_to_children') == 'true'
+ if propagate:
+ for child_node in node.get_nodes(_contributors__in=[instance.user]):
+ child_node.remove_contributor(instance, auth)
class NodeImplicitContributorsList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, NodeMixin):
diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py
index 6db54af0141..4a2490ac578 100644
--- a/api/preprints/serializers.py
+++ b/api/preprints/serializers.py
@@ -326,6 +326,7 @@ def update(self, preprint, validated_data):
updated_has_prereg_links = validated_data.get('has_prereg_links', preprint.has_prereg_links)
updated_why_no_prereg = validated_data.get('why_no_prereg', preprint.why_no_prereg)
+ prereg_links = validated_data.get('prereg_links', preprint.prereg_links)
if updated_has_coi is False and updated_conflict_statement:
raise exceptions.ValidationError(
@@ -342,7 +343,7 @@ def update(self, preprint, validated_data):
detail='Cannot provide data links when has_data_links is set to "no".',
)
- if updated_has_prereg_links != 'no' and updated_why_no_prereg:
+ if updated_has_prereg_links != 'no' and (updated_why_no_prereg and not prereg_links):
raise exceptions.ValidationError(
detail='You cannot edit this statement while your prereg links availability is set to true or is unanswered.',
)
diff --git a/api/share/utils.py b/api/share/utils.py
index 438f2c738a6..583b148cb9e 100644
--- a/api/share/utils.py
+++ b/api/share/utils.py
@@ -6,6 +6,7 @@
import logging
from django.apps import apps
+from celery.utils.time import get_exponential_backoff_interval
import requests
from framework.celery_tasks import app as celery_app
@@ -97,6 +98,20 @@ def task__update_share(self, guid: str, is_backfill=False, osfmap_partition_name
_response.raise_for_status()
except Exception as e:
log_exception(e)
+ if _response.status_code == HTTPStatus.TOO_MANY_REQUESTS:
+ retry_after = _response.headers.get('Retry-After')
+ try:
+ countdown = int(retry_after)
+ except (TypeError, ValueError):
+ retries = getattr(self.request, 'retries', 0)
+ countdown = get_exponential_backoff_interval(
+ factor=4,
+ retries=retries,
+ maximum=2 * 60,
+ full_jitter=True,
+ )
+ raise self.retry(exc=e, countdown=countdown)
+
if HTTPStatus(_response.status_code).is_server_error:
raise self.retry(exc=e)
else: # success response
diff --git a/api/users/serializers.py b/api/users/serializers.py
index 3631674bc94..743932ea440 100644
--- a/api/users/serializers.py
+++ b/api/users/serializers.py
@@ -103,6 +103,7 @@ class UserSerializer(JSONAPISerializer):
{
'html': 'absolute_url',
'profile_image': 'profile_image_url',
+ 'merged_by': 'get_merged_by_absolute_url',
},
),
)
@@ -240,6 +241,10 @@ def get_default_region_id(self, obj):
def get_accepted_terms_of_service(self, obj):
return bool(obj.accepted_terms_of_service)
+ def get_merged_by_absolute_url(self, obj):
+ if obj.merged_by:
+ return obj.merged_by.absolute_url
+
def profile_image_url(self, user):
size = self.context['request'].query_params.get('profile_image_size')
return user.profile_image_url(size=size)
diff --git a/api/users/views.py b/api/users/views.py
index 47a4e52fc17..6485fc625ad 100644
--- a/api/users/views.py
+++ b/api/users/views.py
@@ -132,6 +132,8 @@ def get_user(self, check_permissions=True):
contrib_id, contrib = list(self.request.parents[Contributor].items())[0]
user = contrib.user
if user.is_disabled:
+ if getattr(user, 'gdpr_deleted', False):
+ raise NotFound
raise UserGone(user=user)
# Make sure that the contributor ID is correct
if user._id == key:
diff --git a/api_tests/institutions/views/test_institution_relationship_nodes.py b/api_tests/institutions/views/test_institution_relationship_nodes.py
index 24a22bfa4f9..f99802dbe91 100644
--- a/api_tests/institutions/views/test_institution_relationship_nodes.py
+++ b/api_tests/institutions/views/test_institution_relationship_nodes.py
@@ -297,7 +297,7 @@ def test_delete_user_is_admin(self, app, url_institution_nodes, node_one, user,
assert res.status_code == 204
assert institution not in node_one.affiliated_institutions.all()
- def test_delete_user_is_read_write(self, app, node_private, user, url_institution_nodes, institution):
+ def test_delete_user_is_read_write_and_affiliated(self, app, node_private, user, url_institution_nodes, institution):
node_private.add_contributor(user)
node_private.save()
@@ -312,6 +312,22 @@ def test_delete_user_is_read_write(self, app, node_private, user, url_institutio
assert res.status_code == 204
assert institution not in node_private.affiliated_institutions.all()
+ def test_delete_user_is_read_write_but_not_affiliated(self, app, node_private, url_institution_nodes, institution):
+ user_not_affiliated = AuthUserFactory()
+ node_private.add_contributor(user_not_affiliated, permissions=permissions.WRITE)
+ node_private.save()
+
+ res = app.delete_json_api(
+ url_institution_nodes,
+ make_payload(node_private._id),
+ auth=user_not_affiliated.auth,
+ expect_errors=True
+ )
+ node_private.reload()
+
+ assert res.status_code == 403
+ assert institution in node_private.affiliated_institutions.all()
+
def test_delete_user_is_read_only(self, node_private, user, app, url_institution_nodes, institution):
node_private.add_contributor(user, permissions=permissions.READ)
node_private.save()
diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py
index f5a4f0acf6e..188a3593ee1 100644
--- a/api_tests/users/views/test_user_detail.py
+++ b/api_tests/users/views/test_user_detail.py
@@ -1185,7 +1185,7 @@ def user_two(self):
def test_requesting_as_deactivated_user_returns_400_response(
self, app, user_one):
url = f'/{API_BASE}users/{user_one._id}/'
- res = app.get(url, auth=user_one.auth, expect_errors=True)
+ res = app.get(url, auth=user_one.auth, expect_errors=False)
assert res.status_code == 200
user_one.is_disabled = True
user_one.save()
@@ -1196,7 +1196,7 @@ def test_requesting_as_deactivated_user_returns_400_response(
def test_unconfirmed_users_return_entire_user_object(
self, app, user_one, user_two):
url = f'/{API_BASE}users/{user_one._id}/'
- res = app.get(url, auth=user_two.auth, expect_errors=True)
+ res = app.get(url, auth=user_two.auth, expect_errors=False)
assert res.status_code == 200
user_one.is_registered = False
user_one.save()
@@ -1209,7 +1209,7 @@ def test_unconfirmed_users_return_entire_user_object(
def test_requesting_deactivated_user_returns_410_response_and_meta_info(
self, app, user_one, user_two):
url = f'/{API_BASE}users/{user_one._id}/'
- res = app.get(url, auth=user_two.auth, expect_errors=True)
+ res = app.get(url, auth=user_two.auth, expect_errors=False)
assert res.status_code == 200
user_one.is_disabled = True
user_one.save()
@@ -1223,6 +1223,21 @@ def test_requesting_deactivated_user_returns_410_response_and_meta_info(
res.json['errors'][0]['meta']['profile_image']).netloc == 'secure.gravatar.com'
assert res.json['errors'][0]['detail'] == 'The requested user is no longer available.'
+ def test_gdpr_deleted_user_returns_404_and_no_meta_info(
+ self, app, user_one, user_two):
+ url = f'/{API_BASE}users/{user_one._id}/'
+ res = app.get(url, auth=user_two.auth, expect_errors=False)
+ assert res.status_code == 200
+
+ user_one.gdpr_delete()
+ user_one.save()
+
+ res = app.get(url, auth=user_two.auth, expect_errors=True)
+ assert res.status_code == 404
+ if res.json:
+ assert 'errors' in res.json
+ assert 'meta' not in res.json['errors'][0]
+
@pytest.mark.django_db
class UserProfileMixin:
diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py
index 1f5612a2f91..8f9cf8fbc03 100644
--- a/osf/management/commands/force_archive.py
+++ b/osf/management/commands/force_archive.py
@@ -36,10 +36,13 @@
from addons.osfstorage.models import OsfStorageFile, OsfStorageFolder, OsfStorageFileNode
from framework import sentry
from framework.exceptions import HTTPError
+from osf import features
from osf.models import Node, NodeLog, Registration, BaseFileNode
from osf.models.files import TrashedFileNode
+from osf.utils.requests import get_current_request
from osf.exceptions import RegistrationStuckRecoverableException, RegistrationStuckBrokenException
from api.base.utils import waterbutler_api_url_for
+from api.waffle.utils import flag_is_active
from scripts import utils as script_utils
from website.archiver import ARCHIVER_SUCCESS
from website.settings import ARCHIVE_TIMEOUT_TIMEDELTA, ARCHIVE_PROVIDER, COOKIE_NAME
@@ -149,9 +152,11 @@ def complete_archive_target(reg, addon_short_name):
def perform_wb_copy(reg, node_settings, delete_collisions=False, skip_collisions=False):
src, dst, user = reg.archive_job.info()
- if dst.files.filter(name=node_settings.archive_folder_name.replace('/', '-')).exists():
+ dst_storage = dst.get_addon('osfstorage')
+ archive_name = node_settings.archive_folder_name.replace('/', '-')
+ if dst_storage.get_root().children.filter(name=archive_name).exists():
if not delete_collisions and not skip_collisions:
- raise Exception('Archive folder for {} already exists. Investigate manually and rerun with either --delete-collisions or --skip-collisions')
+ raise Exception(f'Archive folder for {archive_name} already exists. Investigate manually and rerun with either --delete-collisions or --skip-collisions')
if delete_collisions:
archive_folder = dst.files.exclude(type='osf.trashedfolder').get(name=node_settings.archive_folder_name.replace('/', '-'))
logger.info(f'Removing {archive_folder}')
@@ -394,12 +399,23 @@ def archive(registration, *args, permissible_addons=DEFAULT_PERMISSIBLE_ADDONS,
logger.info(f'Preparing to archive {reg._id}')
for short_name in permissible_addons:
node_settings = reg.registered_from.get_addon(short_name)
+ if not node_settings and short_name != 'osfstorage' and flag_is_active(get_current_request(), features.ENABLE_GV):
+ # get_addon() returns None for addons when archive is running inside of
+ # the celery task. In this case, try to get addon settings from the GV
+ try:
+ from website.archiver.tasks import get_addon_from_gv
+ node_settings = get_addon_from_gv(reg.registered_from, short_name, reg.registered_from.creator)
+ except Exception as e:
+ logger.warning(f'Could not load {short_name} from GV: {e}')
+
if not hasattr(node_settings, '_get_file_tree'):
# Excludes invalid or None-type
+            logger.warning(f'Skipping {short_name} for {reg._id}.')
continue
if not node_settings.configured:
if not allow_unconfigured:
raise Exception(f'{reg._id}: {short_name} on {reg.registered_from._id} is not configured. If this is permissible, re-run with `--allow-unconfigured`.')
+            logger.warning(f'{short_name} is not configured for {reg._id}.')
continue
if not reg.archive_job.get_target(short_name) or reg.archive_job.get_target(short_name).status == ARCHIVER_SUCCESS:
continue
@@ -487,7 +503,7 @@ def verify_registrations(registration_ids, permissible_addons):
else:
SKIPPED.append(reg)
-def check(reg):
+def check(reg, *args, **kwargs):
"""Check registration status. Raise exception if registration stuck."""
logger.info(f'Checking {reg._id}')
if reg.is_deleted:
@@ -504,7 +520,7 @@ def check(reg):
still_archiving = not archive_tree_finished
if still_archiving and root_job.datetime_initiated < expired_if_before:
logger.warning(f'Registration {reg._id} is stuck in archiving')
- if verify(reg):
+ if verify(reg, *args, **kwargs):
raise RegistrationStuckRecoverableException(f'Registration {reg._id} is stuck and verified recoverable')
else:
raise RegistrationStuckBrokenException(f'Registration {reg._id} is stuck and verified broken')
diff --git a/osf/models/user.py b/osf/models/user.py
index a8cbf66d5b3..aab3dccebe8 100644
--- a/osf/models/user.py
+++ b/osf/models/user.py
@@ -156,7 +156,7 @@ class OSFUser(DirtyFieldsMixin, GuidMixin, BaseModel, AbstractBaseUser, Permissi
# Overrides DirtyFieldsMixin, Foreign Keys checked by '_id' rather than typical name.
FIELDS_TO_CHECK = SEARCH_UPDATE_FIELDS.copy()
- FIELDS_TO_CHECK.update({'password', 'last_login', 'merged_by_id', 'username'})
+ FIELDS_TO_CHECK.update({'password', 'date_last_login', 'merged_by_id', 'username'})
# TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
# search update for all nodes to which the user is a contributor.
@@ -857,6 +857,20 @@ def merge_user(self, user):
user.save()
signals.user_account_merged.send(user)
+ from api.share.utils import update_share
+
+ for node in user.contributed:
+ try:
+ update_share(node)
+ except Exception as e:
+ logger.exception(f'Failed to SHARE reindex node {node._id} during user merge: {e}')
+
+ for preprint in user.preprints.all():
+ try:
+ update_share(preprint)
+ except Exception as e:
+ logger.exception(f'Failed to SHARE reindex preprint {preprint._id} during user merge: {e}')
+
def _merge_users_preprints(self, user):
"""
Preprints use guardian. The PreprintContributor table stores order and bibliographic information.
@@ -949,6 +963,14 @@ def _merge_user_draft_registrations(self, user):
draft_reg.remove_permission(user, user_perms)
draft_reg.save()
+ @property
+ def gdpr_deleted(self):
+ if not self.is_disabled:
+ return False
+ if self.fullname != 'Deleted user':
+ return False
+ return not self.emails.exists()
+
def deactivate_account(self):
"""
Disables user account, making is_disabled true, while also unsubscribing user
diff --git a/poetry.lock b/poetry.lock
index dd79982f24f..83ca13f7a00 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
[[package]]
name = "amqp"
@@ -967,14 +967,14 @@ files = [
[[package]]
name = "django"
-version = "4.2.17"
+version = "4.2.26"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
- {file = "Django-4.2.17-py3-none-any.whl", hash = "sha256:3a93350214ba25f178d4045c0786c61573e7dbfa3c509b3551374f1e11ba8de0"},
- {file = "Django-4.2.17.tar.gz", hash = "sha256:6b56d834cc94c8b21a8f4e775064896be3b4a4ca387f2612d4406a5927cd2fdc"},
+ {file = "django-4.2.26-py3-none-any.whl", hash = "sha256:c96e64fc3c359d051a6306871bd26243db1bd02317472a62ffdbe6c3cae14280"},
+ {file = "django-4.2.26.tar.gz", hash = "sha256:9398e487bcb55e3f142cb56d19fbd9a83e15bb03a97edc31f408361ee76d9d7a"},
]
[package.dependencies]
@@ -1166,6 +1166,7 @@ groups = ["main"]
files = [
{file = "django-sendgrid-v5-1.2.3.tar.gz", hash = "sha256:3887aafbb10d5b808efc2c1031dcd96fd357d542eb5affe38fef07cc0f3cfae9"},
{file = "django_sendgrid_v5-1.2.3-py2.py3-none-any.whl", hash = "sha256:2d2fa8a085d21c95e5f97fc60b61f199ccc57a27df8da90cd3f29a5702346dc6"},
+ {file = "django_sendgrid_v5-1.2.3-py3-none-any.whl", hash = "sha256:f6a44ee37c1c3cc7d683a43c55ead530417be1849a8a41bde02b158009559d9d"},
]
[package.dependencies]
@@ -4714,4 +4715,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.1"
python-versions = "^3.12"
-content-hash = "07a5081a731599efec5bc45ca753a024e427072954be6c4029278c116f9877e5"
+content-hash = "2bc7e95f03d05e8b3335514e887b590acdab5cb2a44fc47bde870bdf8e465bf2"
diff --git a/pyproject.toml b/pyproject.toml
index 1d624f43fb9..d9973dae373 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,7 +56,7 @@ requests-oauthlib = "1.3.1"
sentry-sdk = {version= "2.2.0", extras = ["django", "flask", "celery"]}
django-redis = "5.4.0"
# API requirements
-Django = "4.2.17"
+Django = ">=4.2.26,<4.3"
djangorestframework = "3.15.1"
django-cors-headers = "4.3.1"
djangorestframework-bulk = "0.2.1"
diff --git a/website/archiver/tasks.py b/website/archiver/tasks.py
index a7c7d6c80ee..46dd928f9b7 100644
--- a/website/archiver/tasks.py
+++ b/website/archiver/tasks.py
@@ -35,6 +35,7 @@
from osf.models import (
ArchiveJob,
AbstractNode,
+ Registration,
DraftRegistration,
)
from osf import features
@@ -370,3 +371,29 @@ def archive_success(self, dst_pk, job_pk):
dst.sanction.ask(dst.get_active_contributors_recursive(unique_users=True))
dst.update_search()
+
+
+@celery_app.task(bind=True)
+def force_archive(self, registration_id, permissible_addons, allow_unconfigured=False, skip_collisions=False, delete_collisions=False):
+ from osf.management.commands.force_archive import archive
+
+ create_app_context()
+
+ try:
+ registration = AbstractNode.load(registration_id)
+ if not registration or not isinstance(registration, Registration):
+ return f'Registration {registration_id} not found'
+
+ archive(
+ registration,
+ permissible_addons=set(permissible_addons),
+ allow_unconfigured=allow_unconfigured,
+ skip_collisions=skip_collisions,
+ delete_collisions=delete_collisions,
+ )
+ return f'Registration {registration_id} archive completed'
+
+ except Exception as exc:
+ sentry.log_message(f'Archive task failed for {registration_id}: {exc}')
+ sentry.log_exception(exc)
+ return f'{exc.__class__.__name__}: {str(exc)}'