diff --git a/CHANGELOG b/CHANGELOG index 5fcd0e35643..9b038e58fef 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,26 @@ We follow the CalVer (https://calver.org/) versioning scheme: YY.MINOR.MICRO. +25.09.0 (2025-05-14) +==================== + +- Accounts confirmed but not registered +- Action request creation for preprints and nodes not working +- Reviews Dashboard not displaying contributors for preprint provider moderators +- Backend support for static contact indicator in Institutional Dashboard +- Add ability to remove categories from active collections +- Add scopes for applications to full_read and full_write scopes + +25.08.0 (2025-05-02) +==================== + +- Bugfixes and improvements + +25.07.0 (2025-04-21) +==================== + +- Bugfixes and improvements + 25.06.0 (2025-04-07) ==================== diff --git a/addons/base/views.py b/addons/base/views.py index 6f22c71f3e3..a6c90860b98 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -431,11 +431,7 @@ def _enqueue_metrics(file_version, file_node, action, auth, from_mfr=False): def _construct_payload(auth, resource, credentials, waterbutler_settings): if isinstance(resource, Registration): - callback_url = resource.api_url_for( - 'registration_callbacks', - _absolute=True, - _internal=True - ) + callback_url = resource.callbacks_url else: callback_url = resource.api_url_for( 'create_waterbutler_log', diff --git a/admin/collection_providers/forms.py b/admin/collection_providers/forms.py index feae16aa695..7d0e8dadf46 100644 --- a/admin/collection_providers/forms.py +++ b/admin/collection_providers/forms.py @@ -3,7 +3,7 @@ from django import forms from framework.utils import sanitize_html -from osf.models import CollectionProvider, CollectionSubmission +from osf.models import CollectionProvider from admin.base.utils import get_nodelicense_choices, get_defaultlicense_choices, validate_slug @@ -74,12 +74,6 @@ def clean_collected_type_choices(self): type_choices_new = {c.strip(' ') for c in json.loads(self.data.get('collected_type_choices'))} type_choices_added = type_choices_new - type_choices_old type_choices_removed = type_choices_old - type_choices_new - for item in type_choices_removed: - if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, - collected_type=item).exists(): - raise forms.ValidationError( - f'Cannot delete "{item}" because it is used as metadata on objects.' - ) else: # if this is creating a CollectionProvider type_choices_added = [] @@ -104,12 +98,6 @@ def clean_status_choices(self): status_choices_new = {c.strip(' ') for c in json.loads(self.data.get('status_choices'))} status_choices_added = status_choices_new - status_choices_old status_choices_removed = status_choices_old - status_choices_new - for item in status_choices_removed: - if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, - status=item).exists(): - raise forms.ValidationError( - f'Cannot delete "{item}" because it is used as metadata on objects.'
- ) else: # if this is creating a CollectionProvider status_choices_added = [] @@ -134,12 +122,6 @@ def clean_volume_choices(self): volume_choices_new = {c.strip(' ') for c in json.loads(self.data.get('volume_choices'))} volume_choices_added = volume_choices_new - volume_choices_old volume_choices_removed = volume_choices_old - volume_choices_new - for item in volume_choices_removed: - if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, - volume=item).exists(): - raise forms.ValidationError( - f'Cannot delete "{item}" because it is used as metadata on objects.' - ) else: # if this is creating a CollectionProvider volume_choices_added = [] @@ -164,12 +146,6 @@ def clean_issue_choices(self): issue_choices_new = {c.strip(' ') for c in json.loads(self.data.get('issue_choices'))} issue_choices_added = issue_choices_new - issue_choices_old issue_choices_removed = issue_choices_old - issue_choices_new - for item in issue_choices_removed: - if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, - issue=item).exists(): - raise forms.ValidationError( - f'Cannot delete "{item}" because it is used as metadata on objects.' - ) else: # if this is creating a CollectionProvider issue_choices_added = [] @@ -194,12 +170,6 @@ def clean_program_area_choices(self): program_area_choices_new = {c.strip(' ') for c in json.loads(self.data.get('program_area_choices'))} program_area_choices_added = program_area_choices_new - program_area_choices_old program_area_choices_removed = program_area_choices_old - program_area_choices_new - for item in program_area_choices_removed: - if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, - program_area=item).exists(): - raise forms.ValidationError( - f'Cannot delete "{item}" because it is used as metadata on objects.' 
- ) else: # if this is creating a CollectionProvider program_area_choices_added = [] @@ -224,16 +194,6 @@ def clean_school_type_choices(self): updated_choices = {c.strip(' ') for c in json.loads(self.data.get('school_type_choices'))} added_choices = updated_choices - old_choices removed_choices = old_choices - updated_choices - active_removed_choices = set( - primary_collection.collectionsubmission_set.filter( - school_type__in=removed_choices - ).values_list('school_type', flat=True) - ) - if active_removed_choices: - raise forms.ValidationError( - 'Cannot remove the following choices for "school_type", as they are ' - f'currently in use: {active_removed_choices}' - ) else: # Creating a new CollectionProvider added_choices = set() removed_choices = set() @@ -253,17 +213,6 @@ def clean_study_design_choices(self): updated_choices = {c.strip(' ') for c in json.loads(self.data.get('study_design_choices'))} added_choices = updated_choices - old_choices removed_choices = old_choices - updated_choices - - active_removed_choices = set( - primary_collection.collectionsubmission_set.filter( - study_design__in=removed_choices - ).values_list('school_type', flat=True) - ) - if active_removed_choices: - raise forms.ValidationError( - 'Cannot remove the following choices for "study_design", as they are ' - f'currently in use: {active_removed_choices}' - ) else: # Creating a new CollectionProvider added_choices = set() removed_choices = set() @@ -283,17 +232,6 @@ def clean_disease_choices(self): updated_choices = {c.strip(' ') for c in json.loads(self.data.get('disease_choices'))} added_choices = updated_choices - old_choices removed_choices = old_choices - updated_choices - - active_removed_choices = set( - primary_collection.collectionsubmission_set.filter( - disease__in=removed_choices - ).values_list('disease', flat=True) - ) - if active_removed_choices: - raise forms.ValidationError( - 'Cannot remove the following choices for "disease", as they are ' - f'currently in use: {active_removed_choices}' - ) else: # Creating a new CollectionProvider added_choices = set() removed_choices = set() @@ -313,17 +251,6 @@ def clean_data_type_choices(self): updated_choices = {c.strip(' ') for c in json.loads(self.data.get('data_type_choices'))} added_choices = updated_choices - old_choices removed_choices = old_choices - updated_choices - - active_removed_choices = set( - primary_collection.collectionsubmission_set.filter( - data_type__in=removed_choices - ).values_list('data_type', flat=True) - ) - if active_removed_choices: - raise forms.ValidationError( - 'Cannot remove the following choices for "data_type", as they are ' - f'currently in use: {active_removed_choices}' - ) else: # Creating a new CollectionProvider added_choices = set() removed_choices = set() @@ -343,17 +270,6 @@ def clean_grade_levels_choices(self): updated_choices = {c.strip(' ') for c in json.loads(self.data.get('grade_levels_choices'))} added_choices = updated_choices - old_choices removed_choices = old_choices - updated_choices - - active_removed_choices = set( - primary_collection.collectionsubmission_set.filter( - data_type__in=removed_choices - ).values_list('grade_levels', flat=True) - ) - if active_removed_choices: - raise forms.ValidationError( - 'Cannot remove the following choices for "grade_levels", as they are ' - f'currently in use: {active_removed_choices}' - ) else: # Creating a new CollectionProvider added_choices = set() removed_choices = set() diff --git a/admin/management/views.py b/admin/management/views.py index 
16057f147ca..525f0d8d64a 100644 --- a/admin/management/views.py +++ b/admin/management/views.py @@ -150,10 +150,12 @@ def post(self, request): class BulkResync(ManagementCommandPermissionView): def post(self, request): + missing_dois_only = request.POST.get('missing_preprint_dois_only', False) sync_doi_metadata.apply_async(kwargs={ 'modified_date': timezone.now(), 'batch_size': None, - 'dry_run': False + 'dry_run': False, + 'missing_preprint_dois_only': missing_dois_only }) messages.success(request, 'Resyncing with CrossRef and DataCite! It will take some time.') return redirect(reverse('management:commands')) diff --git a/admin/nodes/urls.py b/admin/nodes/urls.py index d28a73e2c51..c2704ee95b2 100644 --- a/admin/nodes/urls.py +++ b/admin/nodes/urls.py @@ -27,10 +27,12 @@ re_path(r'^(?P<guid>[a-z0-9]+)/reindex_share_node/$', views.NodeReindexShare.as_view(), name='reindex-share-node'), re_path(r'^(?P<guid>[a-z0-9]+)/reindex_elastic_node/$', views.NodeReindexElastic.as_view(), name='reindex-elastic-node'), - re_path(r'^(?P<guid>[a-z0-9]+)/restart_stuck_registrations/$', views.RestartStuckRegistrationsView.as_view(), - name='restart-stuck-registrations'), re_path(r'^(?P<guid>[a-z0-9]+)/remove_stuck_registrations/$', views.RemoveStuckRegistrationsView.as_view(), name='remove-stuck-registrations'), + re_path(r'^(?P<guid>[a-z0-9]+)/check_archive_status/$', views.CheckArchiveStatusRegistrationsView.as_view(), + name='check-archive-status'), + re_path(r'^(?P<guid>[a-z0-9]+)/force_archive_registration/$', views.ForceArchiveRegistrationsView.as_view(), + name='force-archive-registration'), re_path(r'^(?P<guid>[a-z0-9]+)/remove_user/(?P<user_id>[a-z0-9]+)/$', views.NodeRemoveContributorView.as_view(), name='remove-user'), re_path(r'^(?P<guid>[a-z0-9]+)/modify_storage_usage/$', views.NodeModifyStorageUsage.as_view(), diff --git a/admin/nodes/views.py b/admin/nodes/views.py index fc16a3b0d05..2d4f0c1194f 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -1,4 +1,5 @@ import pytz +from enum import Enum from datetime import datetime from framework import status @@ -26,7 +27,7 @@ from api.share.utils import update_share from api.caching.tasks import update_storage_usage_cache -from osf.exceptions import NodeStateError +from osf.exceptions import NodeStateError, RegistrationStuckError from osf.models import ( OSFUser, NodeLog, @@ -672,23 +673,16 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) -class RestartStuckRegistrationsView(NodeMixin, TemplateView): - """ Allows an authorized user to restart a registrations archive process. +class RemoveStuckRegistrationsView(NodeMixin, View): + """ Allows an authorized user to remove a registration if it's stuck in the archiving process.
""" - template_name = 'nodes/restart_registrations_modal.html' permission_required = ('osf.view_node', 'osf.change_node') def post(self, request, *args, **kwargs): - # Prevents circular imports that cause admin app to hang at startup - from osf.management.commands.force_archive import archive, verify stuck_reg = self.get_object() - if verify(stuck_reg): - try: - archive(stuck_reg) - messages.success(request, 'Registration archive processes has restarted') - except Exception as exc: - messages.error(request, f'This registration cannot be unstuck due to {exc.__class__.__name__} ' - f'if the problem persists get a developer to fix it.') + if Registration.find_failed_registrations().filter(id=stuck_reg.id).exists(): + stuck_reg.delete_registration_tree(save=True) + messages.success(request, 'The registration has been deleted') else: messages.error(request, 'This registration may not technically be stuck,' ' if the problem persists get a developer to fix it.') @@ -696,20 +690,80 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) -class RemoveStuckRegistrationsView(NodeMixin, TemplateView): - """ Allows an authorized user to remove a registrations if it's stuck in the archiving process. +class CheckArchiveStatusRegistrationsView(NodeMixin, View): + """Allows an authorized user to check a registration archive status. + """ + permission_required = ('osf.view_node', 'osf.change_node') + + def get(self, request, *args, **kwargs): + # Prevents circular imports that cause admin app to hang at startup + from osf.management.commands.force_archive import check + + registration = self.get_object() + + if registration.archived: + messages.success(request, f"Registration {registration._id} is archived.") + return redirect(self.get_success_url()) + + try: + archive_status = check(registration) + messages.success(request, archive_status) + except RegistrationStuckError as exc: + messages.error(request, str(exc)) + + return redirect(self.get_success_url()) + + +class CollisionMode(Enum): + NONE: str = 'none' + SKIP: str = 'skip' + DELETE: str = 'delete' + + +class ForceArchiveRegistrationsView(NodeMixin, View): + """Allows an authorized user to force archive registration. 
""" - template_name = 'nodes/remove_registrations_modal.html' permission_required = ('osf.view_node', 'osf.change_node') def post(self, request, *args, **kwargs): - stuck_reg = self.get_object() - if Registration.find_failed_registrations().filter(id=stuck_reg.id).exists(): - stuck_reg.delete_registration_tree(save=True) - messages.success(request, 'The registration has been deleted') + # Prevents circular imports that cause admin app to hang at startup + from osf.management.commands.force_archive import verify, archive, DEFAULT_PERMISSIBLE_ADDONS + + registration = self.get_object() + force_archive_params = request.POST + + collision_mode = force_archive_params.get('collision_mode', CollisionMode.NONE.value) + delete_collision = CollisionMode.DELETE.value == collision_mode + skip_collision = CollisionMode.SKIP.value == collision_mode + + allow_unconfigured = force_archive_params.get('allow_unconfigured', False) + + addons = set(force_archive_params.getlist('addons', [])) + addons.update(DEFAULT_PERMISSIBLE_ADDONS) + + try: + verify(registration, permissible_addons=addons, raise_error=True) + except ValidationError as exc: + messages.error(request, str(exc)) + return redirect(self.get_success_url()) + + dry_mode = force_archive_params.get('dry_mode', False) + + if dry_mode: + messages.success(request, f"Registration {registration._id} can be archived.") else: - messages.error(request, 'This registration may not technically be stuck,' - ' if the problem persists get a developer to fix it.') + try: + archive( + registration, + permissible_addons=addons, + allow_unconfigured=allow_unconfigured, + skip_collision=skip_collision, + delete_collision=delete_collision, + ) + messages.success(request, 'Registration archive process has finished.') + except Exception as exc: + messages.error(request, f'This registration cannot be archived due to {exc.__class__.__name__}: {str(exc)}. 
' f'If the problem persists, get a developer to fix it.') return redirect(self.get_success_url()) diff --git a/admin/preprint_providers/forms.py b/admin/preprint_providers/forms.py index 06d27052098..1393aae41ef 100644 --- a/admin/preprint_providers/forms.py +++ b/admin/preprint_providers/forms.py @@ -112,11 +112,12 @@ class PreprintProviderRegisterModeratorOrAdminForm(forms.Form): """ A form that finds an existing OSF User, and grants permissions to that user so that they can use the admin app""" - def __init__(self, *args, **kwargs): - provider_id = kwargs.pop('provider_id') + def __init__(self, *args, provider_groups=None, **kwargs): super().__init__(*args, **kwargs) + + provider_groups = provider_groups or Group.objects.none() self.fields['group_perms'] = forms.ModelMultipleChoiceField( - queryset=Group.objects.filter(name__startswith=f'reviews_preprint_{provider_id}'), + queryset=provider_groups, required=False, widget=forms.CheckboxSelectMultiple ) diff --git a/admin/preprint_providers/views.py b/admin/preprint_providers/views.py index e98aed1ecfa..4c7439f4554 100644 --- a/admin/preprint_providers/views.py +++ b/admin/preprint_providers/views.py @@ -12,6 +12,7 @@ from django.contrib.auth.mixins import PermissionRequiredMixin from django.forms.models import model_to_dict from django.shortcuts import redirect, render +from django.utils.functional import cached_property from admin.base import settings from admin.base.forms import ImportFileForm @@ -459,14 +460,18 @@ class PreprintProviderRegisterModeratorOrAdmin(PermissionRequiredMixin, FormView template_name = 'preprint_providers/register_moderator_admin.html' form_class = PreprintProviderRegisterModeratorOrAdminForm + @cached_property + def target_provider(self): + return PreprintProvider.objects.get(id=self.kwargs['preprint_provider_id']) + def get_form_kwargs(self): kwargs = super().get_form_kwargs() - kwargs['provider_id'] = self.kwargs['preprint_provider_id'] + kwargs['provider_groups'] = self.target_provider.group_objects return kwargs def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) - context['provider_name'] = PreprintProvider.objects.get(id=self.kwargs['preprint_provider_id']).name + context['provider_name'] = self.target_provider.name return context def form_valid(self, form): @@ -477,13 +482,7 @@ def form_valid(self, form): raise Http404(f'OSF user with id "{user_id}" not found. 
Please double check.') for group in form.cleaned_data.get('group_perms'): - osf_user.groups.add(group) - split = group.name.split('_') - group_type = split[0] - if group_type == 'reviews': - provider_id = split[2] - provider = PreprintProvider.objects.get(id=provider_id) - provider.notification_subscriptions.get(event_name='new_pending_submissions').add_user_to_subscription(osf_user, 'email_transactional') + self.target_provider.add_to_group(osf_user, group) osf_user.save() messages.success(self.request, f'Permissions update successful for OSF User {osf_user.username}!') diff --git a/admin/preprints/urls.py b/admin/preprints/urls.py index f0a439f9722..4ab9bd33939 100644 --- a/admin/preprints/urls.py +++ b/admin/preprints/urls.py @@ -13,8 +13,10 @@ re_path(r'^(?P<guid>\w+)/change_provider/$', views.PreprintProviderChangeView.as_view(), name='preprint-provider'), re_path(r'^(?P<guid>\w+)/machine_state/$', views.PreprintMachineStateView.as_view(), name='preprint-machine-state'), re_path(r'^(?P<guid>\w+)/reindex_share_preprint/$', views.PreprintReindexShare.as_view(), name='reindex-share-preprint'), + re_path(r'^(?P<guid>\w+)/reversion_preprint/$', views.PreprintReVersion.as_view(), name='re-version-preprint'), re_path(r'^(?P<guid>\w+)/remove_user/(?P<user_id>[a-z0-9]+)/$', views.PreprintRemoveContributorView.as_view(), name='remove-user'), re_path(r'^(?P<guid>\w+)/make_private/$', views.PreprintMakePrivate.as_view(), name='make-private'), + re_path(r'^(?P<guid>\w+)/fix_editing/$', views.PreprintFixEditing.as_view(), name='fix-editing'), re_path(r'^(?P<guid>\w+)/make_public/$', views.PreprintMakePublic.as_view(), name='make-public'), re_path(r'^(?P<guid>\w+)/remove/$', views.PreprintDeleteView.as_view(), name='remove'), re_path(r'^(?P<guid>\w+)/restore/$', views.PreprintDeleteView.as_view(), name='restore'), diff --git a/admin/preprints/views.py b/admin/preprints/views.py index a936c27582e..ef7d1860e76 100644 --- a/admin/preprints/views.py +++ b/admin/preprints/views.py @@ -1,6 +1,7 @@ +from django.db import transaction from django.db.models import F from django.core.exceptions import PermissionDenied -from django.urls import NoReverseMatch +from django.http import HttpResponse, JsonResponse from django.contrib import messages from django.contrib.auth.mixins import PermissionRequiredMixin from django.shortcuts import redirect @@ -10,7 +11,7 @@ FormView, ) from django.utils import timezone -from django.urls import reverse_lazy +from django.urls import NoReverseMatch, reverse_lazy from admin.base.views import GuidView from admin.base.forms import GuidForm @@ -19,9 +20,13 @@ from api.share.utils import update_share from api.providers.workflows import Workflows +from api.preprints.serializers import PreprintSerializer from osf.exceptions import PreprintStateError +from rest_framework.exceptions import PermissionDenied as DrfPermissionDenied +from framework.exceptions import PermissionsError +from osf.management.commands.fix_preprints_has_data_links_and_why_no_data import process_wrong_why_not_data_preprints from osf.models import ( SpamStatus, Preprint, @@ -44,6 +49,7 @@ ) from osf.utils.workflows import DefaultStates from website import search +from website.files.utils import copy_files from website.preprints.tasks import on_preprint_updated @@ -55,8 +61,8 @@ def get_object(self): preprint.guid = preprint._id return preprint - def get_success_url(self): - return reverse_lazy('preprints:preprint', kwargs={'guid': self.kwargs['guid']}) + def get_success_url(self, guid=None): + return reverse_lazy('preprints:preprint', kwargs={'guid': guid or self.kwargs['guid']}) 
class PreprintView(PreprintMixin, GuidView): @@ -182,6 +188,55 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) +class PreprintReVersion(PreprintMixin, View): + """Allows an authorized user to create a new version 1 of a preprint based on earlier + primary file version(s). All operations are executed within an atomic transaction. + If any step fails, the entire transaction will be rolled back and no version will be changed. + """ + permission_required = 'osf.change_node' + + def post(self, request, *args, **kwargs): + preprint = self.get_object() + + file_versions = request.POST.getlist('file_versions') + if not file_versions: + return HttpResponse('At least one file version should be attached.', status=400) + + try: + with transaction.atomic(): + versions = preprint.get_preprint_versions() + for version in versions: + version.upgrade_version() + + new_preprint, data_to_update = Preprint.create_version( + create_from_guid=preprint._id, + assign_version_number=1, + auth=request, + ignore_permission=True, + ignore_existing_versions=True, + ) + data_to_update = data_to_update or dict() + + primary_file = copy_files(preprint.primary_file, target_node=new_preprint, identifier__in=file_versions) + if primary_file is None: + raise ValueError(f"Primary file {preprint.primary_file.id} doesn't have the following versions: {file_versions}") # rollback changes + data_to_update['primary_file'] = primary_file + + # FIXME: currently it's not possible to ignore permissions when updating subjects + # via the serializer; remove this logic if it is deprecated + subjects = data_to_update.pop('subjects', None) + if subjects: + new_preprint.set_subjects_from_relationships(subjects, auth=request, ignore_permission=True) + + PreprintSerializer(new_preprint, context={'request': request, 'ignore_permission': True}).update(new_preprint, data_to_update) + except ValueError as exc: + return HttpResponse(str(exc), status=400) + except (PermissionsError, DrfPermissionDenied) as exc: + return HttpResponse(f'Not enough permissions to perform this action: {str(exc)}', status=400) + + return JsonResponse({'redirect': self.get_success_url(new_preprint._id)}) + + class PreprintReindexElastic(PreprintMixin, View): """ Allows an authorized user to reindex a node in ElasticSearch. """ @@ -525,6 +580,21 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) +class PreprintFixEditing(PreprintMixin, View): + """ Allows an authorized user to manually fix the why_no_data field. + """ + permission_required = 'osf.change_node' + + def post(self, request, *args, **kwargs): + preprint = self.get_object() + process_wrong_why_not_data_preprints( + version_guid=preprint._id, + dry_run=False, + executing_through_command=False, + ) + + return redirect(self.get_success_url()) + class PreprintMakePublic(PreprintMixin, View): """ Allows an authorized user to manually make a private preprint public. 
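Review note: a minimal sketch of driving the new PreprintReVersion endpoint from a Django test client, mirroring the admin test added later in this diff; admin_user, the guid 'abc12', and the version identifiers here are illustrative assumptions, not values from the change itself.

from django.test import Client
from django.urls import reverse

client = Client()
# Assumption: admin_user already belongs to the osf_admin group and so holds osf.change_node.
client.force_login(admin_user)
url = reverse('preprints:re-version-preprint', kwargs={'guid': 'abc12'})
# file_versions is the POST list the view reads via request.POST.getlist('file_versions').
response = client.post(url, data={'file_versions': ['1', '2']})
# On success the view returns JSON such as {'redirect': '/preprints/<new guid>/'};
# validation and permission failures come back as HTTP 400 with a plain-text body.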
diff --git a/admin/static/js/preprints/preprints.js b/admin/static/js/preprints/preprints.js new file mode 100644 index 00000000000..21217725ba2 --- /dev/null +++ b/admin/static/js/preprints/preprints.js @@ -0,0 +1,18 @@ +$(document).ready(function() { + + $("#confirmReversion").on("submit", function (event) { + event.preventDefault(); + + $.ajax({ + url: window.templateVars.reVersionPreprint, + type: "post", + data: $("#re-version-preprint-form").serialize(), + }).done(function (response) { + if (response.redirect) { + window.location.href = response.redirect; + } + }).fail(function (jqXHR, textStatus, error) { + $("#version-validation").text(jqXHR.responseText); + }); + }); +}); diff --git a/admin/templates/management/commands.html b/admin/templates/management/commands.html index beaaf9cfb5d..93eeaf24c18 100644 @@ -74,7 +74,7 @@

Ban spam users by regular expression


- +
@@ -133,6 +133,7 @@

Resync with CrossRef and DataCite

{% csrf_token %} +
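Review note: the checkbox added above feeds BulkResync.post in admin/management/views.py earlier in this diff, where request.POST.get('missing_preprint_dois_only', False) yields the raw string 'on' for a standard checked checkbox and False when it is unchecked, so the Celery task receives a truthy string rather than a boolean. A sketch of the resulting call, assuming the task only truth-tests the flag:

sync_doi_metadata.apply_async(kwargs={
    'modified_date': timezone.now(),
    'batch_size': None,
    'dry_run': False,
    'missing_preprint_dois_only': 'on',  # raw POST value; False when the box is left unchecked
})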
diff --git a/admin/templates/nodes/node.html b/admin/templates/nodes/node.html index aa705243a69..c178709534f 100644 --- a/admin/templates/nodes/node.html +++ b/admin/templates/nodes/node.html @@ -17,7 +17,7 @@ View Logs {% include "nodes/remove_node.html" with node=node %} - {% include "nodes/restart_stuck_registration.html" with node=node %} + {% include "nodes/registration_force_archive.html" with node=node %} {% include "nodes/make_private.html" with node=node %} {% include "nodes/make_public.html" with node=node %} {% include "nodes/mark_spam.html" with node=node %} diff --git a/admin/templates/nodes/registration_force_archive.html b/admin/templates/nodes/registration_force_archive.html new file mode 100644 index 00000000000..7c87f1a837d --- /dev/null +++ b/admin/templates/nodes/registration_force_archive.html @@ -0,0 +1,79 @@ +{% if node.is_registration %} + + Check archive status + +{% if not node.archived %} + {% if node.is_stuck_registration %} + + Restart Stuck Registration + + + Remove Stuck Registration + + {% else %} + + Force Archive + + {% endif %} + + + + +
+ + +{% endif %} +{% endif %} diff --git a/admin/templates/nodes/registration_force_archive_form.html b/admin/templates/nodes/registration_force_archive_form.html new file mode 100644 index 00000000000..ab52d7f7c33 --- /dev/null +++ b/admin/templates/nodes/registration_force_archive_form.html @@ -0,0 +1,39 @@ +
+ {% csrf_token %} + +
\ No newline at end of file diff --git a/admin/templates/nodes/restart_stuck_registration.html b/admin/templates/nodes/restart_stuck_registration.html deleted file mode 100644 index c81bd3fb55f..00000000000 --- a/admin/templates/nodes/restart_stuck_registration.html +++ /dev/null @@ -1,51 +0,0 @@ -{% if node.is_stuck_registration %} - - Restart Registration - - - Remove Registration - - - -{% endif %} - diff --git a/admin/templates/preprints/assign_new_version.html b/admin/templates/preprints/assign_new_version.html new file mode 100644 index 00000000000..3ee5fcce6d5 --- /dev/null +++ b/admin/templates/preprints/assign_new_version.html @@ -0,0 +1,32 @@ +{% load node_extras %} + + Create new version 1 + + + diff --git a/admin/templates/preprints/fix_editing.html b/admin/templates/preprints/fix_editing.html new file mode 100644 index 00000000000..84c6e3cdd99 --- /dev/null +++ b/admin/templates/preprints/fix_editing.html @@ -0,0 +1,21 @@ +{% if perms.osf.change_node %} + + Fix why not data + + +{% endif %} diff --git a/admin/templates/preprints/preprint.html b/admin/templates/preprints/preprint.html index 2763d3d35f1..719304d716f 100644 --- a/admin/templates/preprints/preprint.html +++ b/admin/templates/preprints/preprint.html @@ -26,6 +26,8 @@ {% include "preprints/make_private.html" with preprint=preprint %} {% include "preprints/make_public.html" with preprint=preprint %} {% include "preprints/make_published.html" with preprint=preprint %} + {% include "preprints/fix_editing.html" with preprint=preprint %} + {% include "preprints/assign_new_version.html" with preprint=preprint %} @@ -122,3 +124,11 @@

 Preprint: {{ preprint.title }} {% endblock content %} +{% block bottom_js %} + + +{% endblock %} diff --git a/admin/users/forms.py b/admin/users/forms.py index 737b8c8f4a3..e64a648ff77 100644 --- a/admin/users/forms.py +++ b/admin/users/forms.py @@ -14,6 +14,7 @@ class UserSearchForm(forms.Form): guid = forms.CharField(label='guid', min_length=5, max_length=5, required=False) # TODO: Move max to 6 when needed name = forms.CharField(label='name', required=False) email = forms.EmailField(label='email', required=False) + orcid = forms.CharField(label='orcid', required=False) class MergeUserForm(forms.Form): diff --git a/admin/users/views.py b/admin/users/views.py index 85b6eac4ee6..38787a84a23 100644 --- a/admin/users/views.py +++ b/admin/users/views.py @@ -25,6 +25,7 @@ from framework.auth.core import generate_verification_key from website import search +from website.settings import EXTERNAL_IDENTITY_PROFILE from osf.models.admin_log_entry import ( update_admin_log, @@ -126,6 +127,7 @@ def form_valid(self, form): guid = form.cleaned_data['guid'] name = form.cleaned_data['name'] email = form.cleaned_data['email'] + orcid = form.cleaned_data['orcid'] if name: return redirect(reverse('users:search-list', kwargs={'name': name})) @@ -148,6 +150,18 @@ def form_valid(self, form): return redirect(reverse('users:user', kwargs={'guid': guid})) + if orcid: + external_id_provider = EXTERNAL_IDENTITY_PROFILE.get('OrcidProfile') + user = get_user(external_id_provider=external_id_provider, external_id=orcid) + + if not user: + return page_not_found( + self.request, + AttributeError(f'resource with id "{orcid}" not found.') + ) + + return redirect(reverse('users:user', kwargs={'guid': user._id})) + return super().form_valid(form) @@ -456,9 +470,6 @@ def get_context_data(self, **kwargs): class GetUserConfirmationLink(GetUserLink): def get_link(self, user): - if user.is_confirmed: - return f'User {user._id} is already confirmed' - if user.deleted or user.is_merged: return f'User {user._id} is deleted or merged' diff --git a/admin_tests/nodes/test_views.py b/admin_tests/nodes/test_views.py index c80eeb27e47..9f978e75268 100644 --- a/admin_tests/nodes/test_views.py +++ b/admin_tests/nodes/test_views.py @@ -17,8 +17,9 @@ NodeKnownHamList, NodeConfirmHamView, AdminNodeLogView, - RestartStuckRegistrationsView, RemoveStuckRegistrationsView, + CheckArchiveStatusRegistrationsView, + ForceArchiveRegistrationsView, ApprovalBacklogListView, ConfirmApproveBacklogView ) @@ -375,28 +376,50 @@ def test_get_queryset(self): assert log_entry.params['title_new'] == 'New Title' -class TestRestartStuckRegistrationsView(AdminTestCase): +class TestCheckArchiveStatusRegistrationsView(AdminTestCase): + def setUp(self): + super().setUp() + self.user = AuthUserFactory() + self.view = CheckArchiveStatusRegistrationsView + self.request = RequestFactory().post('/fake_path') + + def test_check_archive_status(self): + from django.contrib.messages.storage.fallback import FallbackStorage + + registration = RegistrationFactory(creator=self.user, archive=True) + view = setup_log_view(self.view(), self.request, guid=registration._id) + + # django.contrib.messages has a bug which affects unittests + # more info here -> https://code.djangoproject.com/ticket/17971 + setattr(self.request, 'session', 'session') + messages = FallbackStorage(self.request) + setattr(self.request, '_messages', messages) + + view.get(self.request) + + assert not registration.archived + assert f'Registration {registration._id} is not stuck in archiving' in [m.message for m 
in messages] + + +class TestForceArchiveRegistrationsView(AdminTestCase): def setUp(self): super().setUp() self.user = AuthUserFactory() self.registration = RegistrationFactory(creator=self.user, archive=True) self.registration.save() - self.view = RestartStuckRegistrationsView + self.view = ForceArchiveRegistrationsView self.request = RequestFactory().post('/fake_path') def test_get_object(self): - view = RestartStuckRegistrationsView() - view = setup_log_view(view, self.request, guid=self.registration._id) + view = setup_log_view(self.view(), self.request, guid=self.registration._id) assert self.registration == view.get_object() - def test_restart_stuck_registration(self): + def test_force_archive_registration(self): # Prevents circular import that prevents admin app from starting up from django.contrib.messages.storage.fallback import FallbackStorage - view = RestartStuckRegistrationsView() - view = setup_log_view(view, self.request, guid=self.registration._id) - assert self.registration.archive_job.status == 'INITIATED' + view = setup_log_view(self.view(), self.request, guid=self.registration._id) # django.contrib.messages has a bug which affects unittests # more info here -> https://code.djangoproject.com/ticket/17971 @@ -408,6 +431,24 @@ def test_restart_stuck_registration(self): assert self.registration.archive_job.status == 'SUCCESS' + def test_force_archive_registration_dry_mode(self): + # Prevents circular import that prevents admin app from starting up + from django.contrib.messages.storage.fallback import FallbackStorage + + request = RequestFactory().post('/fake_path', data={'dry_mode': 'true'}) + view = setup_log_view(self.view(), request, guid=self.registration._id) + assert self.registration.archive_job.status == 'INITIATED' + + # django.contrib.messages has a bug which affects unittests + # more info here -> https://code.djangoproject.com/ticket/17971 + setattr(request, 'session', 'session') + messages = FallbackStorage(request) + setattr(request, '_messages', messages) + + view.post(request) + + assert self.registration.archive_job.status == 'INITIATED' + class TestRemoveStuckRegistrationsView(AdminTestCase): def setUp(self): diff --git a/admin_tests/preprints/test_views.py b/admin_tests/preprints/test_views.py index 1fb9d68482d..1273a3a6363 100644 --- a/admin_tests/preprints/test_views.py +++ b/admin_tests/preprints/test_views.py @@ -536,7 +536,8 @@ def withdrawal_request(self, preprint, submitter): withdrawal_request.run_submit(submitter) return withdrawal_request - def test_can_approve_withdrawal_request(self, withdrawal_request, submitter, preprint, admin): + @mock.patch('osf.models.preprint.update_or_enqueue_on_preprint_updated') + def test_can_approve_withdrawal_request(self, mocked_function, withdrawal_request, submitter, preprint, admin): assert withdrawal_request.machine_state == DefaultStates.PENDING.value original_comment = withdrawal_request.comment @@ -552,6 +553,12 @@ def test_can_approve_withdrawal_request(self, withdrawal_request, submitter, pre assert withdrawal_request.machine_state == DefaultStates.ACCEPTED.value assert original_comment == withdrawal_request.target.withdrawal_justification + # share update is triggered when updating "date_withdrawn" and "withdrawal_justification" during the withdrawal process + updated_fields = mocked_function.call_args[1]['saved_fields'] + assert 'date_withdrawn' in updated_fields + assert 'withdrawal_justification' in updated_fields + assert preprint.SEARCH_UPDATE_FIELDS.intersection(updated_fields) + def 
test_can_reject_withdrawal_request(self, withdrawal_request, admin, preprint): assert withdrawal_request.machine_state == DefaultStates.PENDING.value @@ -797,3 +804,33 @@ def test_admin_user_can_publish_preprint(self, user, preprint, plain_view): preprint.reload() assert preprint.is_published + + +@pytest.mark.urls('admin.base.urls') +class TestPreprintReVersionView: + + @pytest.fixture() + def plain_view(self): + return views.PreprintReVersion + + def test_admin_user_can_add_new_version_one(self, user, preprint, plain_view): + # user isn't admin contributor in the preprint + assert preprint.contributors.filter(id=user.id).exists() is False + assert preprint.has_permission(user, ADMIN) is False + assert len(preprint.get_preprint_versions()) == 1 + + request = RequestFactory().post( + reverse('preprints:re-version-preprint', + kwargs={'guid': preprint._id}), + data={'file_versions': ['1']} + ) + request.user = user + + admin_group = Group.objects.get(name='osf_admin') + admin_group.permissions.add(Permission.objects.get(codename='change_node')) + user.groups.add(admin_group) + + plain_view.as_view()(request, guid=preprint._id) + preprint.refresh_from_db() + + assert len(preprint.get_preprint_versions()) == 2 diff --git a/admin_tests/users/test_views.py b/admin_tests/users/test_views.py index cd51459e134..d22211fe848 100644 --- a/admin_tests/users/test_views.py +++ b/admin_tests/users/test_views.py @@ -141,6 +141,16 @@ def test_correct_view_permissions(self): response = self.view.as_view()(request, guid=user._id) self.assertEqual(response.status_code, 302) + def test_user_with_deleted_node_is_deleted(self): + patch_messages(self.request) + + project = ProjectFactory(creator=self.user, is_deleted=True) + assert self.user.nodes.filter(id=project.id, is_deleted=True).count() + + self.view().post(self.request) + self.user.reload() + assert self.user.deleted + class TestDisableUser(AdminTestCase): def setUp(self): @@ -392,7 +402,14 @@ def test_correct_view_permissions(self): class TestUserSearchView(AdminTestCase): def setUp(self): - self.user_1 = AuthUserFactory(fullname='Broken Matt Hardy') + self.user_1 = AuthUserFactory( + fullname='Broken Matt Hardy', + external_identity={ + settings.EXTERNAL_IDENTITY_PROFILE.get('OrcidProfile'): { + '1234-5678': 'VERIFIED' + } + } + ) self.user_2 = AuthUserFactory(fullname='Jeff Hardy') self.user_3 = AuthUserFactory(fullname='Reby Sky') self.user_4 = AuthUserFactory(fullname='King Maxel Hardy') @@ -415,6 +432,14 @@ def test_search_user_by_guid(self): assert response.status_code == 302 assert response.headers['location'] == f'/users/{self.user_1.guids.first()._id}/' + form_data = { + 'guid': 'wrong' + } + form = UserSearchForm(data=form_data) + assert form.is_valid() + response = self.view.form_valid(form) + assert response.status_code == 404 + def test_search_user_by_name(self): form_data = { 'name': 'Hardy' @@ -445,6 +470,14 @@ def test_search_user_by_username(self): assert response.status_code == 302 assert response.headers['location'] == f'/users/{self.user_1.guids.first()._id}/' + form_data = { + 'email': 'wrong@email.com' + } + form = UserSearchForm(data=form_data) + assert form.is_valid() + response = self.view.form_valid(form) + assert response.status_code == 404 + def test_search_user_by_alternate_email(self): form_data = { 'email': self.user_2_alternate_email @@ -477,6 +510,24 @@ def test_search_user_list_case_insensitive(self): for user in results: assert 'Hardy' in user.fullname + def test_search_user_by_orcid(self): + form_data = { + 'orcid': 
'1234-5678' + } + form = UserSearchForm(data=form_data) + assert form.is_valid() + response = self.view.form_valid(form) + assert response.status_code == 302 + assert response.headers['location'] == f'/users/{self.user_1.guids.first()._id}/' + + form_data = { + 'orcid': '1234-5678-90' + } + form = UserSearchForm(data=form_data) + assert form.is_valid() + response = self.view.form_valid(form) + assert response.status_code == 404 + class TestGetLinkView(AdminTestCase): diff --git a/api/actions/views.py b/api/actions/views.py index 9e1d6d76ebb..5a9f8803781 100644 --- a/api/actions/views.py +++ b/api/actions/views.py @@ -84,7 +84,6 @@ def get_serializer_class(self): return ReviewActionSerializer def get_object(self): - action = None action_id = self.kwargs['action_id'] if NodeRequestAction.objects.filter(_id=action_id).exists() or PreprintRequestAction.objects.filter(_id=action_id).exists(): @@ -169,6 +168,9 @@ class ReviewActionListCreate(JSONAPIBaseView, generics.ListCreateAPIView, Target # overrides ListCreateAPIView def perform_create(self, serializer): target = serializer.validated_data['target'] + if not target: + raise NotFound(f'Unable to find specified Action {target}') + self.check_object_permissions(self.request, target) if not target.provider.is_reviewed: diff --git a/api/applications/serializers.py b/api/applications/serializers.py index 43120ead5d9..1b606497156 100644 --- a/api/applications/serializers.py +++ b/api/applications/serializers.py @@ -73,6 +73,7 @@ class ApiOAuth2ApplicationSerializer(ApiOAuthApplicationBaseSerializer): required=True, validators=[URLValidator()], label='Home URL', + max_length=200, ) callback_url = ser.CharField( @@ -80,6 +81,7 @@ class ApiOAuth2ApplicationSerializer(ApiOAuthApplicationBaseSerializer): required=True, validators=[URLValidator()], label='Callback URL', + max_length=200, ) owner = ser.CharField( diff --git a/api/base/utils.py b/api/base/utils.py index 0e3690e6dbd..c08c8c73977 100644 --- a/api/base/utils.py +++ b/api/base/utils.py @@ -69,7 +69,9 @@ def get_user_auth(request): authenticated user attached to it. 
""" user = request.user - private_key = request.query_params.get('view_only', None) + private_key = None + if hasattr(request, 'query_params'): # allows django WSGIRequest to be used as well + private_key = request.query_params.get('view_only', None) if user.is_anonymous: auth = Auth(None, private_key=private_key) else: diff --git a/api/institutions/serializers.py b/api/institutions/serializers.py index 0262f9dddd7..2af640f3b48 100644 --- a/api/institutions/serializers.py +++ b/api/institutions/serializers.py @@ -1,7 +1,10 @@ +from django.db.models import Count, F + from rest_framework import serializers as ser from rest_framework import exceptions -from osf.models import Node, Registration +from framework import sentry +from osf.models import Node, Registration, OSFUser from osf.utils import permissions as osf_permissions from api.base.serializers import ( @@ -355,12 +358,28 @@ class Meta: related_view='institutions:institution-detail', related_view_kwargs={'institution_id': ''}, ) + contacts = ser.SerializerMethodField() links = LinksField({}) def get_absolute_url(self): return None # there is no detail view for institution-users + def get_contacts(self, obj): + user = OSFUser.load(obj._d_['user_id']) + if not user: + sentry.log_message(f'Institutional report with id {obj.meta['id']} has missing user') + return [] + + results = user.received_user_messages.annotate( + sender_name=F('sender__fullname'), + ).values( + 'sender_name', + ).annotate( + count=Count('sender_name'), + ).order_by('sender_name') + return list(results) + class NewInstitutionSummaryMetricsSerializer(JSONAPISerializer): '''serializer for institution-summary metrics diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index 2e802a438a0..41e8fee203f 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -296,7 +296,8 @@ def update(self, preprint, validated_data): assert isinstance(preprint, Preprint), 'You must specify a valid preprint to be updated' auth = get_user_auth(self.context['request']) - if not preprint.has_permission(auth.user, osf_permissions.WRITE): + ignore_permission = self.context.get('ignore_permission', False) + if not ignore_permission and not preprint.has_permission(auth.user, osf_permissions.WRITE): raise exceptions.PermissionDenied(detail='User must have admin or write permissions to update a preprint.') for field in ['conflict_of_interest_statement', 'why_no_data', 'why_no_prereg']: @@ -344,76 +345,40 @@ def update(self, preprint, validated_data): detail='You cannot edit this field while your prereg links availability is set to false or is unanswered.', ) - def require_admin_permission(): - if not preprint.has_permission(auth.user, osf_permissions.ADMIN): - raise exceptions.PermissionDenied(detail='Must have admin permissions to update author assertion fields.') - - if 'has_coi' in validated_data: - require_admin_permission() - try: - preprint.update_has_coi(auth, validated_data['has_coi']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'conflict_of_interest_statement' in validated_data: - require_admin_permission() - try: - preprint.update_conflict_of_interest_statement(auth, validated_data['conflict_of_interest_statement']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'has_data_links' in validated_data: - require_admin_permission() - try: - preprint.update_has_data_links(auth, validated_data['has_data_links']) - except PreprintStateError as e: - raise 
exceptions.ValidationError(detail=str(e)) - - if 'why_no_data' in validated_data: - require_admin_permission() - try: - preprint.update_why_no_data(auth, validated_data['why_no_data']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'data_links' in validated_data: - require_admin_permission() - try: - preprint.update_data_links(auth, validated_data['data_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - else: - if updated_has_data_links == 'no' and preprint.data_links: - preprint.update_data_links(auth, []) - - if 'has_prereg_links' in validated_data: - require_admin_permission() - - try: - preprint.update_has_prereg_links(auth, validated_data['has_prereg_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'why_no_prereg' in validated_data: - require_admin_permission() - try: - preprint.update_why_no_prereg(auth, validated_data['why_no_prereg']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'prereg_links' in validated_data: - require_admin_permission() - try: - preprint.update_prereg_links(auth, validated_data['prereg_links']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) - - if 'prereg_link_info' in validated_data: - require_admin_permission() - try: - preprint.update_prereg_link_info(auth, validated_data['prereg_link_info']) - except PreprintStateError as e: - raise exceptions.ValidationError(detail=str(e)) + try: + if 'has_coi' in validated_data: + preprint.update_has_coi(auth, validated_data['has_coi'], ignore_permission=ignore_permission) + + if 'conflict_of_interest_statement' in validated_data: + preprint.update_conflict_of_interest_statement(auth, validated_data['conflict_of_interest_statement'], ignore_permission=ignore_permission) + + if 'has_data_links' in validated_data: + preprint.update_has_data_links(auth, validated_data['has_data_links'], ignore_permission=ignore_permission) + + if 'why_no_data' in validated_data: + preprint.update_why_no_data(auth, validated_data['why_no_data'], ignore_permission=ignore_permission) + + if 'has_prereg_links' in validated_data: + preprint.update_has_prereg_links(auth, validated_data['has_prereg_links'], ignore_permission=ignore_permission) + + if 'why_no_prereg' in validated_data: + preprint.update_why_no_prereg(auth, validated_data['why_no_prereg'], ignore_permission=ignore_permission) + + if 'prereg_links' in validated_data: + preprint.update_prereg_links(auth, validated_data['prereg_links'], ignore_permission=ignore_permission) + + if 'prereg_link_info' in validated_data: + preprint.update_prereg_link_info(auth, validated_data['prereg_link_info'], ignore_permission=ignore_permission) + + if 'data_links' in validated_data: + preprint.update_data_links(auth, validated_data['data_links'], ignore_permission=ignore_permission) + else: + if updated_has_data_links == 'no' and preprint.data_links: + preprint.update_data_links(auth, [], ignore_permission=ignore_permission) + except PreprintStateError as e: + raise exceptions.ValidationError(detail=str(e)) + except PermissionsError: + raise exceptions.PermissionDenied(detail='Must have admin permissions to update author assertion fields.') published = validated_data.pop('is_published', None) if published and preprint.provider.is_reviewed: @@ -434,7 +399,7 @@ def require_admin_permission(): primary_file = validated_data.pop('primary_file', None) if primary_file: - 
self.set_field(preprint.set_primary_file, primary_file, auth) + self.set_field(preprint.set_primary_file, primary_file, auth, ignore_permission=ignore_permission) old_tags = set(preprint.tags.values_list('name', flat=True)) if 'tags' in validated_data: @@ -451,7 +416,7 @@ def require_admin_permission(): if 'node' in validated_data: node = validated_data.pop('node', None) - self.set_field(preprint.set_supplemental_node, node, auth) + self.set_field(preprint.set_supplemental_node, node, auth, ignore_node_permissions=ignore_permission, ignore_permission=ignore_permission) if 'subjects' in validated_data: subjects = validated_data.pop('subjects', None) @@ -459,18 +424,18 @@ def require_admin_permission(): if 'title' in validated_data: title = validated_data['title'] - self.set_field(preprint.set_title, title, auth) + self.set_field(preprint.set_title, title, auth, ignore_permission=ignore_permission) if 'description' in validated_data: description = validated_data['description'] - self.set_field(preprint.set_description, description, auth) + self.set_field(preprint.set_description, description, auth, ignore_permission=ignore_permission) if 'article_doi' in validated_data: preprint.article_doi = validated_data['article_doi'] if 'license_type' in validated_data or 'license' in validated_data: license_details = get_license_details(preprint, validated_data) - self.set_field(preprint.set_preprint_license, license_details, auth) + self.set_field(preprint.set_preprint_license, license_details, auth, ignore_permission=ignore_permission) if 'original_publication_date' in validated_data: preprint.original_publication_date = validated_data['original_publication_date'] or None @@ -483,9 +448,9 @@ def require_admin_permission(): raise exceptions.ValidationError( detail='A valid primary_file must be set before publishing a preprint.', ) - self.set_field(preprint.set_published, published, auth) + self.set_field(preprint.set_published, published, auth, ignore_permission=ignore_permission) recently_published = published - preprint.set_privacy('public', log=False, save=True) + preprint.set_privacy('public', log=False, save=True, ignore_permission=ignore_permission) try: preprint.full_clean() @@ -506,9 +471,9 @@ def require_admin_permission(): return preprint - def set_field(self, func, val, auth, save=False): + def set_field(self, func, val, auth, **kwargs): try: - func(val, auth) + func(val, auth, **kwargs) except PermissionsError as e: raise exceptions.PermissionDenied(detail=str(e)) except (ValueError, ValidationError, NodeStateError) as e: @@ -566,6 +531,13 @@ def create(self, validated_data): raise Conflict(detail='Failed to create a new preprint version due to unpublished pending version exists.') if not preprint: raise NotFound(detail='Failed to create a new preprint version due to source preprint not found.') + for contributor in preprint.contributor_set.filter(user__is_registered=False): + contributor.user.add_unclaimed_record( + claim_origin=preprint, + referrer=auth.user, + email=contributor.user.email, + given_name=contributor.user.fullname, + ) if data_to_update: return self.update(preprint, data_to_update) return preprint diff --git a/api/preprints/views.py b/api/preprints/views.py index af1cb3af974..cd5dba8ba8d 100644 --- a/api/preprints/views.py +++ b/api/preprints/views.py @@ -1,6 +1,7 @@ import re from packaging.version import Version +from django.contrib.auth.models import AnonymousUser from rest_framework import generics from rest_framework.exceptions import MethodNotAllowed, NotFound, 
PermissionDenied, NotAuthenticated, ValidationError from rest_framework import permissions as drf_permissions @@ -136,12 +137,17 @@ def get_preprint(self, check_object_permissions=True, ignore_404=False): if check_object_permissions: self.check_object_permissions(self.request, preprint) - user_is_moderator = preprint.provider.get_group('moderator').user_set.filter(id=self.request.user.id).exists() - user_is_contributor = preprint.contributors.filter(id=self.request.user.id).exists() + user = self.request.user + if isinstance(user, AnonymousUser): + user_is_reviewer = user_is_contributor = False + else: + user_is_reviewer = user.has_groups(preprint.provider.group_names) + user_is_contributor = preprint.is_contributor(user) + if ( preprint.machine_state == DefaultStates.INITIAL.value and not user_is_contributor and - user_is_moderator + user_is_reviewer ): raise NotFound @@ -149,7 +155,7 @@ preprint.machine_state in PUBLIC_STATES[preprint.provider.reviews_workflow] or preprint.machine_state == ReviewStates.WITHDRAWN.value, ) - if not preprint_is_public and not user_is_contributor and not user_is_moderator: + if not preprint_is_public and not user_is_contributor and not user_is_reviewer: raise NotFound return preprint diff --git a/api/registrations/urls.py b/api/registrations/urls.py index 66e5175b05b..232be823bb9 100644 --- a/api/registrations/urls.py +++ b/api/registrations/urls.py @@ -13,6 +13,7 @@ re_path(r'^(?P<node_id>\w+)/$', views.RegistrationDetail.as_view(), name=views.RegistrationDetail.view_name), re_path(r'^(?P<node_id>\w+)/bibliographic_contributors/$', views.RegistrationBibliographicContributorsList.as_view(), name=views.RegistrationBibliographicContributorsList.view_name), re_path(r'^(?P<node_id>\w+)/cedar_metadata_records/$', views.RegistrationCedarMetadataRecordsList.as_view(), name=views.RegistrationCedarMetadataRecordsList.view_name), + re_path(r'^(?P<node_id>\w+)/callbacks/$', views.RegistrationCallbackView.as_view(), name=views.RegistrationCallbackView.view_name), re_path(r'^(?P<node_id>\w+)/children/$', views.RegistrationChildrenList.as_view(), name=views.RegistrationChildrenList.view_name), re_path(r'^(?P<node_id>\w+)/comments/$', views.RegistrationCommentsList.as_view(), name=views.RegistrationCommentsList.view_name), re_path(r'^(?P<node_id>\w+)/contributors/$', views.RegistrationContributorsList.as_view(), name=views.RegistrationContributorsList.view_name), diff --git a/api/registrations/views.py b/api/registrations/views.py index 8254ea69edf..a8d10d0602b 100644 --- a/api/registrations/views.py +++ b/api/registrations/views.py @@ -1,7 +1,13 @@ -from rest_framework import generics, mixins, permissions as drf_permissions +from rest_framework import generics, mixins, permissions as drf_permissions, status from rest_framework.exceptions import ValidationError, NotFound, PermissionDenied +from rest_framework.response import Response +from framework.exceptions import HTTPError from framework.auth.oauth_scopes import CoreScopes +from addons.base.views import DOWNLOAD_ACTIONS +from website.archiver import signals, ARCHIVER_NETWORK_ERROR, ARCHIVER_SUCCESS, ARCHIVER_FAILURE +from website.project import signals as project_signals + from osf.models import Registration, OSFUser, RegistrationProvider, OutcomeArtifact, CedarMetadataRecord from osf.utils.permissions import WRITE_NODE from osf.utils.workflows import ApprovalStates @@ -28,6 +34,7 @@ JSONAPIMultipleRelationshipsParser, JSONAPIRelationshipParserForRegularJSON, JSONAPIMultipleRelationshipsParserForRegularJSON, 
+ HMACSignedParser, ) from api.base.utils import ( get_user_auth, @@ -1040,3 +1047,47 @@ def get_default_queryset(self): def get_queryset(self): return self.get_queryset_from_request() + + +class RegistrationCallbackView(JSONAPIBaseView, generics.UpdateAPIView, RegistrationMixin): + permission_classes = [drf_permissions.AllowAny] + + view_category = 'registrations' + view_name = 'registration-callbacks' + + parser_classes = [HMACSignedParser] + + def update(self, request, *args, **kwargs): + registration = self.get_node() + + try: + payload = request.data + if payload.get('action', None) in DOWNLOAD_ACTIONS: + return Response({'status': 'success'}, status=status.HTTP_200_OK) + errors = payload.get('errors') + src_provider = payload['source']['provider'] + if errors: + registration.archive_job.update_target( + src_provider, + ARCHIVER_FAILURE, + errors=errors, + ) + else: + # Dataverse requires two separate targets, one + # for draft files and one for published files + if src_provider == 'dataverse': + src_provider += '-' + (payload['destination']['name'].split(' ')[-1].lstrip('(').rstrip(')').strip()) + registration.archive_job.update_target( + src_provider, + ARCHIVER_SUCCESS, + ) + project_signals.archive_callback.send(registration) + return Response(status=status.HTTP_200_OK) + except HTTPError as e: + registration.archive_status = ARCHIVER_NETWORK_ERROR + registration.save() + signals.archive_fail.send( + registration, + errors=[str(e)], + ) + return Response(status=status.HTTP_200_OK) diff --git a/api/share/utils.py b/api/share/utils.py index 1178adfa85d..5083220ce7f 100644 --- a/api/share/utils.py +++ b/api/share/utils.py @@ -2,14 +2,9 @@ SHARE/Trove accepts metadata records as "indexcards" in turtle format: https://www.w3.org/TR/turtle/ """ -from functools import partial from http import HTTPStatus import logging -import random -from urllib.parse import urljoin -import uuid -from celery.exceptions import Retry from django.apps import apps import requests @@ -17,7 +12,6 @@ from framework.celery_tasks.handlers import enqueue_task from framework.encryption import ensure_bytes from framework.sentry import log_exception -from osf import models as osf_db from osf.metadata.osf_gathering import ( OsfmapPartition, pls_get_magic_metadata_basket, @@ -33,10 +27,6 @@ def shtrove_ingest_url(): return f'{settings.SHARE_URL}trove/ingest' -def sharev2_push_url(): - return f'{settings.SHARE_URL}api/v2/normalizeddata/' - - def is_qa_resource(resource): """ QA puts tags and special titles on their project to stop them from appearing in the search results. 
This function @@ -70,8 +60,6 @@ def _enqueue_update_share(osfresource): logger.warning(f'update_share skipping resource that has no guids: {osfresource}') return enqueue_task(task__update_share.s(_osfguid_value)) - if isinstance(osfresource, (osf_db.AbstractNode, osf_db.Preprint)): - enqueue_task(async_update_resource_share.s(_osfguid_value)) @celery_app.task( @@ -212,398 +200,3 @@ def _next_osfmap_partition(partition: OsfmapPartition) -> OsfmapPartition | None return OsfmapPartition.MONTHLY_SUPPLEMENT case _: return None - - -### -# BEGIN soon-to-be-deleted (🤞) legacy sharev2 push -# (until dust has settled on iri-centric (rdf-based) search) -"""Utilities for pushing metadata to SHARE - -SHARE uses a specific dialect of JSON-LD that could/should have been -an internal implementation detail, but for historical reasons OSF must -be aware of it in order to push metadata updates to SHARE -- hopefully, -that awareness is contained entirely within this file. - -WARNING: In this context, "graph node" does NOT have anything to do with -OSF's `Node` model, but instead refers to a "node object" within a JSON-LD -graph, as defined at https://www.w3.org/TR/json-ld11/#dfn-node-object - -Each graph node must contain '@id' and '@type', plus other key/value pairs -according to the "SHARE schema": -https://github.com/CenterForOpenScience/SHARE/blob/develop/share/schema/schema-spec.yaml - -In this case, '@id' will always be a "blank" identifier, which begins with '_:' -and is used only to define relationships between nodes in the graph -- nodes -may reference each other with @id/@type pairs -- -e.g. {'@id': '...', '@type': '...'} - -Example serialization: The following SHARE-style JSON-LD document represents a -preprint with one "creator" and one identifier -- the graph contains nodes for -the preprint, person, and identifier, plus another node representing the -"creator" relationship between the preprint and person: -``` -{ - 'central_node_id': '_:foo', - '@graph': [ - { - '@id': '_:foo', - '@type': 'preprint', - 'title': 'This is a preprint!', - }, - { - '@id': '_:bar', - '@type': 'workidentifier', - 'uri': 'https://osf.io/foobar/', - 'creative_work': {'@id': '_:foo', '@type': 'preprint'} - }, - { - '@id': '_:baz', - '@type': 'person', - 'name': 'Magpie Jones' - }, - { - '@id': '_:qux', - '@type': 'creator', - 'creative_work': {'@id': '_:foo', '@type': 'preprint'}, - 'agent': {'@id': '_:baz', '@type': 'person'} - } - ] -} -``` -""" - - -class GraphNode: - """Utility class for building a JSON-LD graph suitable for pushing to SHARE - - WARNING: In this context, "graph node" does NOT have anything to do with - OSF's `Node` model, but instead refers to a "node object" within a JSON-LD - graph, as defined at https://www.w3.org/TR/json-ld11/#dfn-node-object - """ - - @staticmethod - def serialize_graph(central_graph_node, all_graph_nodes): - """Serialize the mess of GraphNodes to a JSON-friendly dictionary - :param central_graph_node: the GraphNode for the preprint/node/registration - this graph is most "about" - :param all_graph_nodes: list of GraphNodes to include -- will also recursively - look for and include GraphNodes contained in attrs - """ - to_visit = [central_graph_node, *all_graph_nodes] # make a copy of the list - visited = set() - while to_visit: - n = to_visit.pop(0) - if n not in visited: - visited.add(n) - to_visit.extend(n.get_related()) - - return { - 'central_node_id': central_graph_node.id, - '@graph': [node.serialize() for node in visited], - } - - @property - def ref(self): - return 
{'@id': self.id, '@type': self.type} - - def __init__(self, type_, **attrs): - self.id = f'_:{uuid.uuid4()}' - self.type = type_.lower() - self.attrs = attrs - - def get_related(self): - for value in self.attrs.values(): - if isinstance(value, GraphNode): - yield value - elif isinstance(value, list): - yield from value - - def serialize(self): - ser = {} - for key, value in self.attrs.items(): - if isinstance(value, GraphNode): - ser[key] = value.ref - elif isinstance(value, list) or value in (None, '', {}): - continue - else: - ser[key] = value - - return dict(self.ref, **ser) - - -def format_user(user): - person = GraphNode( - 'person', **{ - 'name': user.fullname, - 'suffix': user.suffix, - 'given_name': user.given_name, - 'family_name': user.family_name, - 'additional_name': user.middle_names, - }, - ) - - person.attrs['identifiers'] = [GraphNode('agentidentifier', agent=person, uri=user.absolute_url)] - - if user.external_identity.get('ORCID') and list(user.external_identity['ORCID'].values())[0] == 'VERIFIED': - person.attrs['identifiers'].append(GraphNode('agentidentifier', agent=person, uri=list(user.external_identity['ORCID'].keys())[0])) - - person.attrs['related_agents'] = [GraphNode('isaffiliatedwith', subject=person, related=GraphNode('institution', name=institution.name)) for institution in user.get_affiliated_institutions()] - - return person - - -def format_bibliographic_contributor(work_node, user, index): - return GraphNode( - 'creator', - agent=format_user(user), - order_cited=index, - creative_work=work_node, - cited_as=user.fullname, - ) - - -def format_subject(subject, context=None): - if context is None: - context = {} - if subject is None: - return None - if subject.id in context: - return context[subject.id] - context[subject.id] = GraphNode( - 'subject', - name=subject.text, - uri=subject.absolute_api_v2_url, - ) - context[subject.id].attrs['parent'] = format_subject(subject.parent, context) - context[subject.id].attrs['central_synonym'] = format_subject(subject.bepress_subject, context) - return context[subject.id] - - -def send_share_json(resource, data): - """POST metadata to SHARE, using the provider for the given resource. - """ - if getattr(resource, 'provider') and resource.provider.access_token: - access_token = resource.provider.access_token - else: - access_token = settings.SHARE_API_TOKEN - - return requests.post( - sharev2_push_url(), - json=data, - headers={ - 'Authorization': f'Bearer {access_token}', - 'Content-Type': 'application/vnd.api+json', - }, - ) - - -def serialize_share_data(resource, old_subjects=None): - """Build a request payload to send Node/Preprint/Registration metadata to SHARE. 
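As a reading aid for the legacy helper being deleted here: a minimal usage sketch rebuilding the preprint example from the GraphNode docstring above. It assumes the `GraphNode` class as defined in the removed lines is in scope; the titles, names, and URI are the docstring's own example values.

```python
# Build the docstring's example graph: a preprint, an identifier, and a
# 'creator' relation to a person, then serialize it for the legacy push.
preprint = GraphNode('preprint', title='This is a preprint!')
person = GraphNode('person', name='Magpie Jones')
related = [
    GraphNode('workidentifier', creative_work=preprint, uri='https://osf.io/foobar/'),
    GraphNode('creator', creative_work=preprint, agent=person),
]
payload = GraphNode.serialize_graph(preprint, related)
# payload == {'central_node_id': '_:<uuid>', '@graph': [...4 node objects...]}
```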
- :param resource: either a Node, Preprint or Registration - :param old_subjects: - - :return: JSON-serializable dictionary of the resource's metadata, good for POSTing to SHARE - """ - from osf.models import ( - Node, - DraftNode, - Preprint, - Registration, - ) - suid = None - if isinstance(resource, Preprint): - # old_subjects is only used for preprints and should be removed as soon as SHARE - # is fully switched over to the non-mergy pipeline (see ENG-2098) - serializer = partial(serialize_preprint, old_subjects=old_subjects) - suid = resource.get_guid()._id - elif isinstance(resource, Node): - serializer = serialize_osf_node - elif isinstance(resource, Registration): - serializer = serialize_registration - elif isinstance(resource, DraftNode): - return {} - else: - raise NotImplementedError() - - return { - 'data': { - 'type': 'NormalizedData', - 'attributes': { - 'tasks': [], - 'raw': None, - 'suid': resource._id if not suid else suid, - 'data': serializer(resource), - }, - }, - } - - -def serialize_preprint(preprint, old_subjects=None): - if old_subjects is None: - old_subjects = [] - from osf.models import Subject - old_subjects = [Subject.objects.get(id=s) for s in old_subjects] - preprint_graph = GraphNode( - preprint.provider.share_publish_type, **{ - 'title': preprint.title, - 'description': preprint.description or '', - 'is_deleted': ( - (not preprint.verified_publishable and not preprint.is_retracted) - or preprint.is_spam - or preprint.is_deleted - or is_qa_resource(preprint) - ), - 'date_updated': preprint.modified.isoformat(), - 'date_published': preprint.date_published.isoformat() if preprint.date_published else None, - }, - ) - to_visit = [ - preprint_graph, - GraphNode('workidentifier', creative_work=preprint_graph, uri=urljoin(settings.DOMAIN, preprint._id + '/')), - ] - - doi = preprint.get_identifier_value('doi') - if doi: - to_visit.append(GraphNode('workidentifier', creative_work=preprint_graph, uri=f'{settings.DOI_URL_PREFIX}{doi}')) - - if preprint.provider.domain_redirect_enabled: - to_visit.append(GraphNode('workidentifier', creative_work=preprint_graph, uri=preprint.absolute_url)) - - if preprint.article_doi: - # Article DOI refers to a clone of this preprint on another system and therefore does not qualify as an identifier for this preprint - related_work = GraphNode('creativework') - to_visit.append(GraphNode('workrelation', subject=preprint_graph, related=related_work)) - to_visit.append(GraphNode('workidentifier', creative_work=related_work, uri=f'{settings.DOI_URL_PREFIX}{preprint.article_doi}')) - - preprint_graph.attrs['tags'] = [ - GraphNode('throughtags', creative_work=preprint_graph, tag=GraphNode('tag', name=tag)) - for tag in preprint.tags.values_list('name', flat=True) if tag - ] - - current_subjects = [ - GraphNode('throughsubjects', creative_work=preprint_graph, is_deleted=False, subject=format_subject(s)) - for s in preprint.subjects.all() - ] - deleted_subjects = [ - GraphNode('throughsubjects', creative_work=preprint_graph, is_deleted=True, subject=format_subject(s)) - for s in old_subjects if not preprint.subjects.filter(id=s.id).exists() - ] - preprint_graph.attrs['subjects'] = current_subjects + deleted_subjects - - to_visit.extend(format_bibliographic_contributor(preprint_graph, user, i) for i, user in enumerate(preprint.visible_contributors)) - - return GraphNode.serialize_graph(preprint_graph, to_visit) - -def format_node_lineage(child_osf_node, child_graph_node): - parent_osf_node = child_osf_node.parent_node - if not parent_osf_node: 
- return [] - parent_graph_node = GraphNode('registration', title=parent_osf_node.title) - return [ - parent_graph_node, - GraphNode('workidentifier', creative_work=parent_graph_node, uri=urljoin(settings.DOMAIN, parent_osf_node.url)), - GraphNode('ispartof', subject=child_graph_node, related=parent_graph_node), - *format_node_lineage(parent_osf_node, parent_graph_node), - ] - -def serialize_registration(registration): - return serialize_osf_node( - registration, - additional_attrs={ - 'date_published': registration.registered_date.isoformat() if registration.registered_date else None, - 'registration_type': registration.registered_schema.first().name if registration.registered_schema.exists() else None, - 'justification': registration.retraction.justification if registration.retraction else None, - 'withdrawn': registration.is_retracted, - 'extra': {'osf_related_resource_types': _get_osf_related_resource_types(registration)}, - }, - ) - -def _get_osf_related_resource_types(registration): - from osf.models import OutcomeArtifact - from osf.utils.outcomes import ArtifactTypes - artifacts = OutcomeArtifact.objects.for_registration(registration).filter(finalized=True, deleted__isnull=True) - return { - artifact_type.name.lower(): artifacts.filter(artifact_type=artifact_type).exists() - for artifact_type in ArtifactTypes.public_types() - } - -def serialize_osf_node(osf_node, additional_attrs=None): - if osf_node.provider: - share_publish_type = osf_node.provider.share_publish_type - else: - share_publish_type = 'project' - - graph_node = GraphNode( - share_publish_type, **{ - 'title': osf_node.title, - 'description': osf_node.description or '', - 'is_deleted': ( - not osf_node.is_public - or osf_node.is_deleted - or osf_node.is_spam - or is_qa_resource(osf_node) - ), - **(additional_attrs or {}), - }, - ) - - to_visit = [ - graph_node, - GraphNode('workidentifier', creative_work=graph_node, uri=urljoin(settings.DOMAIN, osf_node.url)), - ] - - doi = osf_node.get_identifier_value('doi') - if doi: - to_visit.append(GraphNode('workidentifier', creative_work=graph_node, uri=f'{settings.DOI_URL_PREFIX}{doi}')) - - graph_node.attrs['tags'] = [ - GraphNode('throughtags', creative_work=graph_node, tag=GraphNode('tag', name=tag._id)) - for tag in osf_node.tags.all() - ] - - graph_node.attrs['subjects'] = [ - GraphNode('throughsubjects', creative_work=graph_node, subject=format_subject(s)) - for s in osf_node.subjects.all() - ] - - to_visit.extend(format_bibliographic_contributor(graph_node, user, i) for i, user in enumerate(osf_node.visible_contributors)) - to_visit.extend(GraphNode('AgentWorkRelation', creative_work=graph_node, agent=GraphNode('institution', name=institution.name)) for institution in osf_node.affiliated_institutions.all()) - - to_visit.extend(format_node_lineage(osf_node, graph_node)) - - return GraphNode.serialize_graph(graph_node, to_visit) - - -@celery_app.task(bind=True, max_retries=4, acks_late=True) -def async_update_resource_share(self, guid, old_subjects=None): - """ - This function updates share takes Preprints, Projects and Registrations. 
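The removed `format_node_lineage` above walks up the parent chain, emitting three graph entries per ancestor (the parent's node, a workidentifier for its URL, and an 'ispartof' relation) until there is no parent. Below is a self-contained sketch of that recursion shape, with plain dicts standing in for OSF nodes and illustrative titles.

```python
# Each ancestor contributes a (node, identifier, relation) triple; the
# recursion bottoms out when a node has no parent.
def lineage_entries(node: dict) -> list[str]:
    parent = node.get('parent')
    if not parent:
        return []
    return [
        f"registration:{parent['title']}",               # parent graph node
        f"workidentifier:{parent['url']}",               # parent's identifier
        f"ispartof:{node['title']}->{parent['title']}",  # child-parent relation
    ] + lineage_entries(parent)

root = {'title': 'Root', 'url': '/root/'}
mid = {'title': 'Mid', 'url': '/mid/', 'parent': root}
leaf = {'title': 'Leaf', 'url': '/leaf/', 'parent': mid}
assert len(lineage_entries(leaf)) == 6  # Mid's triple, then Root's triple
```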
- :param self: - :param guid: - :return: - """ - AbstractNode = apps.get_model('osf.AbstractNode') - resource = AbstractNode.load(guid) - if not resource: - Preprint = apps.get_model('osf.Preprint') - resource = Preprint.load(guid) - - data = serialize_share_data(resource, old_subjects) - resp = send_share_json(resource, data) - try: - resp.raise_for_status() - except Exception as e: - if self.request.retries == self.max_retries: - log_exception(e) - elif resp.status_code >= 500: - try: - self.retry( - exc=e, - countdown=(random.random() + 1) * min(60 + settings.CELERY_RETRY_BACKOFF_BASE ** self.request.retries, 60 * 10), - ) - except Retry as e: # Retry is only raise after > 5 retries - log_exception(e) - else: - log_exception(e) - - return resp diff --git a/api_tests/actions/views/test_action_list.py b/api_tests/actions/views/test_action_list.py index bda6be6890a..c33048656f1 100644 --- a/api_tests/actions/views/test_action_list.py +++ b/api_tests/actions/views/test_action_list.py @@ -57,54 +57,106 @@ def moderator(self, provider): moderator.groups.add(provider.get_group('moderator')) return moderator - def test_create_permissions( - self, app, url, preprint, node_admin, moderator - ): + def test_create_permissions_unauthorized(self, app, url, preprint, node_admin, moderator): assert preprint.machine_state == 'initial' - submit_payload = self.create_payload(preprint._id, trigger='submit') + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) # Unauthorized user can't submit - res = app.post_json_api(url, submit_payload, expect_errors=True) + res = app.post_json_api( + url, + submit_payload, + expect_errors=True + ) assert res.status_code == 401 + def test_create_permissions_forbidden(self, app, url, preprint, node_admin, moderator): + assert preprint.machine_state == 'initial' + + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) + # A random user can't submit some_rando = AuthUserFactory() res = app.post_json_api( - url, submit_payload, + url, + submit_payload, auth=some_rando.auth, expect_errors=True ) assert res.status_code == 403 + def test_create_permissions_success(self, app, url, preprint, node_admin, moderator): + assert preprint.machine_state == 'initial' + + submit_payload = self.create_payload( + preprint._id, + trigger='submit' + ) + # Node admin can submit - res = app.post_json_api(url, submit_payload, auth=node_admin.auth) + res = app.post_json_api( + url, + submit_payload, + auth=node_admin.auth + ) assert res.status_code == 201 preprint.refresh_from_db() assert preprint.machine_state == 'pending' assert not preprint.is_published + def test_accept_permissions_unauthorized(self, app, url, preprint, node_admin, moderator): + preprint.machine_state = 'pending' + preprint.save() + assert preprint.machine_state == 'pending' + accept_payload = self.create_payload( - preprint._id, trigger='accept', comment='This is good.' + preprint._id, + trigger='accept', + comment='This is good.' ) # Unauthorized user can't accept res = app.post_json_api(url, accept_payload, expect_errors=True) assert res.status_code == 401 + def test_accept_permissions_forbidden(self, app, url, preprint, node_admin, moderator): + preprint.machine_state = 'pending' + preprint.save() + assert preprint.machine_state == 'pending' + + accept_payload = self.create_payload( + preprint._id, + trigger='accept', + comment='This is good.' 
+ ) + + some_rando = AuthUserFactory() + # A random user can't accept res = app.post_json_api( - url, accept_payload, + url, + accept_payload, auth=some_rando.auth, expect_errors=True ) assert res.status_code == 403 - # Moderator from another provider can't accept + def test_accept_permissions_other_mod(self, app, url, preprint, node_admin, moderator): another_moderator = AuthUserFactory() another_moderator.groups.add( PreprintProviderFactory().get_group('moderator') ) + accept_payload = self.create_payload( + preprint._id, + trigger='accept', + comment='This is good.' + ) res = app.post_json_api( url, accept_payload, auth=another_moderator.auth, @@ -120,6 +172,15 @@ def test_create_permissions( ) assert res.status_code == 403 + def test_accept_permissions_accept(self, app, url, preprint, node_admin, moderator): + preprint.machine_state = 'pending' + preprint.save() + accept_payload = self.create_payload( + preprint._id, + trigger='accept', + comment='This is good.' + ) + # Still unchanged after all those tries preprint.refresh_from_db() assert preprint.machine_state == 'pending' @@ -275,3 +336,33 @@ def test_valid_transitions( assert preprint.date_last_transitioned is None else: assert preprint.date_last_transitioned == action.created + + def test_invalid_target_id(self, app, moderator): + """ + This test simulates the issue reported where using either an invalid + action ID or an unrelated node ID as the target ID results in a 502 error. + + It attempts to create a review action with a bad `target.id`. + """ + res = app.post_json_api( + f'/{API_BASE}actions/reviews/', + { + 'data': { + 'type': 'actions', + 'attributes': { + 'trigger': 'submit' + }, + 'relationships': { + 'target': { + 'data': { + 'type': 'preprints', + 'id': 'deadbeef1234' + } + } + } + } + }, + auth=moderator.auth, + expect_errors=True + ) + assert res.status_code == 404 diff --git a/api_tests/applications/views/test_application_detail.py b/api_tests/applications/views/test_application_detail.py index ae1458e2d5d..f1ca4fd2f38 100644 --- a/api_tests/applications/views/test_application_detail.py +++ b/api_tests/applications/views/test_application_detail.py @@ -6,49 +6,41 @@ from osf_tests.factories import ApiOAuth2ApplicationFactory, AuthUserFactory -def _get_application_detail_route(app): - path = f'applications/{app.client_id}/' - return api_v2_url(path, base_route='/') - - -def _get_application_list_url(): - path = 'applications/' - return api_v2_url(path, base_route='/') - - -@pytest.fixture() -def user(): - return AuthUserFactory() - - @pytest.mark.django_db class TestApplicationDetail: + @pytest.fixture() + def user(self): + return AuthUserFactory() + @pytest.fixture() def user_app(self, user): return ApiOAuth2ApplicationFactory(owner=user) @pytest.fixture() def user_app_url(self, user_app): - return _get_application_detail_route(user_app) + return self._get_application_detail_route(user_app) - @pytest.fixture() - def make_payload(self, user_app): + def _get_application_detail_route(self, user_app): + path = f'applications/{user_app.client_id}/' + return api_v2_url(path, base_route='/') - def payload(type='applications', id=user_app.client_id): - return { - 'data': { - 'id': id, - 'type': type, - 'attributes': { - 'name': 'A shiny new application', - 'home_url': 'http://osf.io', - 'callback_url': 'https://cos.io' - } + def _get_application_list_url(self): + path = 'applications/' + return api_v2_url(path, base_route='/') + + def make_payload(self, user_app, id=None, type='applications'): + return { + 'data': { + 
'id': id or user_app.client_id, + 'type': type, + 'attributes': { + 'name': 'A shiny new application', + 'home_url': 'http://osf.io', + 'callback_url': 'https://cos.io' } } - - return payload + } def test_can_view(self, app, user, user_app, user_app_url): # owner can view @@ -78,16 +70,17 @@ def test_owner_can_delete(self, mock_method, app, user, user_app_url): def test_non_owner_cant_delete(self, app, user_app_url): non_owner = AuthUserFactory() res = app.delete( - user_app_url, auth=non_owner.auth, + user_app_url, + auth=non_owner.auth, expect_errors=True ) assert res.status_code == 403 @mock.patch('framework.auth.cas.CasClient.revoke_application_tokens') - def test_deleting_application_makes_api_view_inaccessible( - self, mock_method, app, user, user_app_url): + def test_deleting_application_makes_api_view_inaccessible(self, mock_method, app, user, user_app_url): mock_method.return_value(True) - res = app.delete(user_app_url, auth=user.auth) + app.delete(user_app_url, auth=user.auth) + res = app.get(user_app_url, auth=user.auth, expect_errors=True) assert res.status_code == 404 @@ -135,58 +128,49 @@ def test_updating_an_instance_does_not_change_the_number_of_instances( ) assert res.status_code == 200 - list_url = _get_application_list_url() + list_url = self._get_application_list_url() res = app.get(list_url, auth=user.auth) assert res.status_code == 200 assert len(res.json['data']) == 1 @mock.patch('framework.auth.cas.CasClient.revoke_application_tokens') - def test_deleting_application_flags_instance_inactive( - self, mock_method, app, user, user_app, user_app_url): + def test_deleting_application_flags_instance_inactive(self, mock_method, app, user, user_app, user_app_url): mock_method.return_value(True) app.delete(user_app_url, auth=user.auth) user_app.reload() assert not user_app.is_active - @pytest.mark.enable_implicit_clean - def test_update_application( - self, app, user, user_app, user_app_url, make_payload): - - valid_payload = make_payload() - incorrect_type_payload = make_payload(type='incorrect') - incorrect_id_payload = make_payload(id='12345') - missing_type_payload = make_payload() - del missing_type_payload['data']['type'] - missing_id_payload = make_payload() - del missing_id_payload['data']['id'] - + def test_update_application(self, app, user, user_app, user_app_url): res = app.put_json_api( user_app_url, - valid_payload, + self.make_payload(user_app), auth=user.auth, expect_errors=True ) assert res.status_code == 200 - # test_update_application_incorrect_type_payload + def test_update_application_incorrect_type_payload(self, app, user, user_app, user_app_url): res = app.put_json_api( user_app_url, - incorrect_type_payload, + self.make_payload(user_app, type='incorrect'), auth=user.auth, expect_errors=True ) assert res.status_code == 409 - # test_update_application_incorrect_id_payload + def test_update_application_incorrect_id_payload(self, app, user, user_app, user_app_url): res = app.put_json_api( user_app_url, - incorrect_id_payload, + self.make_payload(user_app, id='12345'), auth=user.auth, expect_errors=True ) assert res.status_code == 409 - # test_update_application_no_type + def test_update_application_no_type(self, app, user, user_app, user_app_url): + missing_type_payload = self.make_payload(user_app) + del missing_type_payload['data']['type'] + res = app.put_json_api( user_app_url, missing_type_payload, @@ -195,7 +179,10 @@ def test_update_application( ) assert res.status_code == 400 - # test_update_application_no_id + def 
test_update_application_no_id(self, app, user, user_app, user_app_url): + missing_id_payload = self.make_payload(user_app) + del missing_id_payload['data']['id'] + res = app.put_json_api( user_app_url, missing_id_payload, @@ -204,39 +191,41 @@ def test_update_application( ) assert res.status_code == 400 - # test_update_application_no_attributes - payload = { - 'id': user_app.client_id, - 'type': 'applications', - 'name': 'The instance formerly known as Prince' - } + def test_update_application_no_attributes(self, app, user, user_app, user_app_url,): res = app.put_json_api( user_app_url, - payload, + { + 'id': user_app.client_id, + 'type': 'applications', + 'name': 'The instance formerly known as Prince' + }, auth=user.auth, expect_errors=True ) assert res.status_code == 400 - # test_partial_update_application_incorrect_type_payload + def test_partial_update_application_incorrect_type_payload(self, app, user, user_app, user_app_url): res = app.patch_json_api( user_app_url, - incorrect_type_payload, + self.make_payload(user_app, type='incorrect'), auth=user.auth, expect_errors=True ) assert res.status_code == 409 - # test_partial_update_application_incorrect_id_payload + def test_partial_update_application_incorrect_id_payload(self, app, user, user_app, user_app_url): res = app.patch_json_api( user_app_url, - incorrect_id_payload, + self.make_payload(user_app, id='12345'), auth=user.auth, expect_errors=True ) assert res.status_code == 409 - # test_partial_update_application_no_type + def test_partial_update_application_no_type(self, app, user, user_app, user_app_url): + missing_type_payload = self.make_payload(user_app) + del missing_type_payload['data']['type'] + res = app.patch_json_api( user_app_url, missing_type_payload, @@ -245,7 +234,10 @@ def test_update_application( ) assert res.status_code == 400 - # test_partial_update_application_no_id + def test_partial_update_application_no_id(self, app, user, user_app, user_app_url): + missing_id_payload = self.make_payload(user_app) + del missing_id_payload['data']['id'] + res = app.patch_json_api( user_app_url, missing_id_payload, @@ -254,24 +246,23 @@ def test_update_application( ) assert res.status_code == 400 - # test_partial_update_application_no_attributes - payload = { - 'data': { - 'id': user_app.client_id, - 'type': 'applications', - 'name': 'The instance formerly known as Prince' - } - } + def test_partial_update_application_no_attributes(self, app, user, user_app, user_app_url): res = app.patch_json_api( user_app_url, - payload, + { + 'data': { + 'id': user_app.client_id, + 'type': 'applications', + 'name': 'The instance formerly known as Prince' + } + }, auth=user.auth, expect_errors=True ) assert res.status_code == 200 - # test home url is too long - payload = make_payload() + def test_partial_update_url_too_long(self, app, user, user_app, user_app_url): + payload = self.make_payload(user_app) payload['data']['attributes']['home_url'] = 'http://osf.io/' + 'A' * 200 res = app.patch_json_api( user_app_url, diff --git a/api_tests/applications/views/test_application_detail_scopes.py b/api_tests/applications/views/test_application_detail_scopes.py new file mode 100644 index 00000000000..2abdedf616e --- /dev/null +++ b/api_tests/applications/views/test_application_detail_scopes.py @@ -0,0 +1,77 @@ +from unittest import mock + +import pytest + +from framework.auth.cas import CasResponse +from website.util import api_v2_url +from osf_tests.factories import ( + ApiOAuth2ApplicationFactory, + ApiOAuth2PersonalTokenFactory, + 
ApiOAuth2ScopeFactory, + AuthUserFactory, +) + +@pytest.mark.django_db +class TestApplicationScopes: + + @pytest.fixture + def user(self): + return AuthUserFactory() + + @pytest.fixture() + def write_scope(self): + return ApiOAuth2ScopeFactory(name='osf.full_write') + + @pytest.fixture() + def token_full_write(self, user, write_scope): + token = ApiOAuth2PersonalTokenFactory( + owner=user, + ) + token.scopes.add(write_scope) + assert token.owner == user + return token + + def _get_application_list_url(self): + return api_v2_url('applications/', base_route='/') + + def make_payload(self, user_app): + return { + 'data': { + 'id': user_app.client_id, + 'type': 'applications', + 'attributes': { + 'name': 'Updated application', + 'home_url': 'http://osf.io', + 'callback_url': 'http://osf.io', + } + } + } + + @pytest.fixture + def user_app(self, user): + oauth_app = ApiOAuth2ApplicationFactory(owner=user) + assert user == oauth_app.owner + return oauth_app + + @pytest.fixture + def user_app_url(self, user_app): + return api_v2_url(f'applications/{user_app.client_id}/', base_route='/') + + @pytest.fixture + def user_app_list_url(self): + return self._get_application_list_url() + + def test_can_update_with_full_write_scope(self, app, user, user_app, token_full_write, user_app_url): + with mock.patch('framework.auth.cas.CasClient.profile') as mock_cas: + mock_cas.return_value = CasResponse( + authenticated=True, + user=user._id, + attributes={'accessTokenScope': ['osf.full_write',]} + ) + res = app.put_json_api( + user_app_url, + self.make_payload(user_app), + headers={'Authorization': f'Bearer {token_full_write.token_id}'}, + expect_errors=True + ) + assert res.status_code == 200 diff --git a/api_tests/base/test_utils.py b/api_tests/base/test_utils.py index 51ff6611da0..32429fdf0f7 100644 --- a/api_tests/base/test_utils.py +++ b/api_tests/base/test_utils.py @@ -6,8 +6,11 @@ from rest_framework import fields from rest_framework.exceptions import ValidationError -from api.base import utils as api_utils +from api.base import utils as api_utils +from osf.models.base import coerce_guid, Guid, GuidMixin, OptionalGuidMixin, VersionedGuidMixin, InvalidGuid +from osf_tests.factories import ProjectFactory, PreprintFactory +from tests.test_websitefiles import TestFile from framework.status import push_status_message SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore @@ -100,3 +103,67 @@ def test_push_status_message_unexpected_error(self, mock_get_session): 'Unexpected Exception from push_status_message when called ' 'from the v2 API with type "error"' ) + + +@pytest.mark.django_db +class TestCoerceGuid: + + def test_guid_instance(self): + project = ProjectFactory() + assert isinstance(project.guids.first(), Guid) + assert coerce_guid(project.guids.first()) == project.guids.first() + + def test_versioned_guid_instance(self): + preprint = PreprintFactory() + assert isinstance(preprint, VersionedGuidMixin) + assert coerce_guid(preprint) == preprint.versioned_guids.first().guid + + def test_guid_mixin_instance(self): + project = ProjectFactory() + assert isinstance(project, GuidMixin) + assert coerce_guid(project._id) == project.guids.first() + + def test_str_guid_instance(self): + project = ProjectFactory() + str_guid = str(project._id) + guid = coerce_guid(str_guid) + assert isinstance(guid, Guid) + assert guid == project.guids.first() + + def test_incorrect_str_guid_instance(self): + incorrect_guid = '12345' + with pytest.raises(InvalidGuid, match='guid does not exist'): + assert 
coerce_guid(incorrect_guid) + + def test_optional_guid_instance(self): + node = ProjectFactory() + test_file = TestFile( + _path='anid', + name='name', + target=node, + provider='test', + materialized_path='/long/path/to/name', + ) + test_file.save() + test_file.get_guid(create=True) + assert isinstance(test_file, OptionalGuidMixin) + assert coerce_guid(test_file) == test_file.guids.first() + + def test_incorrect_optional_guid_instance(self): + node = ProjectFactory() + test_file = TestFile( + _path='anid', + name='name', + target=node, + provider='test', + materialized_path='/long/path/to/name', + ) + test_file.save() + assert isinstance(test_file, OptionalGuidMixin) + with pytest.raises(InvalidGuid, match='guid does not exist'): + assert coerce_guid(test_file) + + def test_invalid_guid(self): + incorrect_guid = 12345 + with pytest.raises(InvalidGuid, match='cannot coerce'): + assert coerce_guid(incorrect_guid) diff --git a/api_tests/base/test_views.py b/api_tests/base/test_views.py index 2db3a4b65b2..b09df8d753c 100644 --- a/api_tests/base/test_views.py +++ b/api_tests/base/test_views.py @@ -18,6 +18,7 @@ MetricsOpenapiView, ) from api.users.views import ClaimUser, ResetPassword, ExternalLoginConfirmEmailView, ExternalLogin +from api.registrations.views import RegistrationCallbackView from api.wb.views import MoveFileMetadataView, CopyFileMetadataView from rest_framework.permissions import IsAuthenticatedOrReadOnly, IsAuthenticated from api.base.permissions import TokenHasScope @@ -63,6 +64,7 @@ def setUp(self): ResetPassword, ExternalLoginConfirmEmailView, ExternalLogin, + RegistrationCallbackView, ] def test_root_returns_200(self): diff --git a/api_tests/institutions/views/test_institution_user_metric_list.py b/api_tests/institutions/views/test_institution_user_metric_list.py index b1bf3490788..3c0e1bf055f 100644 --- a/api_tests/institutions/views/test_institution_user_metric_list.py +++ b/api_tests/institutions/views/test_institution_user_metric_list.py @@ -17,6 +17,8 @@ from osf.metrics import UserInstitutionProjectCounts from osf.metrics.reports import InstitutionalUserReport +from osf.models import UserMessage + @pytest.mark.es_metrics @pytest.mark.django_db @@ -350,6 +352,10 @@ def test_get_reports(self, app, url, institutional_admin, institution, reports, _expected_user_ids = {_report.user_id for _report in reports} assert set(_user_ids(_resp)) == _expected_user_ids + # users haven't received any messages from institutional admins + for response_object in _resp.json['data']: + assert len(response_object['attributes']['contacts']) == 0 + def test_filter_reports(self, app, url, institutional_admin, institution, reports, unshown_reports): for _query, _expected_user_ids in ( ({'filter[department]': 'nunavum'}, set()), @@ -447,6 +453,7 @@ def test_get_report_formats_csv_tsv(self, app, url, institutional_admin, institu [ 'report_yearmonth', 'account_creation_date', + 'contacts', 'department', 'embargoed_registration_count', 'month_last_active', @@ -463,6 +470,7 @@ def test_get_report_formats_csv_tsv(self, app, url, institutional_admin, institu [ '2024-08', '2018-02', + '[]', 'Center, \t Greatest Ever', '1', '2018-02', @@ -516,6 +524,7 @@ def test_csv_tsv_ignores_pagination(self, app, url, institutional_admin, institu expected_data.append([ '2024-08', '2018-02', + '[]', 'QBatman', '1', '2018-02', @@ -557,6 +566,7 @@ def test_csv_tsv_ignores_pagination(self, app, url, institutional_admin, institu expected_header = [ 'report_yearmonth', 'account_creation_date', + 'contacts',
'department', 'embargoed_registration_count', 'month_last_active', @@ -612,6 +622,7 @@ def test_get_report_format_table_json(self, app, url, institutional_admin, insti { 'report_yearmonth': '2024-08', 'account_creation_date': '2018-02', + 'contacts': [], 'department': 'Safety "The Wolverine" Weapon X', 'embargoed_registration_count': 1, 'month_last_active': '2018-02', @@ -628,6 +639,88 @@ def test_get_report_format_table_json(self, app, url, institutional_admin, insti ] assert response_data == expected_data + def test_correct_number_of_contact_messages(self, app, url, institutional_admin, institution): + user1 = AuthUserFactory() + user2 = AuthUserFactory() + user3 = AuthUserFactory() + user4 = AuthUserFactory() + _report_factory( + '2024-08', institution, + user_id=user1._id, + storage_byte_count=53, + ), + _report_factory( + '2024-08', institution, + user_id=user2._id, + orcid_id='5555-4444-3333-2222', + storage_byte_count=8277, + ), + _report_factory( + '2024-08', institution, + user_id=user3._id, + department_name='blargl', + storage_byte_count=34834834, + ), + _report_factory( + '2024-08', institution, + user_id=user4._id, + orcid_id='4444-3333-2222-1111', + department_name='a department, or so, that happens, incidentally, to have commas', + storage_byte_count=736662999298, + ) + + receiver = user1 + UserMessage.objects.create( + sender=institutional_admin, + recipient=receiver, + message_text='message1', + message_type='institutional_request', + institution=institution + ) + UserMessage.objects.create( + sender=institutional_admin, + recipient=receiver, + message_text='message2', + message_type='institutional_request', + institution=institution + ) + UserMessage.objects.create( + sender=institutional_admin, + recipient=receiver, + message_text='message3', + message_type='institutional_request', + institution=institution + ) + + new_admin = AuthUserFactory() + institution.get_group('institutional_admins').user_set.add(new_admin) + + # messages from another admin + UserMessage.objects.create( + sender=new_admin, + recipient=receiver, + message_text='message4', + message_type='institutional_request', + institution=institution + ) + UserMessage.objects.create( + sender=new_admin, + recipient=receiver, + message_text='message5', + message_type='institutional_request', + institution=institution + ) + + res = app.get(f'/{API_BASE}institutions/{institution._id}/metrics/users/', auth=institutional_admin.auth) + contact_object = list(filter(lambda contact: receiver._id in contact['relationships']['user']['links']['related']['href'], res.json['data']))[0] + contacts = contact_object['attributes']['contacts'] + + first_admin_contact = list(filter(lambda x: x['sender_name'] == institutional_admin.fullname, contacts))[0] + assert first_admin_contact['count'] == 3 + + another_admin_contact = list(filter(lambda x: x['sender_name'] == new_admin.fullname, contacts))[0] + assert another_admin_contact['count'] == 2 + def _user_ids(api_response): for _datum in api_response.json['data']: diff --git a/api_tests/preprints/views/test_preprint_versions.py b/api_tests/preprints/views/test_preprint_versions.py index 090f180721c..efb8a092327 100644 --- a/api_tests/preprints/views/test_preprint_versions.py +++ b/api_tests/preprints/views/test_preprint_versions.py @@ -20,6 +20,7 @@ def setUp(self): self.user = AuthUserFactory() self.project = ProjectFactory(creator=self.user) self.moderator = AuthUserFactory() + self.admin = AuthUserFactory() self.post_mod_preprint = PreprintFactory( 
reviews_workflow='post-moderation', is_published=True, @@ -30,8 +31,10 @@ def setUp(self): is_published=False, creator=self.user ) - self.post_mod_preprint.provider.get_group('moderator').user_set.add(self.moderator) - self.pre_mod_preprint.provider.get_group('moderator').user_set.add(self.moderator) + self.post_mod_preprint.provider.add_to_group(self.moderator, 'moderator') + self.pre_mod_preprint.provider.add_to_group(self.moderator, 'moderator') + self.post_mod_preprint.provider.add_to_group(self.admin, 'admin') + self.pre_mod_preprint.provider.add_to_group(self.admin, 'admin') self.post_mod_version_create_url = f'/{API_BASE}preprints/{self.post_mod_preprint._id}/versions/?version=2.20' self.pre_mode_version_create_url = f'/{API_BASE}preprints/{self.pre_mod_preprint._id}/versions/?version=2.20' self.post_mod_preprint_update_url = f'/{API_BASE}preprints/{self.post_mod_preprint._id}/?version=2.20' @@ -513,7 +516,7 @@ def test_accepted_preprint_in_pre_moderation_and_withdrawn_is_shown_for_everyone assert res.status_code == 200 assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' - def test_moderator_sees_pending_preprint(self): + def test_reviewer_sees_pending_preprint(self): preprint_id = self.pre_mod_preprint._id.split('_')[0] contributor = AuthUserFactory() @@ -526,17 +529,20 @@ def test_moderator_sees_pending_preprint(self): ) pre_mod_preprint_v2.run_submit(self.user) - # preprint - res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'pending' + for reviewer in [self.admin, self.moderator]: + assert not pre_mod_preprint_v2.is_contributor(reviewer) - # preprint version - res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'pending' + # preprint + res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'pending' - def test_moderator_sees_pending_withdrawn_preprint(self): + # preprint version + res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'pending' + + def test_reviewer_sees_pending_withdrawn_preprint(self): preprint_id = self.pre_mod_preprint._id.split('_')[0] contributor = AuthUserFactory() @@ -558,17 +564,20 @@ def test_moderator_sees_pending_withdrawn_preprint(self): withdrawal_request.run_submit(self.user) withdrawal_request.run_accept(self.moderator, withdrawal_request.comment) - # preprint - res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' + for reviewer in [self.admin, self.moderator]: + assert not pre_mod_preprint_v2.is_contributor(reviewer) - # preprint version - res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' + # preprint + res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' + + # preprint version + res = 
self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' - def test_moderator_sees_accepted_preprint(self): + def test_reviewer_sees_accepted_preprint(self): preprint_id = self.pre_mod_preprint._id.split('_')[0] contributor = AuthUserFactory() @@ -582,17 +591,20 @@ def test_moderator_sees_accepted_preprint(self): pre_mod_preprint_v2.run_submit(self.user) pre_mod_preprint_v2.run_accept(self.moderator, 'comment') - # preprint - res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'accepted' + for reviewer in [self.admin, self.moderator]: + assert not pre_mod_preprint_v2.is_contributor(reviewer) - # preprint version - res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'accepted' + # preprint + res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'accepted' + + # preprint version + res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'accepted' - def test_moderator_sees_withdrawn_preprint(self): + def test_reviewer_sees_withdrawn_preprint(self): preprint_id = self.pre_mod_preprint._id.split('_')[0] contributor = AuthUserFactory() @@ -614,17 +626,20 @@ def test_moderator_sees_withdrawn_preprint(self): withdrawal_request.run_submit(self.user) withdrawal_request.run_accept(self.moderator, withdrawal_request.comment) - # preprint - res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' + for reviewer in [self.admin, self.moderator]: + assert not pre_mod_preprint_v2.is_contributor(reviewer) - # preprint version - res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=self.moderator.auth) - assert res.status_code == 200 - assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' + # preprint + res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' + + # preprint version + res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=reviewer.auth) + assert res.status_code == 200 + assert res.json['data']['attributes']['reviews_state'] == 'withdrawn' - def test_moderator_does_not_see_initial_preprint(self): + def test_reviewer_does_not_see_initial_preprint(self): preprint_id = self.pre_mod_preprint._id.split('_')[0] contributor = AuthUserFactory() @@ -636,30 +651,36 @@ def test_moderator_does_not_see_initial_preprint(self): set_doi=False ) - # preprint - res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=self.moderator.auth, expect_errors=True) - assert res.status_code == 404 + for reviewer in [self.admin, self.moderator]: + assert not pre_mod_preprint_v2.is_contributor(reviewer) - # preprint version - res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=self.moderator.auth, expect_errors=True) - assert res.status_code == 404 + # 
preprint + res = self.app.get(f'/{API_BASE}preprints/{preprint_id}/', auth=reviewer.auth, expect_errors=True) + assert res.status_code == 404 + + # preprint version + res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=reviewer.auth, expect_errors=True) + assert res.status_code == 404 - def test_moderator_can_contribute(self): + def test_reviewer_can_contribute(self): pre_mod_preprint_v2 = PreprintFactory.create_version( create_from=self.pre_mod_preprint, final_machine_state='initial', creator=self.user, set_doi=False ) - pre_mod_preprint_v2.add_contributor(self.moderator, permissions.READ) - # preprint - res = self.app.get(f'/{API_BASE}preprints/{self.pre_mod_preprint._id.split('_')[0]}/', auth=self.moderator.auth) - assert res.status_code == 200 + for reviewer in [self.admin, self.moderator]: + pre_mod_preprint_v2.add_contributor(reviewer, permissions.READ) + assert pre_mod_preprint_v2.is_contributor(reviewer) - # preprint version - res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=self.moderator.auth) - assert res.status_code == 200 + # preprint + res = self.app.get(f'/{API_BASE}preprints/{self.pre_mod_preprint._id.split('_')[0]}/', auth=reviewer.auth) + assert res.status_code == 200 + + # preprint version + res = self.app.get(f'/{API_BASE}preprints/{pre_mod_preprint_v2._id}/', auth=reviewer.auth) + assert res.status_code == 200 class TestPreprintVersionsListRetrieve(ApiTestCase): diff --git a/api_tests/registrations/views/test_regisatration_callbacks.py b/api_tests/registrations/views/test_regisatration_callbacks.py new file mode 100644 index 00000000000..35d65d013b6 --- /dev/null +++ b/api_tests/registrations/views/test_regisatration_callbacks.py @@ -0,0 +1,82 @@ +import copy +import time +import pytest + +from api.base.settings.defaults import API_BASE +from osf_tests.factories import RegistrationFactory +from framework.auth import signing + + +@pytest.mark.django_db +class TestRegistrationCallbacks: + + @pytest.fixture() + def registration(self): + registration = RegistrationFactory() + return registration + + @pytest.fixture() + def url(self, registration): + return f'/{API_BASE}registrations/{registration._id}/callbacks/' + + @pytest.fixture() + def payload(self): + return { + 'action': 'copy', + 'destination': { + 'name': 'Archive of OSF Storage', + }, + 'errors': None, + 'source': { + 'provider': 'osfstorage', + }, + 'time': time.time() + 1000 + } + + def sign_payload(self, payload): + message, signature = signing.default_signer.sign_payload(payload) + return { + 'payload': message, + 'signature': signature, + } + + def test_registration_callback(self, app, payload, url): + data = self.sign_payload(payload) + res = app.put_json(url, data) + assert res.status_code == 200 + + def test_signature_expired(self, app, payload, url): + payload['time'] = time.time() - 100 + data = self.sign_payload(payload) + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Signature has expired' + + def test_bad_signature(self, app, payload, url): + data = self.sign_payload(payload) + data['signature'] = '1234' + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 401 + assert res.json['errors'][0]['detail'] == 'Authentication credentials were not provided.' 
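These tests build the signed envelope the new callback view consumes via HMACSignedParser. OSF's `signing.default_signer` is not shown in this diff, so the following is only a generic sketch of an HMAC-signed `{'payload': ..., 'signature': ...}` body; the key, JSON encoding, and SHA-256 digest are assumptions for illustration, not OSF's implementation.

```python
import hashlib
import hmac
import json

# Sign a callback payload the way an HMAC signer generally would:
# serialize the body, then attach a keyed digest the server can recompute.
def sign_payload(payload: dict, key: bytes = b'shared-secret') -> dict:
    message = json.dumps(payload)
    signature = hmac.new(key, message.encode(), hashlib.sha256).hexdigest()
    return {'payload': message, 'signature': signature}

data = sign_payload({'action': 'copy', 'time': 1700000000.0})
# data['signature'] verifies only if the server shares the same key and
# recomputes the digest over exactly data['payload'].
```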
+ + def test_invalid_payload(self, app, payload, url): + payload1 = copy.deepcopy(payload) + del payload1['time'] + data = self.sign_payload(payload1) + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Invalid Payload' + + payload2 = copy.deepcopy(payload) + data = self.sign_payload(payload2) + del data['signature'] + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Invalid Payload' + + payload3 = copy.deepcopy(payload) + data = self.sign_payload(payload3) + del data['payload'] + res = app.put_json(url, data, expect_errors=True) + assert res.status_code == 400 + assert res.json['errors'][0]['detail'] == 'Invalid Payload' diff --git a/api_tests/share/_utils.py b/api_tests/share/_utils.py index 2d974b75ccf..4fde322fccc 100644 --- a/api_tests/share/_utils.py +++ b/api_tests/share/_utils.py @@ -11,7 +11,7 @@ postcommit_queue, ) from website import settings as website_settings -from api.share.utils import shtrove_ingest_url, sharev2_push_url +from api.share.utils import shtrove_ingest_url from osf.metadata.osf_gathering import OsfmapPartition @@ -28,8 +28,6 @@ def mock_share_responses(): _ingest_url = shtrove_ingest_url() _rsps.add(responses.POST, _ingest_url, status=200) _rsps.add(responses.DELETE, _ingest_url, status=200) - # for legacy sharev2 support: - _rsps.add(responses.POST, sharev2_push_url(), status=200) yield _rsps @@ -44,7 +42,6 @@ def mock_update_share(): def expect_ingest_request(mock_share_responses, osfguid, *, token=None, delete=False, count=1, error_response=False): mock_share_responses._calls.reset() yield - _legacy_count_per_item = 1 _trove_main_count_per_item = 1 _trove_supplementary_count_per_item = ( 0 @@ -52,8 +49,7 @@ def expect_ingest_request(mock_share_responses, osfguid, *, token=None, delete=F else (len(OsfmapPartition) - 1) ) _total_count = count * ( - _legacy_count_per_item - + _trove_main_count_per_item + _trove_main_count_per_item + _trove_supplementary_count_per_item ) assert len(mock_share_responses.calls) == _total_count, ( @@ -61,24 +57,18 @@ def expect_ingest_request(mock_share_responses, osfguid, *, token=None, delete=F ) _trove_ingest_calls = [] _trove_supp_ingest_calls = [] - _legacy_push_calls = [] for _call in mock_share_responses.calls: if _call.request.url.startswith(shtrove_ingest_url()): if 'is_supplementary' in _call.request.url: _trove_supp_ingest_calls.append(_call) else: _trove_ingest_calls.append(_call) - else: - _legacy_push_calls.append(_call) assert len(_trove_ingest_calls) == count assert len(_trove_supp_ingest_calls) == count * _trove_supplementary_count_per_item - assert len(_legacy_push_calls) == count for _call in _trove_ingest_calls: assert_ingest_request(_call.request, osfguid, token=token, delete=delete) for _call in _trove_supp_ingest_calls: assert_ingest_request(_call.request, osfguid, token=token, delete=delete, supp=True) - for _call in _legacy_push_calls: - assert _call.request.url.startswith(sharev2_push_url()) def assert_ingest_request(request, expected_osfguid, *, token=None, delete=False, supp=False): diff --git a/api_tests/share/test_share_node.py b/api_tests/share/test_share_node.py index 089611f2512..791e7d0099a 100644 --- a/api_tests/share/test_share_node.py +++ b/api_tests/share/test_share_node.py @@ -20,7 +20,7 @@ from website.project.tasks import on_node_updated from framework.auth.core import Auth -from api.share.utils import shtrove_ingest_url, sharev2_push_url 
+from api.share.utils import shtrove_ingest_url from ._utils import expect_ingest_request @@ -189,8 +189,6 @@ def test_call_async_update_on_500_retry(self, mock_share_responses, node, user): """This is meant to simulate a temporary outage, so the retry mechanism should kick in and complete it.""" mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=500) mock_share_responses.add(responses.POST, shtrove_ingest_url(), status=200) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=500) - mock_share_responses.add(responses.POST, sharev2_push_url(), status=200) with expect_ingest_request(mock_share_responses, node._id, count=2): on_node_updated(node._id, user._id, False, {'is_public'}) @@ -198,13 +196,11 @@ def test_call_async_update_on_500_retry(self, mock_share_responses, node, user): def test_call_async_update_on_500_failure(self, mock_share_responses, node, user): """This is meant to simulate a total outage, so the retry mechanism should try X number of times and quit.""" mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=500) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=500) with expect_ingest_request(mock_share_responses, node._id, count=5): # tries five times on_node_updated(node._id, user._id, False, {'is_public'}) @mark.skip('Synchronous retries not supported if celery >=5.0') def test_no_call_async_update_on_400_failure(self, mock_share_responses, node, user): mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=400) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=400) with expect_ingest_request(mock_share_responses, node._id): on_node_updated(node._id, user._id, False, {'is_public'}) diff --git a/api_tests/share/test_share_preprint.py b/api_tests/share/test_share_preprint.py index 4ab47963bc8..cf0c8a3d92d 100644 --- a/api_tests/share/test_share_preprint.py +++ b/api_tests/share/test_share_preprint.py @@ -4,7 +4,7 @@ import pytest import responses -from api.share.utils import shtrove_ingest_url, sharev2_push_url +from api.share.utils import shtrove_ingest_url from framework.auth.core import Auth from osf.models.spam import SpamStatus from osf.utils.permissions import READ, WRITE, ADMIN @@ -124,14 +124,12 @@ def test_preprint_contributor_changes_updates_preprints_share(self, mock_share_r @pytest.mark.skip('Synchronous retries not supported if celery >=5.0') def test_call_async_update_on_500_failure(self, mock_share_responses, preprint, auth): mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=500) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=500) preprint.set_published(True, auth=auth, save=True) with expect_preprint_ingest_request(mock_share_responses, preprint, count=5): preprint.update_search() def test_no_call_async_update_on_400_failure(self, mock_share_responses, preprint, auth): mock_share_responses.replace(responses.POST, shtrove_ingest_url(), status=400) - mock_share_responses.replace(responses.POST, sharev2_push_url(), status=400) preprint.set_published(True, auth=auth, save=True) with expect_preprint_ingest_request(mock_share_responses, preprint, count=1, error_response=True): preprint.update_search() diff --git a/framework/auth/oauth_scopes.py b/framework/auth/oauth_scopes.py index 796023228df..9cdad6d728a 100644 --- a/framework/auth/oauth_scopes.py +++ b/framework/auth/oauth_scopes.py @@ -343,6 +343,7 @@ class ComposedScopes: + PREPRINT_ALL_WRITE\ + GROUP_WRITE\ + TOKENS_WRITE\ + + 
APPLICATIONS_WRITE\ + ( CoreScopes.CEDAR_METADATA_RECORD_WRITE, CoreScopes.WRITE_COLLECTION_SUBMISSION_ACTION, @@ -351,7 +352,7 @@ class ComposedScopes: ) # Admin permissions- includes functionality not intended for third-party use - ADMIN_LEVEL = FULL_WRITE + APPLICATIONS_WRITE + TOKENS_WRITE + COMMENT_REPORTS_WRITE + USERS_CREATE + REVIEWS_WRITE +\ + ADMIN_LEVEL = FULL_WRITE + TOKENS_WRITE + COMMENT_REPORTS_WRITE + USERS_CREATE + REVIEWS_WRITE +\ (CoreScopes.USER_EMAIL_READ, CoreScopes.USER_ADDON_READ, CoreScopes.WAFFLE_READ, ) # List of all publicly documented scopes, mapped to composed scopes defined above. diff --git a/framework/auth/views.py b/framework/auth/views.py index e83f47b5db2..26aa494ddd4 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -970,39 +970,41 @@ def resend_confirmation_post(auth): View for user to submit resend confirmation form. HTTP Method: POST """ + try: + # If user is already logged in, log user out + if auth.logged_in: + return auth_logout(redirect_url=request.url) - # If user is already logged in, log user out - if auth.logged_in: - return auth_logout(redirect_url=request.url) - - form = ResendConfirmationForm(request.form) + form = ResendConfirmationForm(request.form) - if form.validate(): - clean_email = form.email.data - user = get_user(email=clean_email) - status_message = ( - f'If there is an OSF account associated with this unconfirmed email address {clean_email}, ' - 'a confirmation email has been resent to it. If you do not receive an email and believe ' - 'you should have, please contact OSF Support.' - ) - kind = 'success' - if user: - if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE): - try: - send_confirm_email(user, clean_email, renew=True) - except KeyError: - # already confirmed, redirect to dashboard - status_message = f'This email {clean_email} has already been confirmed.' - kind = 'warning' - user.email_last_sent = timezone.now() - user.save() - else: - status_message = ('You have recently requested to resend your confirmation email. ' - 'Please wait a few minutes before trying again.') - kind = 'error' - status.push_status_message(status_message, kind=kind, trust=False) - else: - forms.push_errors_to_status(form.errors) + if form.validate(): + clean_email = form.email.data + user = get_user(email=clean_email) + status_message = ( + f'If there is an OSF account associated with this unconfirmed email address {clean_email}, ' + 'a confirmation email has been resent to it. If you do not receive an email and believe ' + 'you should have, please contact OSF Support.' + ) + kind = 'success' + if user: + if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE): + try: + send_confirm_email(user, clean_email, renew=True) + except KeyError: + # already confirmed, redirect to dashboard + status_message = f'This email {clean_email} has already been confirmed.' + kind = 'warning' + user.email_last_sent = timezone.now() + user.save() + else: + status_message = ('You have recently requested to resend your confirmation email. 
' + 'Please wait a few minutes before trying again.') + kind = 'error' + status.push_status_message(status_message, kind=kind, trust=False) + else: + forms.push_errors_to_status(form.errors) + except Exception as err: + sentry.log_exception(f'Async email confirmation failed because of the error: {err}') # Don't go anywhere return {'form': form} diff --git a/framework/forms/utils.py b/framework/forms/utils.py index 420d70bcaf0..973ed310481 100644 --- a/framework/forms/utils.py +++ b/framework/forms/utils.py @@ -9,34 +9,6 @@ def sanitize(s, **kwargs): return sanitize_html(s, **kwargs) -def process_data(data, func): - if isinstance(data, dict): - return { - key: process_data(value, func) - for key, value in data.items() - } - elif isinstance(data, list): - return [ - process_data(item, func) - for item in data - ] - return func(data) - - -def process_payload(data): - return process_data( - data, - lambda value: quote(value.encode('utf-8') if value else '', safe=' ') - ) - - -def unprocess_payload(data): - return process_data( - data, - lambda value: unquote(value.encode('utf-8') if value else '') - ) - - def jsonify(form): """Cast WTForm to JSON object. diff --git a/osf/exceptions.py b/osf/exceptions.py index 82e8ab5f505..30130a587d1 100644 --- a/osf/exceptions.py +++ b/osf/exceptions.py @@ -292,3 +292,18 @@ class MetadataSerializationError(OSFError): class InvalidCookieOrSessionError(OSFError): """Raised when cookie is invalid or session key is not found.""" pass + + +class RegistrationStuckError(OSFError): + """Raised if Registration stuck during archive.""" + pass + + +class RegistrationStuckRecoverableException(RegistrationStuckError): + """Raised if registration stuck but recoverable.""" + pass + + +class RegistrationStuckBrokenException(RegistrationStuckError): + """Raised if registration stuck and not recoverable.""" + pass diff --git a/osf/management/commands/deactivate_requested_accounts.py b/osf/management/commands/deactivate_requested_accounts.py index 9a3ddcf5356..512fb34eeef 100644 --- a/osf/management/commands/deactivate_requested_accounts.py +++ b/osf/management/commands/deactivate_requested_accounts.py @@ -31,7 +31,6 @@ def deactivate_requested_accounts(dry_run=True): logger.info(f'Disabling user {user._id}.') if not dry_run: user.deactivate_account() - user.is_registered = False mails.send_mail( to_addr=user.username, mail=mails.REQUEST_DEACTIVATION_COMPLETE, diff --git a/osf/management/commands/fix_preprints_has_data_links_and_why_no_data.py b/osf/management/commands/fix_preprints_has_data_links_and_why_no_data.py new file mode 100644 index 00000000000..84997c1fbe9 --- /dev/null +++ b/osf/management/commands/fix_preprints_has_data_links_and_why_no_data.py @@ -0,0 +1,109 @@ +from django.core.management.base import BaseCommand +from django.db.models import Q +from osf.models import Preprint, Guid +import logging + +logger = logging.getLogger(__name__) + + +def process_wrong_why_not_data_preprints( + version_guid: str | None, + dry_run: bool, + executing_through_command: bool = True, + command_obj: BaseCommand = None +): + through_command_constrain = executing_through_command and command_obj + why_no_data_filters = Q(why_no_data__isnull=False) & ~Q(why_no_data='') + + if version_guid: + base_guid_str, version = Guid.split_guid(version_guid) + preprints = Preprint.objects.filter( + versioned_guids__guid___id=base_guid_str, + versioned_guids__version=version + ) + if not preprints: + no_preprint_message = f'No preprint found with version_guid: {version_guid}' + 
logger.error(no_preprint_message) + if through_command_constrain: + command_obj.stdout.write(command_obj.style.ERROR(no_preprint_message)) + return + if preprints[0].has_data_links != 'no' and not preprints[0].why_no_data: + correct_behavior_message = f'Correct behavior for {preprints[0]._id} has_data_links={preprints[0].has_data_links} why_no_data={preprints[0].why_no_data}' + if through_command_constrain: + command_obj.stdout.write(correct_behavior_message) + return + + else: + preprints = Preprint.objects.filter( + ~Q(has_data_links='no') & why_no_data_filters + ) + + total = preprints.count() + logger.info(f'Found {total} preprints to process') + if through_command_constrain: + command_obj.stdout.write(f'Found {total} preprints to process') + + processed = 0 + errors = 0 + + for preprint in preprints: + try: + logger.info(f'Processing preprint {preprint._id}') + if through_command_constrain: + command_obj.stdout.write(f'Processing preprint {preprint._id} ({processed + 1}/{total})') + + if not dry_run: + preprint.why_no_data = '' + preprint.save() + logger.info(f'Updated preprint {preprint._id}') + else: + logger.info( + f'Would update preprint {preprint._id} (dry run), {preprint.has_data_links=}, {preprint.why_no_data=}' + ) + + processed += 1 + except Exception as e: + errors += 1 + logger.error(f'Error processing preprint {preprint._id}: {str(e)}') + if through_command_constrain: + command_obj.stdout.write(command_obj.style.ERROR(f'Error processing preprint {preprint._id}: {str(e)}')) + continue + + logger.info(f'Completed processing {processed} preprints with {errors} errors') + if through_command_constrain: + command_obj.stdout.write( + command_obj.style.SUCCESS( + f'Completed processing {processed} preprints with {errors} errors' + ) + ) + + +class Command(BaseCommand): + help = 'Fix preprints has_data_links and why_no_data' + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', + action='store_true', + help='Run without making changes', + ) + parser.add_argument( + '--guid', + type=str, + help='Version GUID to process (e.g. 
awgxb_v1, kupen_v4)', + ) + + def handle(self, *args, **options): + dry_run = options.get('dry_run', False) + version_guid = options.get('guid') + + if dry_run: + logger.info('Running in dry-run mode - no changes will be made') + self.stdout.write('Running in dry-run mode - no changes will be made') + + process_wrong_why_not_data_preprints( + version_guid=version_guid, + dry_run=dry_run, + executing_through_command=True, + command_obj=self + ) diff --git a/osf/management/commands/fix_unclaimed_records_for_preprint_versions.py b/osf/management/commands/fix_unclaimed_records_for_preprint_versions.py new file mode 100644 index 00000000000..17fca6a54df --- /dev/null +++ b/osf/management/commands/fix_unclaimed_records_for_preprint_versions.py @@ -0,0 +1,163 @@ +import logging + +from django.core.management.base import BaseCommand +from django.apps import apps +from django.db.models import Q + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = 'Update unclaimed records for preprint versions' + + def add_arguments(self, parser): + parser.add_argument( + '--dry-run', + action='store_true', + dest='dry_run', + help='Run the command without saving changes', + ) + + def handle(self, *args, **options): + dry_run = options.get('dry_run', False) + update_unclaimed_records_for_preprint_versions(dry_run=dry_run) + +def safe_sort_key(x, delimiter): + parts = x.split(delimiter) + if len(parts) > 1: + try: + return int(parts[1]) + except (ValueError, TypeError): + return 0 + return 0 + + +def update_unclaimed_records_for_preprint_versions(dry_run=False): + Preprint = apps.get_model('osf.Preprint') + Guid = apps.get_model('osf.Guid') + OSFUser = apps.get_model('osf.OSFUser') + GuidVersionsThrough = apps.get_model('osf.GuidVersionsThrough') + + preprint_filters = ( + Q(preprintcontributor__user__is_registered=False) | + Q(preprintcontributor__user__date_disabled__isnull=False) + ) + + mode = 'DRY RUN' if dry_run else 'UPDATING' + logger.info(f'Starting {mode} for unclaimed records for preprint versions') + + preprints_count = Preprint.objects.filter( + preprint_filters + ).distinct('versioned_guids__guid').count() + + logger.info(f'Found {preprints_count} preprints with unregistered contributors') + + processed_count = 0 + skipped_count = 0 + updated_count = 0 + + logger.info('-' * 50) + logger.info(f'{mode} MODE') + logger.info('-' * 50) + + for preprint in Preprint.objects.filter( + preprint_filters + ).prefetch_related('_contributors').distinct( + 'versioned_guids__guid' + ): + processed_count += 1 + try: + guid, version = Guid.split_guid(preprint._id) + logger.info(f'[{processed_count}/{preprints_count}] Processing preprint {preprint._id}') + + latest_version_through = GuidVersionsThrough.objects.filter(guid___id=guid).last() + if not latest_version_through: + logger.error(f'No version found for guid {guid}, skipping') + skipped_count += 1 + continue + + latest_version_number = latest_version_through.version + unregistered_contributors = preprint.contributor_set.filter(user__is_registered=False) + logger.info(f'Found {unregistered_contributors.count()} unregistered contributors for preprint {preprint._id}') + delimiter = Preprint.GUID_VERSION_DELIMITER + for contributor in unregistered_contributors: + try: + records_key_for_current_guid = [ + key for key in contributor.user.unclaimed_records.keys() if guid in key and delimiter in key + ] + if records_key_for_current_guid: + records_key_for_current_guid.sort( + key=lambda x: safe_sort_key(x, delimiter), + ) + record_info = 
contributor.user.unclaimed_records[records_key_for_current_guid[0]] + for current_version in range(1, int(latest_version_number) + 1): + preprint_id = f'{guid}{Preprint.GUID_VERSION_DELIMITER}{current_version}' + if preprint_id not in contributor.user.unclaimed_records.keys(): + if not dry_run: + try: + preprint_obj = Preprint.load(preprint_id) + referrer = OSFUser.load(record_info['referrer_id']) + + if not preprint_obj: + logger.error(f'Could not load preprint {preprint_id}, skipping') + continue + + if not referrer: + logger.error(f'Could not load referrer {record_info["referrer_id"]}, skipping') + continue + + logger.info(f'Adding unclaimed record for {preprint_id} for user {contributor.user._id}') + contributor.user.unclaimed_records[preprint_id] = contributor.user.add_unclaimed_record( + claim_origin=preprint_obj, + referrer=referrer, + given_name=record_info.get('name', None), + email=record_info.get('email', None), + skip_referrer_permissions=True + ) + contributor.user.save() + updated_count += 1 + logger.info(f'Successfully saved unclaimed record for {preprint_id}') + except Exception as e: + logger.error(f'Error adding unclaimed record for {preprint_id}: {str(e)}') + else: + logger.info(f'[DRY RUN] Would add unclaimed record for {preprint_id} for user {contributor.user._id}') + updated_count += 1 + else: + try: + all_versions = [guid.referent for guid in GuidVersionsThrough.objects.filter(guid___id=guid)] + logger.info(f'Found {len(all_versions)} versions for preprint with guid {guid}') + + for current_preprint in all_versions: + preprint_id = current_preprint._id + if preprint_id not in contributor.user.unclaimed_records.keys(): + if not dry_run: + try: + logger.info(f'Adding unclaimed record for {preprint_id} for user {contributor.user._id}') + contributor.user.unclaimed_records[preprint_id] = contributor.user.add_unclaimed_record( + claim_origin=current_preprint, + referrer=current_preprint.creator, + given_name=contributor.user.fullname, + email=contributor.user.username, + skip_referrer_permissions=True + ) + contributor.user.save() + updated_count += 1 + logger.info(f'Successfully saved unclaimed record for {preprint_id}') + except Exception as e: + logger.error(f'Error adding unclaimed record for {preprint_id}: {str(e)}') + else: + logger.info(f'[DRY RUN] Would add unclaimed record for {preprint_id} for user {contributor.user._id}') + updated_count += 1 + except Exception as e: + logger.error(f'Error processing versions for guid {guid}: {str(e)}') + except Exception as e: + logger.error(f'Error processing contributor {contributor.id}: {str(e)}') + + except Exception as e: + logger.error(f'Unexpected error processing preprint {preprint.id}: {str(e)}') + skipped_count += 1 + + if dry_run: + logger.info(f'Processed: {processed_count}, Would update: {updated_count}, Skipped: {skipped_count}') + else: + logger.info(f'Processed: {processed_count}, Updated: {updated_count}, Skipped: {skipped_count}') diff --git a/osf/management/commands/force_archive.py b/osf/management/commands/force_archive.py index d58b3641deb..0d12d12bfdf 100644 --- a/osf/management/commands/force_archive.py +++ b/osf/management/commands/force_archive.py @@ -22,10 +22,12 @@ import json import logging import requests +import contextlib import django django.setup() from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError from django.core.management.base import BaseCommand from django.db.models import Q from django.db.utils import IntegrityError @@ -35,6 
+37,7 @@ from framework import sentry from framework.exceptions import HTTPError from osf.models import Node, NodeLog, Registration, BaseFileNode +from osf.exceptions import RegistrationStuckRecoverableException, RegistrationStuckBrokenException from api.base.utils import waterbutler_api_url_for from scripts import utils as script_utils from website.archiver import ARCHIVER_SUCCESS @@ -43,11 +46,6 @@ logger = logging.getLogger(__name__) -# Control globals -DELETE_COLLISIONS = False -SKIP_COLLISIONS = False -ALLOW_UNCONFIGURED = False - # Logging globals CHECKED_OKAY = [] CHECKED_STUCK_RECOVERABLE = [] @@ -57,7 +55,7 @@ SKIPPED = [] # Ignorable NodeLogs -LOG_WHITELIST = { +LOG_WHITELIST = ( 'affiliated_institution_added', 'category_updated', 'comment_added', @@ -109,35 +107,34 @@ 'node_access_requests_disabled', 'view_only_link_added', 'view_only_link_removed', -} +) # Require action, but recoverable from -LOG_GREYLIST = { +LOG_GREYLIST = ( 'addon_file_moved', 'addon_file_renamed', 'osf_storage_file_added', 'osf_storage_file_removed', 'osf_storage_file_updated', 'osf_storage_folder_created' -} -VERIFY_PROVIDER = { +) +VERIFY_PROVIDER = ( 'addon_file_moved', 'addon_file_renamed' -} +) # Permissible in certain circumstances after communication with user -PERMISSIBLE_BLACKLIST = { +PERMISSIBLE_BLACKLIST = ( 'dropbox_folder_selected', 'dropbox_node_authorized', 'dropbox_node_deauthorized', 'addon_removed', 'addon_added' -} +) -# Extendable with command line input -PERMISSIBLE_ADDONS = { - 'osfstorage' -} +DEFAULT_PERMISSIBLE_ADDONS = ( + 'osfstorage', +) def complete_archive_target(reg, addon_short_name): # Cache registration files count @@ -149,16 +146,16 @@ def complete_archive_target(reg, addon_short_name): target.save() archive_job._post_update_target() -def perform_wb_copy(reg, node_settings): +def perform_wb_copy(reg, node_settings, delete_collisions=False, skip_collisions=False): src, dst, user = reg.archive_job.info() if dst.files.filter(name=node_settings.archive_folder_name.replace('/', '-')).exists(): - if not DELETE_COLLISIONS and not SKIP_COLLISIONS: + if not delete_collisions and not skip_collisions: raise Exception('Archive folder for {} already exists. 
Investigate manually and rerun with either --delete-collisions or --skip-collisions'.format(reg._id)) - if DELETE_COLLISIONS: + if delete_collisions: archive_folder = dst.files.exclude(type='osf.trashedfolder').get(name=node_settings.archive_folder_name.replace('/', '-')) logger.info(f'Removing {archive_folder}') archive_folder.delete() - if SKIP_COLLISIONS: + if skip_collisions: complete_archive_target(reg, node_settings.short_name) return cookie = user.get_or_create_cookie().decode() @@ -283,9 +280,9 @@ def get_logs_to_revert(reg): Q(node=reg.registered_from) | (Q(params__source__nid=reg.registered_from._id) | Q(params__destination__nid=reg.registered_from._id))).order_by('-date') -def revert_log_actions(file_tree, reg, obj_cache): +def revert_log_actions(file_tree, reg, obj_cache, permissible_addons): logs_to_revert = get_logs_to_revert(reg) - if len(PERMISSIBLE_ADDONS) > 1: + if len(permissible_addons) > 1: logs_to_revert = logs_to_revert.exclude(action__in=PERMISSIBLE_BLACKLIST) for log in list(logs_to_revert): try: @@ -327,7 +324,7 @@ def revert_log_actions(file_tree, reg, obj_cache): obj_cache.add(file_obj._id) return file_tree -def build_file_tree(reg, node_settings): +def build_file_tree(reg, node_settings, *args, **kwargs): n = reg.registered_from obj_cache = set(n.files.values_list('_id', flat=True)) @@ -344,45 +341,48 @@ def _recurse(file_obj, node): return serialized current_tree = _recurse(node_settings.get_root(), n) - return revert_log_actions(current_tree, reg, obj_cache) + return revert_log_actions(current_tree, reg, obj_cache, *args, **kwargs) -def archive(registration): +def archive(registration, *args, permissible_addons=DEFAULT_PERMISSIBLE_ADDONS, allow_unconfigured=False, **kwargs): for reg in registration.node_and_primary_descendants(): reg.registered_from.creator.get_or_create_cookie() # Allow WB requests if reg.archive_job.status == ARCHIVER_SUCCESS: continue logs_to_revert = reg.registered_from.logs.filter(date__gt=reg.registered_date).exclude(action__in=LOG_WHITELIST) - if len(PERMISSIBLE_ADDONS) == 1: + if len(permissible_addons) == 1: assert not logs_to_revert.exclude(action__in=LOG_GREYLIST).exists(), f'{registration._id}: {reg.registered_from._id} had unexpected unacceptable logs' else: assert not logs_to_revert.exclude(action__in=LOG_GREYLIST).exclude(action__in=PERMISSIBLE_BLACKLIST).exists(), f'{registration._id}: {reg.registered_from._id} had unexpected unacceptable logs' logger.info(f'Preparing to archive {reg._id}') - for short_name in PERMISSIBLE_ADDONS: + for short_name in permissible_addons: node_settings = reg.registered_from.get_addon(short_name) if not hasattr(node_settings, '_get_file_tree'): # Excludes invalid or None-type continue if not node_settings.configured: - if not ALLOW_UNCONFIGURED: + if not allow_unconfigured: raise Exception(f'{reg._id}: {short_name} on {reg.registered_from._id} is not configured. 
If this is permissible, re-run with `--allow-unconfigured`.') continue if not reg.archive_job.get_target(short_name) or reg.archive_job.get_target(short_name).status == ARCHIVER_SUCCESS: continue if short_name == 'osfstorage': - file_tree = build_file_tree(reg, node_settings) + file_tree = build_file_tree(reg, node_settings, permissible_addons=permissible_addons) manually_archive(file_tree, reg, node_settings) complete_archive_target(reg, short_name) else: assert reg.archiving, f'{reg._id}: Must be `archiving` for WB to copy' - perform_wb_copy(reg, node_settings) + perform_wb_copy(reg, node_settings, *args, **kwargs) -def archive_registrations(): +def archive_registrations(*args, **kwargs): for reg in deepcopy(VERIFIED): - archive(reg) + archive(reg, *args, **kwargs) ARCHIVED.append(reg) VERIFIED.remove(reg) -def verify(registration): +def verify(registration, permissible_addons=DEFAULT_PERMISSIBLE_ADDONS, raise_error=False): + permissible_addons = set(permissible_addons) + maybe_suppress_error = contextlib.suppress(ValidationError) if not raise_error else contextlib.nullcontext(enter_result=False) + for reg in registration.node_and_primary_descendants(): logger.info(f'Verifying {reg._id}') if reg.archive_job.status == ARCHIVER_SUCCESS: @@ -390,26 +390,41 @@ def verify(registration): nonignorable_logs = get_logs_to_revert(reg) unacceptable_logs = nonignorable_logs.exclude(action__in=LOG_GREYLIST) if unacceptable_logs.exists(): - if len(PERMISSIBLE_ADDONS) == 1 or unacceptable_logs.exclude(action__in=PERMISSIBLE_BLACKLIST): - logger.error('{}: Original node {} has unacceptable logs: {}'.format( + if len(permissible_addons) == 1 or unacceptable_logs.exclude(action__in=PERMISSIBLE_BLACKLIST): + message = '{}: Original node {} has unacceptable logs: {}'.format( registration._id, reg.registered_from._id, list(unacceptable_logs.values_list('action', flat=True)) - )) + ) + logger.error(message) + + with maybe_suppress_error: + raise ValidationError(message) + return False if nonignorable_logs.filter(action__in=VERIFY_PROVIDER).exists(): for log in nonignorable_logs.filter(action__in=VERIFY_PROVIDER): for key in ['source', 'destination']: if key in log.params: if log.params[key]['provider'] != 'osfstorage': - logger.error('{}: {} Only OSFS moves and renames are permissible'.format( + message = '{}: {} Only OSFS moves and renames are permissible'.format( registration._id, log._id - )) + ) + logger.error(message) + + with maybe_suppress_error: + raise ValidationError(message) + return False addons = reg.registered_from.get_addon_names() - if set(addons) - set(PERMISSIBLE_ADDONS | {'wiki'}) != set(): - logger.error(f'{registration._id}: Original node {reg.registered_from._id} has addons: {addons}') + if set(addons) - set(permissible_addons | {'wiki'}) != set(): + message = f'{registration._id}: Original node {reg.registered_from._id} has addons: {addons}' + logger.error(message) + + with maybe_suppress_error: + raise ValidationError(message) + return False if nonignorable_logs.exists(): logger.info('{}: Original node {} has had revertable file operations'.format( @@ -423,23 +438,23 @@ -def verify_registrations(registration_ids): +def verify_registrations(registration_ids, permissible_addons): for r_id in registration_ids: reg = Registration.load(r_id) if not reg: logger.warning(f'Registration {r_id} not found') else: - if verify(reg): + if verify(reg, permissible_addons=permissible_addons): VERIFIED.append(reg) else: SKIPPED.append(reg) def check(reg): + 
"""Check registration status. Raise exception if registration stuck.""" logger.info(f'Checking {reg._id}') if reg.is_deleted: - logger.info(f'Registration {reg._id} is deleted.') - CHECKED_OKAY.append(reg) - return + return f'Registration {reg._id} is deleted.' + expired_if_before = timezone.now() - ARCHIVE_TIMEOUT_TIMEDELTA archive_job = reg.archive_job root_job = reg.root.archive_job @@ -452,14 +467,11 @@ def check(reg): if still_archiving and root_job.datetime_initiated < expired_if_before: logger.warning(f'Registration {reg._id} is stuck in archiving') if verify(reg): - logger.info(f'Registration {reg._id} verified recoverable') - CHECKED_STUCK_RECOVERABLE.append(reg) + raise RegistrationStuckRecoverableException(f'Registration {reg._id} is stuck and verified recoverable') else: - logger.info(f'Registration {reg._id} verified broken') - CHECKED_STUCK_BROKEN.append(reg) - else: - logger.info(f'Registration {reg._id} is not stuck in archiving') - CHECKED_OKAY.append(reg) + raise RegistrationStuckBrokenException(f'Registration {reg._id} is stuck and verified broken') + + return f'Registration {reg._id} is not stuck in archiving' def check_registrations(registration_ids): for r_id in registration_ids: @@ -467,7 +479,16 @@ def check_registrations(registration_ids): if not reg: logger.warning(f'Registration {r_id} not found') else: - check(reg) + try: + status = check(reg) + logger.info(status) + CHECKED_OKAY.append(reg) + except RegistrationStuckRecoverableException as exc: + logger.info(str(exc)) + CHECKED_STUCK_RECOVERABLE.append(reg) + except RegistrationStuckBrokenException as exc: + logger.info(str(exc)) + CHECKED_STUCK_BROKEN.append(reg) def log_results(dry_run): if CHECKED_OKAY: @@ -527,29 +548,31 @@ def add_arguments(self, parser): parser.add_argument('--guids', type=str, nargs='+', help='GUIDs of registrations to archive') def handle(self, *args, **options): - global DELETE_COLLISIONS - global SKIP_COLLISIONS - global ALLOW_UNCONFIGURED - DELETE_COLLISIONS = options.get('delete_collisions') - SKIP_COLLISIONS = options.get('skip_collisions') - ALLOW_UNCONFIGURED = options.get('allow_unconfigured') - if DELETE_COLLISIONS and SKIP_COLLISIONS: + delete_collisions = options.get('delete_collisions') + skip_collisions = options.get('skip_collisions') + allow_unconfigured = options.get('allow_unconfigured') + if delete_collisions and skip_collisions: raise Exception('Cannot specify both delete_collisions and skip_collisions') dry_run = options.get('dry_run') if not dry_run: script_utils.add_file_logger(logger, __file__) - addons = options.get('addons', []) - if addons: - PERMISSIBLE_ADDONS.update(set(addons)) + addons = options.get('addons') or set() + addons.update(DEFAULT_PERMISSIBLE_ADDONS) + registration_ids = options.get('guids', []) if options.get('check', False): check_registrations(registration_ids) else: - verify_registrations(registration_ids) + verify_registrations(registration_ids, permissible_addons=addons) if not dry_run: - archive_registrations() + archive_registrations( + permissible_addons=addons, + delete_collisions=delete_collisions, + skip_collisions=skip_collisions, + allow_unconfigured=allow_unconfigured, + ) log_results(dry_run) diff --git a/osf/management/commands/sync_doi_metadata.py b/osf/management/commands/sync_doi_metadata.py index 8002feeb961..e6b079100f3 100644 --- a/osf/management/commands/sync_doi_metadata.py +++ b/osf/management/commands/sync_doi_metadata.py @@ -5,7 +5,7 @@ from django.contrib.contenttypes.models import ContentType from 
django.core.management.base import BaseCommand -from osf.models import GuidMetadataRecord, Identifier, Registration +from osf.models import GuidMetadataRecord, Identifier, Registration, Preprint from framework.celery_tasks import app logger = logging.getLogger(__name__) @@ -14,8 +14,8 @@ RATE_LIMIT_RETRY_DELAY = 60 * 5 -@app.task(name='osf.management.commands.sync_doi_metadata', max_retries=5, default_retry_delay=RATE_LIMIT_RETRY_DELAY) -def sync_identifier_doi(identifier_id): +@app.task(name='osf.management.commands.sync_doi_metadata', bind=True, acks_late=True, max_retries=5, default_retry_delay=RATE_LIMIT_RETRY_DELAY) +def sync_identifier_doi(self, identifier_id): try: identifier = Identifier.objects.get(id=identifier_id) identifier.referent.request_identifier_update('doi') @@ -23,17 +23,21 @@ logger.info(f'Doi update for {identifier.value} complete') except Exception as err: logger.warning(f'[{err.__class__.__name__}] Doi update for {identifier.value} failed because of error: {err}') - sync_identifier_doi.retry(exc=err, countdown=RATE_LIMIT_RETRY_DELAY) + self.retry() @app.task(name='osf.management.commands.sync_doi_metadata_command', max_retries=5, default_retry_delay=RATE_LIMIT_RETRY_DELAY) -def sync_doi_metadata(modified_date, batch_size=100, dry_run=True, sync_private=False, rate_limit=100): +def sync_doi_metadata(modified_date, batch_size=100, dry_run=True, sync_private=False, rate_limit=100, missing_preprint_dois_only=False): identifiers = Identifier.objects.filter( category='doi', deleted__isnull=True, modified__lte=modified_date, object_id__isnull=False, ) + if missing_preprint_dois_only: + sync_preprint_missing_dois.apply_async(kwargs={'rate_limit': rate_limit}) + identifiers = identifiers.exclude(content_type=ContentType.objects.get_for_model(Preprint)) + if batch_size: identifiers = identifiers[:batch_size] rate_limit = batch_size if batch_size > rate_limit else rate_limit @@ -55,6 +59,27 @@ def sync_doi_metadata(modified_date, batch_size=100, dry_run=True, sync_private= sync_identifier_doi.apply_async(kwargs={'identifier_id': identifier.id}) +@app.task(name='osf.management.commands.sync_preprint_missing_dois', max_retries=5, default_retry_delay=RATE_LIMIT_RETRY_DELAY) +def sync_preprint_missing_dois(rate_limit): + preprints = Preprint.objects.filter(preprint_doi_created=None) + for record_number, preprint in enumerate(preprints, 1): + # pause periodically so we stay under CrossRef's rate limit + if not record_number % rate_limit: + time.sleep(RATE_LIMIT_RETRY_DELAY) + + async_request_identifier_update.apply_async(kwargs={'preprint_id': preprint._id}) + + +@app.task(name='osf.management.commands.async_request_identifier_update', bind=True, acks_late=True, max_retries=5, default_retry_delay=RATE_LIMIT_RETRY_DELAY) +def async_request_identifier_update(self, preprint_id): + preprint = Preprint.load(preprint_id) + try: + preprint.request_identifier_update('doi', create=True) + except Exception as err: + logger.warning(f'[{err.__class__.__name__}] Doi creation failed for the preprint with id {preprint._id} because of error: {err}') + self.retry() + + @app.task(name='osf.management.commands.sync_doi_empty_metadata_dataarchive_registrations_command', max_retries=5, default_retry_delay=RATE_LIMIT_RETRY_DELAY) def sync_doi_empty_metadata_dataarchive_registrations(modified_date, batch_size=100, dry_run=True, sync_private=False, rate_limit=100): registrations_ids = list( diff --git a/osf/models/base.py b/osf/models/base.py index 
4b51544dd15..d2c07a86d9e 100644 --- a/osf/models/base.py +++ b/osf/models/base.py @@ -49,6 +49,8 @@ def generate_object_id(): def coerce_guid(maybe_guid, create_if_needed=False): if isinstance(maybe_guid, Guid): return maybe_guid + if isinstance(maybe_guid, VersionedGuidMixin): + return maybe_guid.versioned_guids.first().guid if isinstance(maybe_guid, GuidMixin): return maybe_guid.guids.first() if isinstance(maybe_guid, OptionalGuidMixin): diff --git a/osf/models/mixins.py b/osf/models/mixins.py index 9027a284f7c..b7fe97b7ece 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1061,12 +1061,17 @@ def get_request_state_counts(self): return counts def add_to_group(self, user, group): + if isinstance(group, Group): + group.user_set.add(user) + elif isinstance(group, str): + self.get_group(group).user_set.add(user) + else: + raise TypeError(f"Unsupported group type: {type(group)}") + # Add default notification subscription for subscription in self.DEFAULT_SUBSCRIPTIONS: self.add_user_to_subscription(user, f'{self._id}_{subscription}') - return self.get_group(group).user_set.add(user) - def remove_from_group(self, user, group, unsubscribe=True): _group = self.get_group(group) if group == ADMIN: @@ -1116,25 +1121,26 @@ def subjects_relationship_url(self): def subjects_url(self): return self.absolute_api_v2_url + 'subjects/' - def check_subject_perms(self, auth): + def check_subject_perms(self, auth, ignore_permission=False): AbstractNode = apps.get_model('osf.AbstractNode') Preprint = apps.get_model('osf.Preprint') CollectionSubmission = apps.get_model('osf.CollectionSubmission') DraftRegistration = apps.get_model('osf.DraftRegistration') - if isinstance(self, AbstractNode): - if not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can change subjects.') - elif isinstance(self, Preprint): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to change a preprint\'s subjects.') - elif isinstance(self, DraftRegistration): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have write permissions to change a draft registration\'s subjects.') - elif isinstance(self, CollectionSubmission): - if not self.guid.referent.has_permission(auth.user, ADMIN) and not auth.user.has_perms( - self.collection.groups[ADMIN], self.collection): - raise PermissionsError('Only admins can change subjects.') + if not ignore_permission: + if isinstance(self, AbstractNode): + if not self.has_permission(auth.user, ADMIN): + raise PermissionsError('Only admins can change subjects.') + elif isinstance(self, Preprint): + if not self.has_permission(auth.user, WRITE): + raise PermissionsError('Must have admin or write permissions to change a preprint\'s subjects.') + elif isinstance(self, DraftRegistration): + if not self.has_permission(auth.user, WRITE): + raise PermissionsError('Must have write permissions to change a draft registration\'s subjects.') + elif isinstance(self, CollectionSubmission): + if not self.guid.referent.has_permission(auth.user, ADMIN) and not auth.user.has_perms( + self.collection.groups[ADMIN], self.collection): + raise PermissionsError('Only admins can change subjects.') return def add_subjects_log(self, old_subjects, auth): @@ -1157,7 +1163,7 @@ def assert_subject_format(self, subj_list, expect_list, error_msg): if (expect_list and not is_list) or (not expect_list and is_list): raise ValidationValueError(f'Subjects are improperly formatted. 
{error_msg}') - def set_subjects(self, new_subjects, auth, add_log=True): + def set_subjects(self, new_subjects, auth, add_log=True, **kwargs): """ Helper for setting M2M subjects field from list of hierarchies received from UI. Only authorized admins may set subjects. @@ -1168,7 +1174,7 @@ def set_subjects(self, new_subjects, auth, add_log=True): :return: None """ if auth: - self.check_subject_perms(auth) + self.check_subject_perms(auth, **kwargs) self.assert_subject_format(new_subjects, expect_list=True, error_msg='Expecting list of lists.') old_subjects = list(self.subjects.values_list('id', flat=True)) @@ -1190,7 +1196,7 @@ def set_subjects(self, new_subjects, auth, add_log=True): if hasattr(self, 'update_search'): self.update_search() - def set_subjects_from_relationships(self, subjects_list, auth, add_log=True): + def set_subjects_from_relationships(self, subjects_list, auth, add_log=True, **kwargs): """ Helper for setting M2M subjects field from list of flattened subjects received from UI. Only authorized admins may set subjects. @@ -1200,7 +1206,7 @@ def set_subjects_from_relationships(self, subjects_list, auth, add_log=True): :return: None """ - self.check_subject_perms(auth) + self.check_subject_perms(auth, **kwargs) self.assert_subject_format(subjects_list, expect_list=True, error_msg='Expecting a list of subjects.') if subjects_list: self.assert_subject_format(subjects_list[0], expect_list=False, error_msg='Expecting a list of subjects.') @@ -2221,7 +2227,6 @@ def suspend_spam_user(self, user): user.flag_spam() if not user.is_disabled: user.deactivate_account() - user.is_registered = False mails.send_mail( to_addr=user.username, mail=mails.SPAM_USER_BANNED, diff --git a/osf/models/node.py b/osf/models/node.py index 187f5f3123f..83d646cc717 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -612,6 +612,10 @@ def institutions_url(self): def institutions_relationship_url(self): return self.absolute_api_v2_url + 'relationships/institutions/' + @property + def callbacks_url(self): + return self.absolute_api_v2_url + 'callbacks/' + # For Comment API compatibility @property def target_type(self): @@ -661,6 +665,9 @@ def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs): def api_url_for(self, view_name, _absolute=False, *args, **kwargs): return api_url_for(view_name, pid=self._primary_key, _absolute=_absolute, *args, **kwargs) + def api_v2_url_for(self, path_str, params=None, **kwargs): + return api_url_for(path_str, params=params, **kwargs) + @property def project_or_component(self): # The distinction is drawn based on whether something has a parent node, rather than by category diff --git a/osf/models/preprint.py b/osf/models/preprint.py index 162ab8b00a8..ed8aa0af380 100644 --- a/osf/models/preprint.py +++ b/osf/models/preprint.py @@ -1,11 +1,12 @@ import functools +import inspect from urllib.parse import urljoin import logging import re from dirtyfields import DirtyFieldsMixin from django.db import models, IntegrityError -from django.db.models import Q +from django.db.models import Q, Max from django.utils import timezone from django.contrib.contenttypes.fields import GenericRelation from django.core.exceptions import ValidationError @@ -146,6 +147,35 @@ class EverPublishedPreprintManager(PreprintManager): def get_queryset(self): return super().get_queryset().filter(date_published__isnull=False) + +def require_permission(permissions: list): + """ + Preprint-specific decorator for permission checks. 
+ + This decorator adds an implicit `ignore_permission` argument to the decorated function, + allowing you to bypass the permission check when set to `True`. + + Usage example: + preprint.some_method(..., ignore_permission=True) # Skips permission check + """ + def decorator(func): + @functools.wraps(func) + def wrapper(self, *args, ignore_permission=False, **kwargs): + sig = inspect.signature(func) + bound_args = sig.bind_partial(self, *args, **kwargs) + bound_args.apply_defaults() + + auth = bound_args.arguments.get('auth', None) + + if not ignore_permission and auth is not None: + for permission in permissions: + if not self.has_permission(auth.user, permission): + raise PermissionsError(f'Must have following permissions to change a preprint: {permissions}') + return func(self, *args, ignore_permission=ignore_permission, **kwargs) + return wrapper + return decorator + + class Preprint(DirtyFieldsMixin, VersionedGuidMixin, IdentifierMixin, ReviewableMixin, BaseModel, TitleMixin, DescriptionMixin, Loggable, Taggable, ContributorMixin, GuardianMixin, SpamOverrideMixin, TaxonomizableMixin, AffiliatedInstitutionMixin): @@ -171,6 +201,8 @@ class Preprint(DirtyFieldsMixin, VersionedGuidMixin, IdentifierMixin, Reviewable 'primary_file', 'contributors', 'tags', + 'date_withdrawn', + 'withdrawal_justification' } PREREG_LINK_INFO_CHOICES = [('prereg_designs', 'Pre-registration of study designs'), @@ -374,12 +406,14 @@ def check_unfinished_or_unpublished_version(self): return None, None @classmethod - def create_version(cls, create_from_guid, auth): + def create_version(cls, create_from_guid, auth, assign_version_number=None, ignore_permission=False, ignore_existing_versions=False): """Create a new version for a given preprint. `create_from_guid` can be any existing versions of the preprint but `create_version` always finds the latest version and creates a new version from it. In addition, this creates an "incomplete" new preprint version object using the model class and returns both the new object and the data to be updated. The API, more specifically `PreprintCreateVersionSerializer` must call `.update()` to "completely finish" the new preprint version object creation. + Optionally, you can assign a custom version number, as long as it doesn't conflict with existing versions. + The version must be an integer greater than 0. 
""" # Use `Guid.load()` instead of `VersionedGuid.load()` to retrieve the base guid obj, which always points to the @@ -389,26 +423,25 @@ def create_version(cls, create_from_guid, auth): if not latest_version: sentry.log_message(f'Preprint not found: [guid={guid_obj._id}, create_from_guid={create_from_guid}]') return None, None - if not latest_version.has_permission(auth.user, ADMIN): + if not ignore_permission and not latest_version.has_permission(auth.user, ADMIN): sentry.log_message(f'ADMIN permission for the latest version is required to create a new version: ' f'[user={auth.user._id}, guid={guid_obj._id}, latest_version={latest_version._id}]') raise PermissionsError - unfinished_version, unpublished_version = latest_version.check_unfinished_or_unpublished_version() - if unpublished_version: - logger.error('Failed to create a new version due to unpublished pending version already exists: ' - f'[version={unpublished_version.version}, ' - f'_id={unpublished_version._id}, ' - f'state={unpublished_version.machine_state}].') - raise UnpublishedPendingPreprintVersionExists - if unfinished_version: - logger.warning(f'Use existing initiated but unfinished version instead of creating a new one: ' - f'[version={unfinished_version.version}, ' - f'_id={unfinished_version._id}, ' - f'state={unfinished_version.machine_state}].') - return unfinished_version, None - - # Note: version number bumps from the last version number instead of the latest version number - last_version_number = guid_obj.versions.order_by('-version').first().version + if not ignore_existing_versions: + unfinished_version, unpublished_version = latest_version.check_unfinished_or_unpublished_version() + if unpublished_version: + message = ('Failed to create a new version due to unpublished pending version already exists: ' + f'[version={unpublished_version.version}, ' + f'_id={unpublished_version._id}, ' + f'state={unpublished_version.machine_state}].') + logger.error(message) + raise UnpublishedPendingPreprintVersionExists(message) + if unfinished_version: + logger.warning(f'Use existing initiated but unfinished version instead of creating a new one: ' + f'[version={unfinished_version.version}, ' + f'_id={unfinished_version._id}, ' + f'state={unfinished_version.machine_state}].') + return unfinished_version, None # Prepare the data to clone/update data_to_update = { @@ -438,13 +471,30 @@ def create_version(cls, create_from_guid, auth): description=latest_version.description, ) preprint.save(guid_ready=False) + + # Note: version number bumps from the last version number instead of the latest version number + # if assign_version_number is not specified + if assign_version_number: + if not isinstance(assign_version_number, int) or assign_version_number <= 0: + raise ValueError( + f"Unable to assign: {assign_version_number}. " + 'Version must be integer greater than 0.' + ) + if GuidVersionsThrough.objects.filter(guid=guid_obj, version=assign_version_number).first(): + raise ValueError(f"Version {assign_version_number} for preprint {guid_obj} already exists.") + + version_number = assign_version_number + else: + last_version_number = guid_obj.versions.order_by('-version').first().version + version_number = last_version_number + 1 + # Create a new entry in the `GuidVersionsThrough` table to store version information, which must happen right # after the first `.save()` of the new preprint version object, which enables `preprint._id` to be computed. 
guid_version = GuidVersionsThrough( referent=preprint, object_id=guid_obj.object_id, content_type=guid_obj.content_type, - version=last_version_number + 1, + version=version_number, guid=guid_obj ) guid_version.save() @@ -463,22 +513,51 @@ def create_version(cls, create_from_guid, auth): sentry.log_exception(e) sentry.log_message(f'Contributor was not added to new preprint version due to error: ' f'[preprint={preprint._id}, user={contributor.user._id}]') + + # Add new version record for unregistered contributors + for contributor in preprint.contributor_set.filter(Q(user__is_registered=False) | Q(user__date_disabled__isnull=False)): + try: + contributor.user.add_unclaimed_record( + claim_origin=preprint, + referrer=auth.user, + email=contributor.user.email, + given_name=contributor.user.fullname, + ) + except ValidationError as e: + sentry.log_exception(e) + sentry.log_message(f'Unregistered contributor was not added to new preprint version due to error: ' + f'[preprint={preprint._id}, user={contributor.user._id}]') + # Add affiliated institutions for institution in latest_version.affiliated_institutions.all(): preprint.add_affiliated_institution(institution, auth.user, ignore_user_affiliation=True) - # Update Guid obj to point to the new version if there is no moderation - if not preprint.provider.reviews_workflow: + # Update Guid obj to point to the new version if there is no moderation and new version is bigger + if not preprint.provider.reviews_workflow and version_number > guid_obj.referent.version: guid_obj.referent = preprint guid_obj.object_id = preprint.pk guid_obj.content_type = ContentType.objects.get_for_model(preprint) guid_obj.save() if latest_version.node: - preprint.set_supplemental_node(latest_version.node, auth, save=False, ignore_node_permissions=True) + preprint.set_supplemental_node( + latest_version.node, + auth, + save=False, + ignore_node_permissions=True, + ignore_permission=ignore_permission + ) return preprint, data_to_update + def upgrade_version(self): + """Increase preprint version by one.""" + guid_version = GuidVersionsThrough.objects.get(object_id=self.id) + guid_version.version += 1 + guid_version.save() + + return self + @property def is_deleted(self): return bool(self.deleted) @@ -692,9 +771,14 @@ def is_latest_version(self): def get_preprint_versions(self, include_rejected=True): guids = self.versioned_guids.first().guid.versions.all() - preprint_versions = Preprint.objects.filter(id__in=[vg.object_id for vg in guids]).order_by('-id') + preprint_versions = ( + Preprint.objects + .filter(id__in=[vg.object_id for vg in guids]) + .annotate(latest_version=Max('versioned_guids__version')) + .order_by('-latest_version') + ) if include_rejected is False: - preprint_versions = [p for p in preprint_versions if p.machine_state != 'rejected'] + preprint_versions = preprint_versions.exclude(machine_state=DefaultStates.REJECTED.value) return preprint_versions def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs): @@ -753,13 +837,11 @@ def add_subjects_log(self, old_subjects, auth): ) return - def set_primary_file(self, preprint_file, auth, save=False): + @require_permission([WRITE]) + def set_primary_file(self, preprint_file, auth, save=False, **kwargs): if not self.root_folder: raise PreprintStateError('Preprint needs a root folder.') - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to change a preprint\'s primary file.') - if preprint_file.target != self or 
preprint_file.provider != 'osfstorage': raise ValueError('This file is not a valid primary file for this preprint.') @@ -785,10 +867,8 @@ def set_primary_file(self, preprint_file, auth, save=False): self.save() update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['primary_file']) - def set_published(self, published, auth, save=False, ignore_permission=False): - if not ignore_permission and not self.has_permission(auth.user, ADMIN): - raise PermissionsError('Only admins can publish a preprint.') - + @require_permission([ADMIN]) + def set_published(self, published, auth, save=False, **kwargs): if self.is_published and not published: raise ValueError('Cannot unpublish preprint.') @@ -805,7 +885,7 @@ def set_published(self, published, auth, save=False, ignore_permission=False): raise ValueError('Preprint must have at least one subject to be published.') self.date_published = timezone.now() # For legacy preprints, not logging - self.set_privacy('public', log=False, save=False) + self.set_privacy('public', log=False, save=False, **kwargs) # In case this provider is ever set up to use a reviews workflow, put this preprint in a sensible state self.machine_state = ReviewStates.ACCEPTED.value @@ -827,8 +907,9 @@ def set_published(self, published, auth, save=False, ignore_permission=False): if save: self.save() - def set_preprint_license(self, license_detail, auth, save=False): - license_record, license_changed = set_license(self, license_detail, auth, node_type='preprint') + @require_permission([WRITE]) + def set_preprint_license(self, license_detail, auth, save=False, **kwargs): + license_record, license_changed = set_license(self, license_detail, auth, node_type='preprint', **kwargs) if license_changed: self.add_log( @@ -1027,10 +1108,8 @@ def remove_tag(self, tag, auth, save=True): update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['tags']) return True - def set_supplemental_node(self, node, auth, save=False, ignore_node_permissions=False): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('You must have write permissions to set a supplemental node.') - + @require_permission([WRITE]) + def set_supplemental_node(self, node, auth, save=False, ignore_node_permissions=False, **kwargs): if not node.has_permission(auth.user, WRITE) and not ignore_node_permissions: raise PermissionsError('You must have write permissions on the supplemental node to attach.') @@ -1052,10 +1131,8 @@ def set_supplemental_node(self, node, auth, save=False, ignore_node_permissions= if save: self.save() - def unset_supplemental_node(self, auth, save=False): - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('You must have write permissions to set a supplemental node.') - + @require_permission([WRITE]) + def unset_supplemental_node(self, auth, save=False, **kwargs): current_node_id = self.node._id if self.node else None self.node = None @@ -1072,27 +1149,23 @@ def unset_supplemental_node(self, auth, save=False): if save: self.save() - def set_title(self, title, auth, save=False): + @require_permission([WRITE]) + def set_title(self, title, auth, save=False, **kwargs): """Set the title of this Preprint and log it. :param str title: The new title. :param auth: All the auth information including user, API key. 
""" - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to edit a preprint\'s title.') - return super().set_title(title, auth, save) - def set_description(self, description, auth, save=False): + @require_permission([WRITE]) + def set_description(self, description, auth, save=False, **kwargs): """Set the description and log the event. :param str description: The new description :param auth: All the auth informtion including user, API key. :param bool save: Save self after updating. """ - if not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to edit a preprint\'s title.') - return super().set_description(description, auth, save) def get_spam_fields(self, saved_fields=None): @@ -1100,7 +1173,8 @@ def get_spam_fields(self, saved_fields=None): return self.SPAM_CHECK_FIELDS return self.SPAM_CHECK_FIELDS.intersection(saved_fields) - def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons=False, force=False, should_hide=False): + @require_permission([WRITE]) + def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons=False, force=False, should_hide=False, **kwargs): """Set the permissions for this preprint - mainly for spam purposes. :param permissions: A string, either 'public' or 'private' @@ -1109,8 +1183,6 @@ def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons= :param bool meeting_creation: Whether this was created due to a meetings email. :param bool check_addons: Check and collect messages for addons? """ - if auth and not self.has_permission(auth.user, WRITE): - raise PermissionsError('Must have admin or write permissions to change privacy settings.') if permissions == 'public' and not self.is_public: if (self.is_spam or (settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE and self.is_spammy)) and not force: raise PreprintStateError( @@ -1176,7 +1248,8 @@ def set_contributor_order(self, contributor_ids): @classmethod def bulk_update_search(cls, preprints, index=None): for _preprint in preprints: - update_share(_preprint) + if _preprint.is_latest_version: + update_share(_preprint) from website import search try: serialize = functools.partial(search.search.update_preprint, index=index, bulk=True, async_update=False) @@ -1254,7 +1327,8 @@ def _add_related_source_tags(self, contributor): system_tag_to_add, created = Tag.all_tags.get_or_create(name=provider_source_tag(self.provider._id, 'preprint'), system=True) contributor.add_system_tag(system_tag_to_add) - def update_has_coi(self, auth: Auth, has_coi: bool, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_has_coi(self, auth: Auth, has_coi: bool, log: bool = True, save: bool = True, **kwargs): """ This method sets the field `has_coi` to indicate if there's a conflict interest statement for this preprint and logs that change. @@ -1286,7 +1360,8 @@ def update_has_coi(self, auth: Auth, has_coi: bool, log: bool = True, save: bool if save: self.save() - def update_conflict_of_interest_statement(self, auth: Auth, coi_statement: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_conflict_of_interest_statement(self, auth: Auth, coi_statement: str, log: bool = True, save: bool = True, **kwargs): """ This method sets the `conflict_of_interest_statement` field for this preprint and logs that change. 
@@ -1315,7 +1390,8 @@ def update_conflict_of_interest_statement(self, auth: Auth, coi_statement: str, if save: self.save() - def update_has_data_links(self, auth: Auth, has_data_links: bool, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_has_data_links(self, auth: Auth, has_data_links: bool, log: bool = True, save: bool = True, **kwargs): """ This method sets the `has_data_links` field that represents the availability of links to supplementary data for this preprint and logs that change. @@ -1346,11 +1422,12 @@ def update_has_data_links(self, auth: Auth, has_data_links: bool, log: bool = Tr auth=auth ) if not has_data_links: - self.update_data_links(auth, data_links=[], log=False) + self.update_data_links(auth, data_links=[], log=False, **kwargs) if save: self.save() - def update_data_links(self, auth: Auth, data_links: list, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_data_links(self, auth: Auth, data_links: list, log: bool = True, save: bool = True, **kwargs): """ This method sets the field `data_links` which is a validated list of links to supplementary data for a preprint and logs that change. @@ -1382,7 +1459,8 @@ def update_data_links(self, auth: Auth, data_links: list, log: bool = True, save if save: self.save() - def update_why_no_data(self, auth: Auth, why_no_data: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_why_no_data(self, auth: Auth, why_no_data: str, log: bool = True, save: bool = True, **kwargs): """ This method sets the field `why_no_data`, a string that represents a user provided explanation for the unavailability of supplementary data for their preprint. @@ -1414,7 +1492,8 @@ def update_why_no_data(self, auth: Auth, why_no_data: str, log: bool = True, sav if save: self.save() - def update_has_prereg_links(self, auth: Auth, has_prereg_links: bool, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_has_prereg_links(self, auth: Auth, has_prereg_links: bool, log: bool = True, save: bool = True, **kwargs): """ This method updates the `has_prereg_links` field, which indicates availability of links to prereg data and logs changes to it. @@ -1446,12 +1525,13 @@ def update_has_prereg_links(self, auth: Auth, has_prereg_links: bool, log: bool auth=auth ) if not has_prereg_links: - self.update_prereg_links(auth, prereg_links=[], log=False) - self.update_prereg_link_info(auth, prereg_link_info=None, log=False) + self.update_prereg_links(auth, prereg_links=[], log=False, **kwargs) + self.update_prereg_link_info(auth, prereg_link_info=None, log=False, **kwargs) if save: self.save() - def update_why_no_prereg(self, auth: Auth, why_no_prereg: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_why_no_prereg(self, auth: Auth, why_no_prereg: str, log: bool = True, save: bool = True, **kwargs): """ This method updates the field `why_no_prereg` that contains a user provided explanation of prereg data unavailability and logs changes to it. 
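Note how `update_has_data_links` and `update_has_prereg_links` above now forward `**kwargs` into the nested `update_*` calls they trigger: a single `ignore_permission=True` at the entry point must also govern the cascaded updates. A toy sketch of why that forwarding matters (hypothetical names, not OSF code):

CHECKS_RUN = []

def check_perms(auth, ignore_permission=False):
    CHECKS_RUN.append(ignore_permission)
    if not ignore_permission and auth != 'admin':
        raise PermissionError('admin required')

def update_prereg_links(auth, prereg_links, **kwargs):
    check_perms(auth, **kwargs)
    return list(prereg_links)

def update_has_prereg_links(auth, has_prereg_links, **kwargs):
    check_perms(auth, **kwargs)
    if not has_prereg_links:
        # Dropping **kwargs here would re-enable the permission check
        # in the nested call and break the bypass path.
        update_prereg_links(auth, prereg_links=[], **kwargs)

update_has_prereg_links('moderator', False, ignore_permission=True)
assert CHECKS_RUN == [True, True]  # both checks saw the flag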
@@ -1483,7 +1563,8 @@ def update_why_no_prereg(self, auth: Auth, why_no_prereg: str, log: bool = True, if save: self.save() - def update_prereg_links(self, auth: Auth, prereg_links: list, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_prereg_links(self, auth: Auth, prereg_links: list, log: bool = True, save: bool = True, **kwargs): """ This method updates the field `prereg_links` that contains a list of validated URLs linking to prereg data and logs changes to it. @@ -1515,7 +1596,8 @@ def update_prereg_links(self, auth: Auth, prereg_links: list, log: bool = True, if save: self.save() - def update_prereg_link_info(self, auth: Auth, prereg_link_info: str, log: bool = True, save: bool = True): + @require_permission([ADMIN]) + def update_prereg_link_info(self, auth: Auth, prereg_link_info: str, log: bool = True, save: bool = True, **kwargs): """ This method updates the field `prereg_link_info` that contains one of a finite number of choice strings contained in the list in the static member `PREREG_LINK_INFO_CHOICES` that describe the nature of the preprint's diff --git a/osf/models/registrations.py b/osf/models/registrations.py index f7b017d9ddf..b92aed1e8e2 100644 --- a/osf/models/registrations.py +++ b/osf/models/registrations.py @@ -325,6 +325,11 @@ def archiving(self): job = self.archive_job return job and not job.done and not job.archive_tree_finished() + @property + def archived(self): + job = self.archive_job + return job and job.done and job.archive_tree_finished() + @property def is_moderated(self): if not self.provider: diff --git a/osf/models/user.py b/osf/models/user.py index 42bf8d12929..600b563dd76 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -71,6 +71,7 @@ MAX_QUICKFILES_MERGE_RENAME_ATTEMPTS = 1000 + def get_default_mailing_lists(): return {'Open Science Framework Help': True} @@ -1283,7 +1284,7 @@ def add_unconfirmed_email(self, email, expiration=None, external_identity=None, validate_email(email) if not external_identity and self.emails.filter(address=email).exists(): - if not force or self.is_confirmed: + if not force and self.is_confirmed: raise ValueError('Email already confirmed to this user.') # If the unconfirmed email is already present, refresh the token @@ -1664,35 +1665,37 @@ def n_projects_in_common(self, other_user): """Returns number of "shared projects" (projects that both users are contributors or group members for)""" return self._projects_in_common_query(other_user).count() - def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None): + def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None, skip_referrer_permissions=False): """Add a new project entry in the unclaimed records dictionary. :param object claim_origin: Object this unclaimed user was added to. currently `Node` or `Provider` or `Preprint` :param User referrer: User who referred this user. :param str given_name: The full name that the referrer gave for this user. :param str email: The given email address. :param bool skip_referrer_permissions: When True, skip the permission check on the referrer. 
:returns: The added record """ from .provider import AbstractProvider from .osf_group import OSFGroup - if isinstance(claim_origin, AbstractProvider): - if not bool(get_perms(referrer, claim_origin)): - raise PermissionsError( - f'Referrer does not have permission to add a moderator to provider {claim_origin._id}' - ) - - elif isinstance(claim_origin, OSFGroup): - if not claim_origin.has_permission(referrer, MANAGE): - raise PermissionsError( - f'Referrer does not have permission to add a member to {claim_origin._id}' - ) - else: - if not claim_origin.has_permission(referrer, ADMIN): - raise PermissionsError( - f'Referrer does not have permission to add a contributor to {claim_origin._id}' - ) + if not skip_referrer_permissions: + if isinstance(claim_origin, AbstractProvider): + if not bool(get_perms(referrer, claim_origin)): + raise PermissionsError( + f'Referrer does not have permission to add a moderator to provider {claim_origin._id}' + ) + + elif isinstance(claim_origin, OSFGroup): + if not claim_origin.has_permission(referrer, MANAGE): + raise PermissionsError( + f'Referrer does not have permission to add a member to {claim_origin._id}' + ) + else: + if not claim_origin.has_permission(referrer, ADMIN): + raise PermissionsError( + f'Referrer does not have permission to add a contributor to {claim_origin._id}' + ) pid = str(claim_origin._id) referrer_id = str(referrer._id) @@ -1986,7 +1989,7 @@ def _validate_admin_status_for_gdpr_delete(self, resource): is_active=True ).exclude(id=self.id).exists() - if not alternate_admins: + if not resource.deleted and not alternate_admins: raise UserStateError( f'You cannot delete {resource.__class__.__name__} {resource._id} because it would be ' f'a {resource.__class__.__name__} with contributors, but with no admin.' 
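The new `skip_referrer_permissions` flag lets flows that have already vetted authorization upstream bypass the referrer checks above. A minimal usage sketch; `provider`, `moderator`, and `unreg_user` are hypothetical stand-ins for an AbstractProvider, the inviting user, and the unregistered user record:

# Hypothetical call; without skip_referrer_permissions=True the referrer
# would need perms on the provider (the get_perms branch above) and this
# would raise PermissionsError.
record = unreg_user.add_unclaimed_record(
    claim_origin=provider,
    referrer=moderator,
    given_name='Jane Doe',
    email='jane.doe@example.com',
    skip_referrer_permissions=True,
)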
diff --git a/osf_tests/management_commands/test_fix_preprints_has_data_links_and_why_no_data.py b/osf_tests/management_commands/test_fix_preprints_has_data_links_and_why_no_data.py new file mode 100644 index 00000000000..878d610bde3 --- /dev/null +++ b/osf_tests/management_commands/test_fix_preprints_has_data_links_and_why_no_data.py @@ -0,0 +1,157 @@ +import pytest +from unittest import mock + +from django.core.management import call_command +from osf_tests.factories import PreprintFactory, PreprintProviderFactory + + +@pytest.mark.django_db +class TestFixPreprintsHasDataLinksAndWhyNoData: + + @pytest.fixture() + def preprint_not_no_with_why_no_data(self): + preprint = PreprintFactory() + preprint.has_data_links = 'available' + preprint.why_no_data = 'This should be cleared' + preprint.save() + return preprint + + @pytest.fixture() + def preprint_no_with_why_no_data(self): + preprint = PreprintFactory() + preprint.has_data_links = 'no' + preprint.why_no_data = 'Valid reason' + preprint.save() + return preprint + + @pytest.fixture() + def preprint_not_applicable_with_why_no_data(self): + preprint = PreprintFactory() + preprint.has_data_links = 'not_applicable' + preprint.why_no_data = 'This should be cleared' + preprint.save() + return preprint + + def test_fix_preprints_has_data_links_and_why_no_data( + self, preprint_not_no_with_why_no_data, preprint_no_with_why_no_data, preprint_not_applicable_with_why_no_data + ): + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint_not_no_with_why_no_data.refresh_from_db() + preprint_no_with_why_no_data.refresh_from_db() + preprint_not_applicable_with_why_no_data.refresh_from_db() + + assert preprint_not_no_with_why_no_data.why_no_data == '' + assert preprint_not_applicable_with_why_no_data.why_no_data == '' + + assert preprint_no_with_why_no_data.why_no_data == 'Valid reason' + + def test_dry_run_mode(self, preprint_not_no_with_why_no_data): + call_command('fix_preprints_has_data_links_and_why_no_data', '--dry-run') + + preprint_not_no_with_why_no_data.refresh_from_db() + assert preprint_not_no_with_why_no_data.why_no_data == 'This should be cleared' + + def test_specific_guid(self): + + preprint1 = PreprintFactory() + preprint1.has_data_links = 'available' + preprint1.why_no_data = 'This should be cleared' + preprint1.save() + + preprint2 = PreprintFactory() + preprint2.has_data_links = 'available' + preprint2.why_no_data = 'This should remain' + preprint2.save() + + call_command('fix_preprints_has_data_links_and_why_no_data', '--guid', f'{preprint1._id}') + + preprint1.refresh_from_db() + preprint2.refresh_from_db() + + assert preprint1.why_no_data == '' + assert preprint2.why_no_data == 'This should remain' + + def test_no_action_for_correct_preprints(self): + preprint = PreprintFactory() + preprint.has_data_links = 'available' + preprint.why_no_data = '' + preprint.save() + + with mock.patch('osf.models.Guid.split_guid', return_value=(preprint._id, 1)): + call_command('fix_preprints_has_data_links_and_why_no_data', '--guid', f'{preprint._id}_v1') + + preprint.refresh_from_db() + + assert preprint.has_data_links == 'available' + assert preprint.why_no_data == '' + + def test_preprints_with_null_has_data_links(self): + preprint = PreprintFactory() + preprint.has_data_links = None + preprint.why_no_data = 'Should be cleared for null has_data_links' + preprint.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint.refresh_from_db() + assert preprint.why_no_data == '' + + def 
test_preprints_different_providers(self): + provider1 = PreprintProviderFactory() + provider2 = PreprintProviderFactory() + + preprint1 = PreprintFactory(provider=provider1) + preprint1.has_data_links = 'available' + preprint1.why_no_data = 'Should be cleared (provider 1)' + preprint1.save() + + preprint2 = PreprintFactory(provider=provider2) + preprint2.has_data_links = 'not_applicable' + preprint2.why_no_data = 'Should be cleared (provider 2)' + preprint2.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint1.refresh_from_db() + preprint2.refresh_from_db() + + assert preprint1.why_no_data == '' + assert preprint2.why_no_data == '' + + def test_preprints_with_data_links(self): + preprint = PreprintFactory() + preprint.has_data_links = 'available' + preprint.data_links = ['https://example.com/dataset123'] + preprint.why_no_data = 'This contradicts having data links' + preprint.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint.refresh_from_db() + assert preprint.why_no_data == '' + assert preprint.data_links == ['https://example.com/dataset123'] + + def test_error_handling(self): + preprint1 = PreprintFactory() + preprint1.has_data_links = 'available' + preprint1.why_no_data = 'Should be cleared' + preprint1.save() + + preprint2 = PreprintFactory() + preprint2.has_data_links = 'available' + preprint2.why_no_data = 'Should be cleared too' + preprint2.save() + + preprint3 = PreprintFactory() + preprint3.has_data_links = 'available' + preprint3.why_no_data = 'Should also be cleared' + preprint3.save() + + call_command('fix_preprints_has_data_links_and_why_no_data') + + preprint1.refresh_from_db() + preprint3.refresh_from_db() + + assert preprint1.why_no_data == '' + assert preprint3.why_no_data == '' diff --git a/osf_tests/test_archiver.py b/osf_tests/test_archiver.py index 3855d169acb..59c178b839d 100644 --- a/osf_tests/test_archiver.py +++ b/osf_tests/test_archiver.py @@ -22,7 +22,6 @@ from website.app import * # noqa: F403 from website.archiver import listeners from website.archiver.tasks import * # noqa: F403 -from website.archiver.decorators import fail_archive_on_error from osf.models import Guid, RegistrationSchema, Registration from osf.models.archive import ArchiveTarget, ArchiveJob @@ -1111,22 +1110,6 @@ def test_find_failed_registrations(self): assert pk not in failed -class TestArchiverDecorators(ArchiverTestCase): - - @mock.patch('website.archiver.signals.archive_fail.send') - def test_fail_archive_on_error(self, mock_fail): - e = HTTPError(418) - - def error(*args, **kwargs): - raise e - - func = fail_archive_on_error(error) - func(node=self.dst) - mock_fail.assert_called_with( - self.dst, - errors=[str(e)] - ) - class TestArchiverBehavior(OsfTestCase): @mock.patch('osf.models.AbstractNode.update_search') diff --git a/package.json b/package.json index b199a03a62e..73747d65956 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "OSF", - "version": "25.06.0", + "version": "25.09.0", "description": "Facilitating Open Science", "repository": "https://github.com/CenterForOpenScience/osf.io", "author": "Center for Open Science", diff --git a/tests/test_events.py b/tests/test_events.py index 55b51fb3e8e..866bf6ec337 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -18,37 +18,6 @@ email_digest = 'email_digest' -class TestEventNotImplemented(OsfTestCase): - """ - Test non-implemented errors - """ - @register('not_implemented') - class NotImplementedEvent(Event): - pass - - def setUp(self): 
- super().setUp() - self.user = factories.UserFactory() - self.auth = Auth(user=self.user) - self.node = factories.ProjectFactory(creator=self.user) - self.event = self.NotImplementedEvent(self.user, self.node, 'not_implemented') - - def test_text(self): - with raises(NotImplementedError): - text = self.event.text_message - - def test_html(self): - with raises(NotImplementedError): - html = self.event.html_message - - def test_url(self): - with raises(NotImplementedError): - url = self.event.url - - def test_event(self): - with raises(NotImplementedError): - event = self.event.event_type - class TestListOfFiles(OsfTestCase): """ diff --git a/tests/test_metadata.py b/tests/test_metadata.py index c29365f4151..5f81c35fc5c 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -3,7 +3,6 @@ import pytest from django.core.exceptions import ValidationError -from framework.forms.utils import process_payload from osf.models import RegistrationSchema from osf.utils.migrations import ensure_schemas from website.project.metadata.schemas import OSF_META_SCHEMA_FILES @@ -31,18 +30,6 @@ def test_registrationschema_is_fine_with_same_name_but_different_version(self): RegistrationSchema(name='foo', schema={'foo': 42}, schema_version=2).save() assert RegistrationSchema.objects.filter(name='foo').count() == 2 - def test_process(self): - processed = process_payload({'foo': 'bar&baz'}) - assert processed['foo'] == 'bar%26baz' - - def test_process_list(self): - processed = process_payload({'foo': ['bar', 'baz&bob']}) - assert processed['foo'][1] == 'baz%26bob' - - def test_process_whitespace(self): - processed = process_payload({'foo': 'bar baz'}) - assert processed['foo'] == 'bar baz' - if __name__ == '__main__': unittest.main() diff --git a/tests/test_misc_views.py b/tests/test_misc_views.py index 543fb7d6068..35bccc88119 100644 --- a/tests/test_misc_views.py +++ b/tests/test_misc_views.py @@ -769,29 +769,29 @@ def test_view_comments_updates_user_comments_view_timestamp_files(self): # Regression test for https://openscience.atlassian.net/browse/OSF-5193 # moved from tests/test_comments.py - def test_find_unread_includes_edited_comments(self): - project = ProjectFactory() - user = AuthUserFactory() - project.add_contributor(user, save=True) - comment = CommentFactory(node=project, user=project.creator) - n_unread = Comment.find_n_unread(user=user, node=project, page='node') - assert n_unread == 1 - - url = project.api_url_for('update_comments_timestamp') - payload = {'page': 'node', 'rootId': project._id} - self.app.put(url, json=payload, auth=user.auth) - user.reload() - n_unread = Comment.find_n_unread(user=user, node=project, page='node') - assert n_unread == 0 - - # Edit previously read comment - comment.edit( - auth=Auth(project.creator), - content='edited', - save=True - ) - n_unread = Comment.find_n_unread(user=user, node=project, page='node') - assert n_unread == 1 + def test_find_unread_includes_edited_comments(self): + project = ProjectFactory() + user = AuthUserFactory() + project.add_contributor(user, save=True) + comment = CommentFactory(node=project, user=project.creator) + n_unread = Comment.find_n_unread(user=user, node=project, page='node') + assert n_unread == 1 + + url = project.api_url_for('update_comments_timestamp') + payload = {'page': 'node', 'rootId': project._id} + self.app.put(url, json=payload, auth=user.auth) + user.reload() + n_unread = Comment.find_n_unread(user=user, node=project, page='node') + assert n_unread == 0 + + # Edit previously read comment + comment.edit( 
+ auth=Auth(project.creator), + content='edited', + save=True + ) + n_unread = Comment.find_n_unread(user=user, node=project, page='node') + assert n_unread == 1 @mock.patch('website.views.PROXY_EMBER_APPS', False) class TestResolveGuid(OsfTestCase): @@ -834,4 +834,3 @@ def test_preprint_provider_with_osf_domain(self, mock_use_ember_app): url = web_url_for('resolve_guid', _guid=True, guid=preprint._id) res = self.app.get(url) mock_use_ember_app.assert_called_with() - diff --git a/tests/test_node_licenses.py b/tests/test_node_licenses.py deleted file mode 100644 index d16cdb500d9..00000000000 --- a/tests/test_node_licenses.py +++ /dev/null @@ -1,138 +0,0 @@ -import builtins -import json -from unittest import mock - -import pytest -from django.core.exceptions import ValidationError - -from framework.auth import Auth -from osf_tests.factories import (AuthUserFactory, NodeLicenseRecordFactory, - ProjectFactory) -from tests.base import OsfTestCase -from osf.utils.migrations import ensure_licenses -from tests.utils import assert_logs, assert_not_logs -from website import settings -from osf.models.licenses import NodeLicense, serialize_node_license_record, serialize_node_license -from osf.models import NodeLog -from osf.exceptions import NodeStateError - - - -CHANGED_NAME = 'FOO BAR' -CHANGED_TEXT = 'Some good new text' -CHANGED_PROPERTIES = ['foo', 'bar'] -LICENSE_TEXT = json.dumps({ - 'MIT': { - 'name': CHANGED_NAME, - 'text': CHANGED_TEXT, - 'properties': CHANGED_PROPERTIES - } -}) - -class TestNodeLicenses(OsfTestCase): - - def setUp(self): - super().setUp() - - self.user = AuthUserFactory() - self.node = ProjectFactory(creator=self.user) - self.LICENSE_NAME = 'MIT License' - self.node_license = NodeLicense.objects.get(name=self.LICENSE_NAME) - self.YEAR = '2105' - self.COPYRIGHT_HOLDERS = ['Foo', 'Bar'] - self.node.node_license = NodeLicenseRecordFactory( - node_license=self.node_license, - year=self.YEAR, - copyright_holders=self.COPYRIGHT_HOLDERS - ) - self.node.save() - - def test_serialize_node_license(self): - serialized = serialize_node_license(self.node_license) - assert serialized['name'] == self.LICENSE_NAME - assert serialized['id'] == self.node_license.license_id - assert serialized['text'] == self.node_license.text - - def test_serialize_node_license_record(self): - serialized = serialize_node_license_record(self.node.node_license) - assert serialized['name'] == self.LICENSE_NAME - assert serialized['id'] == self.node_license.license_id - assert serialized['text'] == self.node_license.text - assert serialized['year'] == self.YEAR - assert serialized['copyright_holders'] == self.COPYRIGHT_HOLDERS - - def test_serialize_node_license_record_None(self): - self.node.node_license = None - serialized = serialize_node_license_record(self.node.node_license) - assert serialized == {} - - def test_copy_node_license_record(self): - record = self.node.node_license - copied = record.copy() - assert copied._id is not None - assert record._id != copied._id - for prop in ('license_id', 'name', 'node_license'): - assert getattr(record, prop) == getattr(copied, prop) - - @pytest.mark.enable_implicit_clean - def test_license_uniqueness_on_id_is_enforced_in_the_database(self): - NodeLicense(license_id='foo', name='bar', text='baz').save() - pytest.raises(ValidationError, NodeLicense(license_id='foo', name='buz', text='boo').save) - - def test_ensure_licenses_updates_existing_licenses(self): - assert ensure_licenses() == (0, 18) - - def test_ensure_licenses_no_licenses(self): - before_count = 
NodeLicense.objects.all().count() - NodeLicense.objects.all().delete() - assert not NodeLicense.objects.all().count() - - ensure_licenses() - assert before_count == NodeLicense.objects.all().count() - - def test_ensure_licenses_some_missing(self): - NodeLicense.objects.get(license_id='LGPL3').delete() - with pytest.raises(NodeLicense.DoesNotExist): - NodeLicense.objects.get(license_id='LGPL3') - ensure_licenses() - found = NodeLicense.objects.get(license_id='LGPL3') - assert found is not None - - def test_ensure_licenses_updates_existing(self): - with mock.patch.object(builtins, 'open', mock.mock_open(read_data=LICENSE_TEXT)): - ensure_licenses() - MIT = NodeLicense.objects.get(license_id='MIT') - assert MIT.name == CHANGED_NAME - assert MIT.text == CHANGED_TEXT - assert MIT.properties == CHANGED_PROPERTIES - - @assert_logs(NodeLog.CHANGED_LICENSE, 'node') - def test_Node_set_node_license(self): - GPL3 = NodeLicense.objects.get(license_id='GPL3') - NEW_YEAR = '2014' - COPYLEFT_HOLDERS = ['Richard Stallman'] - self.node.set_node_license( - { - 'id': GPL3.license_id, - 'year': NEW_YEAR, - 'copyrightHolders': COPYLEFT_HOLDERS - }, - auth=Auth(self.user), - save=True - ) - - assert self.node.node_license.license_id == GPL3.license_id - assert self.node.node_license.name == GPL3.name - assert self.node.node_license.copyright_holders == COPYLEFT_HOLDERS - - @assert_not_logs(NodeLog.CHANGED_LICENSE, 'node') - def test_Node_set_node_license_invalid(self): - with pytest.raises(NodeStateError): - self.node.set_node_license( - { - 'id': 'SOME ID', - 'year': 'foo', - 'copyrightHolders': [] - }, - auth=Auth(self.user) - ) diff --git a/tests/test_preprints.py b/tests/test_preprints.py index a213c961659..5528ef28219 100644 --- a/tests/test_preprints.py +++ b/tests/test_preprints.py @@ -114,16 +114,6 @@ def test_verified_publishable(self, preprint): preprint.deleted = None assert preprint.verified_publishable is True - def test_is_deleted(self, preprint): - assert preprint.deleted is None - assert preprint.is_deleted is False - - preprint.deleted = timezone.now() - preprint.save() - - assert preprint.deleted is not None - assert preprint.is_deleted is True - def test_has_submitted_preprint(self, preprint): preprint.machine_state = 'initial' preprint.save() @@ -168,9 +158,6 @@ def test_all_tags(self, preprint, auth): assert len(preprint.all_tags) == 1 assert preprint.all_tags[0].name == 'test_tag_1' - def test_system_tags(self, preprint): - assert preprint.system_tags.exists() is False - class TestPreprintSubjects: diff --git a/website/archiver/decorators.py b/website/archiver/decorators.py deleted file mode 100644 index 0d6f46bfb37..00000000000 --- a/website/archiver/decorators.py +++ /dev/null @@ -1,25 +0,0 @@ -import functools - -from framework.exceptions import HTTPError - -from website.project.decorators import _inject_nodes -from website.archiver import ARCHIVER_NETWORK_ERROR -from website.archiver import signals - - -def fail_archive_on_error(func): - - @functools.wraps(func) - def wrapped(*args, **kwargs): - try: - return func(*args, **kwargs) - except HTTPError as e: - _inject_nodes(kwargs) - registration = kwargs['node'] - registration.archive_status = ARCHIVER_NETWORK_ERROR - registration.save() - signals.archive_fail.send( - registration, - errors=[str(e)] - ) - return wrapped diff --git a/website/files/utils.py b/website/files/utils.py index 6121c4fb757..50c25cefd13 100644 --- a/website/files/utils.py +++ b/website/files/utils.py @@ -1,7 +1,7 @@ from osf.models.metadata import 
GuidMetadataRecord
 
 
-def copy_files(src, target_node, parent=None, name=None):
+def copy_files(src, target_node, parent=None, name=None, **version_filters):
     """Copy the files from src to the target node
     :param Folder src: The source to copy children from
     :param Node target_node: The node to copy files to
@@ -18,7 +18,7 @@ def copy_files(src, target_node, parent=None, name=None):
         cloned.save()
 
     if src.is_file and src.versions.exists():
-        fileversions = src.versions.select_related('region').order_by('-created')
+        fileversions = src.versions.filter(**version_filters).select_related('region').order_by('-created')
         most_recent_fileversion = fileversions.first()
         if most_recent_fileversion.region and most_recent_fileversion.region != target_node.osfstorage_region:
             # add all original versions except the most recent
@@ -29,7 +29,7 @@ def copy_files(src, target_node, parent=None, name=None):
                 new_fileversion.save()
             attach_versions(cloned, [new_fileversion], src)
         else:
-            attach_versions(cloned, src.versions.all(), src)
+            attach_versions(cloned, fileversions, src)
 
     if renaming:
         latest_version = cloned.versions.first()
diff --git a/website/project/decorators.py b/website/project/decorators.py
index 0e165146250..2d60be5359b 100644
--- a/website/project/decorators.py
+++ b/website/project/decorators.py
@@ -173,25 +173,6 @@ def wrapped(*args, **kwargs):
     return wrapped
 
 
-def must_be_registration(func):
-
-    @functools.wraps(func)
-    def wrapped(*args, **kwargs):
-        _inject_nodes(kwargs)
-        node = kwargs['node']
-
-        if not node.is_registration:
-            raise HTTPError(
-                http_status.HTTP_400_BAD_REQUEST,
-                data={
-                    'message_short': 'Registered Nodes only',
-                    'message_long': 'This view is restricted to registered Nodes only',
-                }
-            )
-        return func(*args, **kwargs)
-
-    return wrapped
-
 
 def check_can_download_preprint_file(user, node):
     """View helper that returns whether a given user can download unpublished preprint files.
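Because `copy_files` now forwards `**version_filters` into `src.versions.filter(...)`, callers can restrict which file versions are cloned, and the `else` branch attaches the filtered queryset instead of `src.versions.all()`. A usage sketch; `file_node`, `registration`, and the cutoff are hypothetical, but any Django ORM lookup valid on the versions relation should work, since the kwargs go straight to `.filter()`:

from django.utils import timezone

# Hypothetical: clone a file into a registration, carrying over only the
# versions that existed before some cutoff.
cutoff = timezone.now()
cloned = copy_files(
    src=file_node,              # source File/Folder node
    target_node=registration,   # destination node
    created__lte=cutoff,        # consumed by src.versions.filter(**version_filters)
)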
diff --git a/website/project/licenses/__init__.py b/website/project/licenses/__init__.py index 69e34744a96..07095936cfe 100644 --- a/website/project/licenses/__init__.py +++ b/website/project/licenses/__init__.py @@ -6,7 +6,7 @@ from osf.utils import permissions -def set_license(node, license_detail, auth, node_type='node'): +def set_license(node, license_detail, auth, node_type='node', ignore_permission=False): NodeLicense = apps.get_model('osf.NodeLicense') NodeLicenseRecord = apps.get_model('osf.NodeLicenseRecord') @@ -26,7 +26,7 @@ def set_license(node, license_detail, auth, node_type='node'): ): return {}, False - if not node.has_permission(auth.user, permissions.WRITE): + if not ignore_permission and not node.has_permission(auth.user, permissions.WRITE): raise framework_exceptions.PermissionsError(f'You need admin or write permissions to change a {node_type}\'s license') try: diff --git a/website/project/views/register.py b/website/project/views/register.py index 11a5da7f53c..265fda1edea 100644 --- a/website/project/views/register.py +++ b/website/project/views/register.py @@ -7,17 +7,12 @@ from framework.exceptions import HTTPError from framework.flask import redirect # VOL-aware redirect -from framework.auth.decorators import must_be_signed - -from website.archiver import ARCHIVER_SUCCESS, ARCHIVER_FAILURE - -from addons.base.views import DOWNLOAD_ACTIONS from website import settings from osf.exceptions import NodeStateError from website.project.decorators import ( must_be_valid_project, must_be_contributor_or_public, must_have_permission, must_be_contributor_and_not_group_member, - must_not_be_registration, must_be_registration, + must_not_be_registration, must_not_be_retracted_registration ) from osf import features @@ -26,12 +21,10 @@ from osf.utils.permissions import ADMIN from website import language from website.ember_osf_web.decorators import ember_flag_is_active -from website.project import signals as project_signals from website.project.metadata.schemas import _id_to_name from website import util from website.project.metadata.utils import serialize_meta_schema from website.project.model import has_anonymous_link -from website.archiver.decorators import fail_archive_on_error from .node import _view_project from api.waffle.utils import flag_is_active @@ -228,28 +221,3 @@ def get_referent_by_identifier(category, value): if identifier.referent.url: return redirect(identifier.referent.url) raise HTTPError(http_status.HTTP_404_NOT_FOUND) - -@fail_archive_on_error -@must_be_signed -@must_be_registration -def registration_callbacks(node, payload, *args, **kwargs): - if payload.get('action', None) in DOWNLOAD_ACTIONS: - return {'status': 'success'} - errors = payload.get('errors') - src_provider = payload['source']['provider'] - if errors: - node.archive_job.update_target( - src_provider, - ARCHIVER_FAILURE, - errors=errors, - ) - else: - # Dataverse requires two seperate targets, one - # for draft files and one for published files - if src_provider == 'dataverse': - src_provider += '-' + (payload['destination']['name'].split(' ')[-1].lstrip('(').rstrip(')').strip()) - node.archive_job.update_target( - src_provider, - ARCHIVER_SUCCESS, - ) - project_signals.archive_callback.send(node) diff --git a/website/routes.py b/website/routes.py index f6144b09f50..7b0f325fa9f 100644 --- a/website/routes.py +++ b/website/routes.py @@ -1694,14 +1694,6 @@ def make_url_map(app): addon_views.create_waterbutler_log, json_renderer, ), - Rule( - [ - '/registration//callbacks/', - ], - 'put', - 
project_views.register.registration_callbacks, - json_renderer, - ), Rule( '/settings/addons/', 'post',