diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 95783dfe..972aa68b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.11.0.dev +current_version = 1.12.0.dev commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\.(?P[a-z]+))? diff --git a/.ci/ansible/settings.py.j2 b/.ci/ansible/settings.py.j2 index 1a76b5d5..16d3a3a5 100644 --- a/.ci/ansible/settings.py.j2 +++ b/.ci/ansible/settings.py.j2 @@ -8,6 +8,7 @@ TOKEN_SIGNATURE_ALGORITHM = "ES256" CACHE_ENABLED = True REDIS_HOST = "localhost" REDIS_PORT = 6379 +TELEMETRY = False {% if api_root is defined %} API_ROOT = {{ api_root | repr }} @@ -19,6 +20,12 @@ API_ROOT = {{ api_root | repr }} {% endfor %} {% endif %} +{% if pulp_scenario_settings is defined and pulp_scenario_settings %} +{% for key, value in pulp_scenario_settings.items() %} +{{ key | upper }} = {{ value | repr }} +{% endfor %} +{% endif %} + {% if stream_test | default(false) -%} REDIRECT_TO_OBJECT_STORAGE = False DEFAULT_FILE_STORAGE = "pulpcore.app.models.storage.PulpSFTPStorage" diff --git a/.ci/ansible/smash-config.json b/.ci/ansible/smash-config.json index cd0e7d7b..701d9b4e 100644 --- a/.ci/ansible/smash-config.json +++ b/.ci/ansible/smash-config.json @@ -6,7 +6,7 @@ ], "selinux enabled": false, "version": "3", - "aiohttp_fixtures_origin": "172.18.0.1" + "aiohttp_fixtures_origin": "127.0.0.1" }, "hosts": [ { @@ -26,7 +26,7 @@ "pulp workers": {}, "redis": {}, "shell": { - "transport": "docker" + "transport": "local" } } } diff --git a/.ci/ansible/start_container.yaml b/.ci/ansible/start_container.yaml index 09da1355..e23a1880 100644 --- a/.ci/ansible/start_container.yaml +++ b/.ci/ansible/start_container.yaml @@ -41,7 +41,7 @@ image: "{{ item.image }}" auto_remove: true recreate: true - privileged: false + privileged: true networks: - name: pulp_ci_bridge aliases: "{{ item.name }}" diff --git a/.ci/assets/bindings/.gitkeep b/.ci/assets/bindings/.gitkeep deleted file mode 100644 index 
e69de29b..00000000 diff --git a/.ci/scripts/tweet.py b/.ci/scripts/tweet.py new file mode 100755 index 00000000..fc175f5b --- /dev/null +++ b/.ci/scripts/tweet.py @@ -0,0 +1,16 @@ +import os +import sys +from tweepy import Client + +release_version = sys.argv[1] +if release_version.endswith(".0"): + client = Client( + consumer_key=os.getenv("TWITTER_API_KEY"), + consumer_secret=os.getenv("TWITTER_API_KEY_SECRET"), + access_token=os.getenv("TWITTER_ACCESS_TOKEN"), + access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"), + ) + link = "https://docs.pulpproject.org/pulp_file/changes.html" + msg = f"pulp_file-{release_version} - Check out for more details: {link}" + release_msg = f"Hey! We've just released {msg}" + client.create_tweet(text=release_msg) diff --git a/.ci/scripts/update_ci_branches.py b/.ci/scripts/update_ci_branches.py index 59a06beb..f0765969 100755 --- a/.ci/scripts/update_ci_branches.py +++ b/.ci/scripts/update_ci_branches.py @@ -1,34 +1,25 @@ +# WARNING: DO NOT EDIT! +# +# This file was generated by plugin_template, and is managed by it. Please use +# './plugin-template --github pulp_file' to update this file. 
+# +# For more info visit https://github.com/pulp/plugin_template + import os -import re import sys import requests -from packaging.version import Version -from git import Repo - -try: - initial_branch = sys.argv[1] -except IndexError: - initial_branch = None -repo = Repo(os.getcwd()) -heads = repo.git.ls_remote("--heads", "https://github.com/pulp/pulp_file.git").split("\n") -branches = [h.split("/")[-1] for h in heads if re.search(r"^([0-9]+)\.([0-9]+)$", h.split("/")[-1])] -branches.sort(key=lambda ver: Version(ver)) +branches = sys.argv[1:] headers = { "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}", "Accept": "application/vnd.github.v3+json", } -if not initial_branch or initial_branch not in branches: - exit("Initial branch not found") -else: - starting = branches.index(initial_branch) - github_api = "https://api.github.com" workflow_path = "/actions/workflows/update_ci.yml/dispatches" url = f"{github_api}/repos/pulp/pulp_file{workflow_path}" -for branch in branches[starting:]: +for branch in branches: print(f"Updating {branch}") requests.post(url, headers=headers, json={"ref": branch}) diff --git a/.ci/scripts/upper_bound.py b/.ci/scripts/upper_bound.py index e9d87fb2..64d4bbbe 100755 --- a/.ci/scripts/upper_bound.py +++ b/.ci/scripts/upper_bound.py @@ -1,13 +1,23 @@ +import warnings from pkg_resources import Requirement packages = [] -with open("requirements.txt", "r") as fd: - for line in fd.readlines(): - if not line.startswith("#"): - req = Requirement.parse(line) - spec = str(req.specs) - if len(req.specs) < 2 and "~=" not in spec and "==" not in spec and "<" not in spec: - packages.append(req.name) + +try: + with open("requirements.txt", "r") as fd: + for line in fd.readlines(): + if not line.startswith("#"): + req = Requirement.parse(line) + spec = str(req.specs) + if "~=" in spec: + warnings.warn(f"Please avoid using ~= on {req.name}") + continue + if len(req.specs) < 2 and "==" not in spec and "<" not in spec: + packages.append(req.name) 
+except FileNotFoundError: + # skip this test for plugins that don't use a requirements.txt + pass + if packages: raise RuntimeError( "The following packages are missing upper bound: {}".format(", ".join(packages)) diff --git a/.ci/scripts/validate_commit_message.py b/.ci/scripts/validate_commit_message.py index e982863a..fd6e1156 100755 --- a/.ci/scripts/validate_commit_message.py +++ b/.ci/scripts/validate_commit_message.py @@ -6,24 +6,24 @@ # For more info visit https://github.com/pulp/plugin_template import re -import subprocess import sys -import warnings from pathlib import Path +import subprocess import os +import warnings from github import Github NO_ISSUE = "[noissue]" CHANGELOG_EXTS = [".feature", ".bugfix", ".doc", ".removal", ".misc", ".deprecation"] +sha = sys.argv[1] +message = subprocess.check_output(["git", "log", "--format=%B", "-n 1", sha]).decode("utf-8") KEYWORDS = ["fixes", "closes"] -sha = sys.argv[1] -message = subprocess.check_output(["git", "log", "--format=%B", "-n 1", sha]).decode("utf-8") g = Github(os.environ.get("GITHUB_TOKEN")) repo = g.get_repo("pulp/pulp_file") diff --git a/flake8.cfg b/.flake8 similarity index 52% rename from flake8.cfg rename to .flake8 index d85fab0d..c7503a72 100644 --- a/flake8.cfg +++ b/.flake8 @@ -1,12 +1,23 @@ +# WARNING: DO NOT EDIT! +# +# This file was generated by plugin_template, and is managed by it. Please use +# './plugin-template --github pulp_file' to update this file. +# +# For more info visit https://github.com/pulp/plugin_template [flake8] -exclude = ./docs/*,*/migrations/*,*/tests/* -ignore = W503,Q000,Q003,D100,D104,D106,D200,D205,D400,D401,D402,D202 +exclude = ./docs/*,*/migrations/* +ignore = E203,W503,Q000,Q003,D100,D104,D106,D200,D205,D400,D401,D402 max-line-length = 100 +# Flake8 builtin codes +# -------------------- +# E203: no whitespace around ':'. 
disabled until https://github.com/PyCQA/pycodestyle/issues/373 is fixed +# W503: This enforces operators before line breaks which is not pep8 or black compatible. + # Flake8-quotes extension codes # ----------------------------- -# W503: This enforces operators before line breaks which is not pep8 or black compatible. # Q000: double or single quotes only, default is double (don't want to enforce this) +# Q003: Change outer quotes to avoid escaping inner quotes # Flake8-docstring extension codes # -------------------------------- @@ -14,6 +25,7 @@ max-line-length = 100 # D104: missing docstring in public package # D106: missing docstring in public nested class (complains about "class Meta:" and documenting those is silly) # D200: one-line docstring should fit on one line with quotes -# D202: no blank line after docstring--disabled until https://github.com/PyCQA/pydocstyle/issues/361 is fixed +# D205: 1 blank line required between summary line and description +# D400: First line should end with a period # D401: first line should be imperative (nitpicky) # D402: first line should not be the function’s “signature” (false positives) diff --git a/.github/stale.yml b/.github/stale.yml new file mode 100644 index 00000000..64ed195c --- /dev/null +++ b/.github/stale.yml @@ -0,0 +1,59 @@ +# WARNING: DO NOT EDIT! +# +# This file was generated by plugin_template, and is managed by it. Please use +# './plugin-template --github pulp_file' to update this file. +# +# For more info visit https://github.com/pulp/plugin_template +# Configuration for probot-stale - https://github.com/probot/stale + +# Number of days of inactivity before an Issue or Pull Request becomes stale +daysUntilStale: 90 + +# Number of days of inactivity before an Issue or Pull Request with the stale label is closed. +# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. 
+daysUntilClose: 30 + +# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) +onlyLabels: [] + +# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable +exemptLabels: + - security + - planned + +# Set to true to ignore issues in a project (defaults to false) +exemptProjects: false + +# Set to true to ignore issues in a milestone (defaults to false) +exemptMilestones: false + +# Set to true to ignore issues with an assignee (defaults to false) +exemptAssignees: false + +# Label to use when marking as stale +staleLabel: stale + +# Limit the number of actions per hour, from 1-30. Default is 30 +limitPerRun: 30 +# Limit to only `issues` or `pulls` +only: pulls + +pulls: + markComment: |- + This pull request has been marked 'stale' due to lack of recent activity. If there is no further activity, the PR will be closed in another 30 days. Thank you for your contribution! + + unmarkComment: >- + This pull request is no longer marked for closure. + + closeComment: >- + This pull request has been closed due to inactivity. If you feel this is in error, please reopen the pull request or file a new PR with the relevant details. + +issues: + markComment: |- + This issue has been marked 'stale' due to lack of recent activity. If there is no further activity, the issue will be closed in another 30 days. Thank you for your contribution! + + unmarkComment: >- + This issue is no longer marked for closure. + + closeComment: >- + This issue has been closed due to inactivity. If you feel this is in error, please reopen the issue or file a new issue with the relevant details. 
diff --git a/.github/template_gitref b/.github/template_gitref index 6e28718e..f37110ef 100644 --- a/.github/template_gitref +++ b/.github/template_gitref @@ -1 +1 @@ -2021.08.26-130-ge87b661 +2021.08.26-174-g23ccb65 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 99243b18..ec7702d9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,9 +42,9 @@ jobs: with: python-version: "3.8" - # dev_requirements contains tools needed for flake8, etc. + # lint_requirements contains tools needed for flake8, etc. - name: Install requirements - run: pip3 install -r dev_requirements.txt + run: pip3 install -r lint_requirements.txt - name: Check commit message if: github.event_name == 'pull_request' @@ -67,7 +67,7 @@ jobs: # Lint code. - name: Run flake8 - run: flake8 --config flake8.cfg + run: flake8 - name: Run extra lint checks run: "[ ! -x .ci/scripts/extra_linting.sh ] || .ci/scripts/extra_linting.sh" @@ -82,6 +82,9 @@ jobs: - name: Check for gettext problems run: sh .ci/scripts/check_gettext.sh + - name: Verify upper bound requirements + run: python .ci/scripts/upper_bound.py + test: runs-on: ubuntu-latest # run only after lint finishes @@ -156,11 +159,6 @@ jobs: run: .github/workflows/scripts/install_python_client.sh shell: bash - - name: Install Ruby client - if: ${{ env.TEST == 'bindings' }} - run: .github/workflows/scripts/install_ruby_client.sh - shell: bash - - name: Before Script run: .github/workflows/scripts/before_script.sh @@ -210,137 +208,7 @@ jobs: docker logs pulp || true docker exec pulp ls -latr /etc/yum.repos.d/ || true docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 list - - upgrade: - runs-on: ubuntu-latest - needs: lint - - strategy: - fail-fast: false - matrix: - env: - - TEST: upgrade - FROM_PULPCORE_BRANCH: "3.11" - FROM_PULP_CERTGUARD_BRANCH: "1.2" - FROM_PULP_FILE_BRANCH: "1.6" - outputs: - deprecations-upgrade: ${{ steps.deprecations.outputs.deprecations-upgrade }} - - steps: - - 
uses: actions/checkout@v3 - with: - # by default, it uses a depth of 1 - # this fetches all history so that we can read each commit - fetch-depth: 0 - - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - - - name: Install httpie - run: | - echo ::group::HTTPIE - pip install httpie - echo ::endgroup:: - echo "HTTPIE_CONFIG_DIR=$GITHUB_WORKSPACE/.ci/assets/httpie/" >> $GITHUB_ENV - - - name: Set environment variables - run: | - echo "TEST=${{ matrix.env.TEST }}" >> $GITHUB_ENV - echo "FROM_PULP_CERTGUARD_BRANCH=${{ matrix.env.FROM_PULP_CERTGUARD_BRANCH }}" >> $GITHUB_ENV - echo "FROM_PULP_FILE_BRANCH=${{ matrix.env.FROM_PULP_FILE_BRANCH }}" >> $GITHUB_ENV - echo "FROM_PULPCORE_BRANCH=${{ matrix.env.FROM_PULPCORE_BRANCH }}" >> $GITHUB_ENV - - - name: Install python dependencies - run: | - echo ::group::PYDEPS - pip install wheel - echo ::endgroup:: - - - name: Before Install - - run: .github/workflows/scripts/before_install.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - name: Install - - run: .github/workflows/scripts/install.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - name: Install Python client - - run: .github/workflows/scripts/install_python_client.sh - shell: bash - - - name: Before Script - - run: .github/workflows/scripts/before_script.sh - shell: bash - env: - PY_COLORS: '1' - 
ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - REDIS_DISABLED: ${{ contains('s3', matrix.env.TEST) }} - - - name: Setting secrets - - run: python3 .github/workflows/scripts/secrets.py "$SECRETS_CONTEXT" - env: - SECRETS_CONTEXT: ${{ toJson(secrets) }} - - - name: Upgrade Test - - run: .github/workflows/scripts/script.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - name: Extract Deprecations from Logs - id: deprecations - run: echo "::set-output name=deprecations-${{ matrix.env.TEST }}::$(docker logs pulp 2>&1 | grep -i pulpcore.deprecation | base64 -w 0)" - - - name: Logs - if: always() - run: | - echo "Need to debug? 
Please check: https://github.com/marketplace/actions/debugging-with-tmate" - http --timeout 30 --check-status --pretty format --print hb "https://pulp${PULP_API_ROOT}api/v3/status/" || true - docker images || true - docker ps -a || true - docker logs pulp || true - docker exec pulp ls -latr /etc/yum.repos.d/ || true - docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 list + docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" deprecations: runs-on: ubuntu-latest if: always() @@ -352,7 +220,6 @@ jobs: test -z "${{ needs.test.outputs.deprecations-stream }}" test -z "${{ needs.test.outputs.deprecations-azure }}" test -z "${{ needs.test.outputs.deprecations-s3 }}" - test -z "${{ needs.test.outputs.deprecations-upgrade }}" - name: Print deprecations if: failure() run: | @@ -360,6 +227,5 @@ jobs: echo "${{ needs.test.outputs.deprecations-stream }}" | base64 -d echo "${{ needs.test.outputs.deprecations-azure }}" | base64 -d echo "${{ needs.test.outputs.deprecations-s3 }}" | base64 -d - echo "${{ needs.test.outputs.deprecations-upgrade }}" | base64 -d diff --git a/.github/workflows/kanban.yml b/.github/workflows/kanban.yml new file mode 100644 index 00000000..0b2570cb --- /dev/null +++ b/.github/workflows/kanban.yml @@ -0,0 +1,103 @@ +# WARNING: DO NOT EDIT! +# +# This file was generated by plugin_template, and is managed by it. Please use +# './plugin-template --github pulp_file' to update this file. 
+# +# For more info visit https://github.com/pulp/plugin_template +# Manage issues in a project board using https://github.com/leonsteinhaeuser/project-beta-automations + +--- +name: Kanban +on: + pull_request_target: + issues: + types: + - labeled + - reopened + - assigned + - closed + +env: + free_to_take: Free to take + in_progress: In Progress + needs_review: Needs review + done: Done + +jobs: + # only prio-list labeled items should be added to the board + add-to-project-board: + if: github.event_name == 'issues' && contains(github.event.issue.labels.*.name, 'prio-list') && contains(fromJson('["labeled", "reopened"]'), github.event.action) + runs-on: ubuntu-latest + steps: + - name: Add issue to Free-to-take list + uses: leonsteinhaeuser/project-beta-automations@v2.0.0 + with: + gh_token: ${{ secrets.RELEASE_TOKEN }} + organization: pulp + project_id: 8 + resource_node_id: ${{ github.event.issue.node_id }} + operation_mode: status + status_value: ${{ env.free_to_take }} # Target status + + move-to-inprogress: + if: github.event_name == 'issues' && github.event.action == 'assigned' + runs-on: ubuntu-latest + steps: + - name: Move an issue to the In Progress column + uses: leonsteinhaeuser/project-beta-automations@v2.0.0 + with: + gh_token: ${{ secrets.RELEASE_TOKEN }} + organization: pulp + project_id: 8 + resource_node_id: ${{ github.event.issue.node_id }} + operation_mode: status + status_value: ${{ env.in_progress }} # Target status + + find-linked-issues: + if: github.event_name == 'pull_request_target' + runs-on: ubuntu-latest + name: Find issues linked to a PR + outputs: + linked-issues: ${{ steps.linked-issues.outputs.issues }} + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Get Linked Issues Action + uses: kin/gh-action-get-linked-issues@v1.0 + id: linked-issues + with: + access-token: ${{ secrets.RELEASE_TOKEN }} + + move-to-needs-review: + if: github.event_name == 'pull_request_target' && 
contains(fromJson(needs.find-linked-issues.outputs.linked-issues).*.issue.state, 'open') + runs-on: ubuntu-latest + name: Move linked issues to Needs Review + needs: find-linked-issues + strategy: + max-parallel: 3 + matrix: + issues: ${{ fromJSON(needs.find-linked-issues.outputs.linked-issues) }} + steps: + - name: Move to Needs Review + uses: leonsteinhaeuser/project-beta-automations@v2.0.0 + with: + gh_token: ${{ secrets.RELEASE_TOKEN }} + organization: pulp + project_id: 8 + resource_node_id: ${{ matrix.issues.issue.node_id }} + operation_mode: status + status_value: ${{ env.needs_review }} # Target status + + move-to-done: + if: github.event_name == 'issues' && github.event.action == 'closed' + runs-on: ubuntu-latest + steps: + - name: Move an issue to the Done column + uses: leonsteinhaeuser/project-beta-automations@v2.0.0 + with: + gh_token: ${{ secrets.RELEASE_TOKEN }} + organization: pulp + project_id: 8 + resource_node_id: ${{ github.event.issue.node_id }} + operation_mode: status + status_value: ${{ env.done }} # Target status diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 48f35b78..2f0e3202 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -167,7 +167,7 @@ jobs: docker logs pulp || true docker exec pulp ls -latr /etc/yum.repos.d/ || true docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 list + docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" changelog: runs-on: ubuntu-latest @@ -364,7 +364,7 @@ jobs: docker logs pulp || true docker exec pulp ls -latr /etc/yum.repos.d/ || true docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 list + docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" performance: runs-on: ubuntu-latest @@ -482,4 +482,4 @@ jobs: docker logs pulp || true docker exec pulp ls -latr /etc/yum.repos.d/ || true docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 
list + docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6a85ab14..89a8cefb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -240,7 +240,7 @@ jobs: docker logs pulp || true docker exec pulp ls -latr /etc/yum.repos.d/ || true docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 list + docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" publish: @@ -293,7 +293,7 @@ jobs: - name: Install python dependencies run: | echo ::group::PYDEPS - pip install gitpython python-redmine requests packaging + pip install gitpython python-redmine requests packaging tweepy echo ::endgroup:: - name: Push branch and tag to GitHub @@ -343,10 +343,15 @@ jobs: set -euv export COMMIT_MSG=$(git log --format=%B --no-merges -1) export GH_ISSUES=$(echo $COMMIT_MSG | grep -o "GH Issues: .*" | awk '{print $3}') + pip install pygithub echo "GH Issues $GH_ISSUES" python .ci/scripts/update_github.py + - name: Tweet + continue-on-error: true + run: python .ci/scripts/tweet.py ${{ github.event.inputs.release }} + - name: Create release on GitHub run: bash .github/workflows/scripts/create_release_from_tag.sh ${{ github.event.inputs.release }} diff --git a/.github/workflows/scripts/before_install.sh b/.github/workflows/scripts/before_install.sh index f7c0f169..b6f0bf18 100755 --- a/.github/workflows/scripts/before_install.sh +++ b/.github/workflows/scripts/before_install.sh @@ -30,17 +30,6 @@ fi COMMIT_MSG=$(git log --format=%B --no-merges -1) export COMMIT_MSG -if [[ "$TEST" == "upgrade" ]]; then - pip install -r functest_requirements.txt - git checkout -b ci_upgrade_test - cp -R .github /tmp/.github - cp -R .ci /tmp/.ci - git checkout $FROM_PULP_FILE_BRANCH - rm -rf .ci .github - cp -R /tmp/.github . - cp -R /tmp/.ci . 
-fi - if [[ "$TEST" == "plugin-from-pypi" ]]; then COMPONENT_VERSION=$(http https://pypi.org/pypi/pulp-file/json | jq -r '.info.version') else @@ -115,7 +104,7 @@ if [ -n "$PULP_CLI_PR_NUMBER" ]; then fi cd pulp-cli -pip install -e . +pip install . pulp config create --base-url https://pulp --location tests/cli.toml mkdir ~/.config/pulp cp tests/cli.toml ~/.config/pulp/cli.toml @@ -143,21 +132,6 @@ fi cd .. - -if [[ "$TEST" == "upgrade" ]]; then - cd pulp-certguard - git checkout -b ci_upgrade_test - git fetch --depth=1 origin heads/$FROM_PULP_CERTGUARD_BRANCH:$FROM_PULP_CERTGUARD_BRANCH - git checkout $FROM_PULP_CERTGUARD_BRANCH - cd .. - cd pulpcore - git checkout -b ci_upgrade_test - git fetch --depth=1 origin heads/$FROM_PULPCORE_BRANCH:$FROM_PULPCORE_BRANCH - git checkout $FROM_PULPCORE_BRANCH - cd .. -fi - - # Intall requirements for ansible playbooks pip install docker netaddr boto3 ansible diff --git a/.github/workflows/scripts/before_script.sh b/.github/workflows/scripts/before_script.sh index fa6a4c62..914a2838 100755 --- a/.github/workflows/scripts/before_script.sh +++ b/.github/workflows/scripts/before_script.sh @@ -29,14 +29,14 @@ tail -v -n +1 .ci/ansible/vars/main.yaml echo "PULP CONFIG:" tail -v -n +1 .ci/ansible/settings/settings.* ~/.config/pulp_smash/settings.json -SCENARIOS=("pulp" "performance" "upgrade" "azure" "s3" "stream" "plugin-from-pypi" "generate-bindings") +SCENARIOS=("pulp" "performance" "azure" "s3" "stream" "plugin-from-pypi" "generate-bindings") if [[ " ${SCENARIOS[*]} " =~ " ${TEST} " ]]; then # Many functional tests require these cmd_prefix dnf install -yq lsof which dnf-plugins-core fi if [[ "${REDIS_DISABLED:-false}" == true ]]; then - cmd_prefix bash -c "s6-svc -d /var/run/s6/services/redis" + cmd_prefix bash -c "s6-rc -d change redis" echo "The Redis service was disabled for $TEST" fi diff --git a/.github/workflows/scripts/func_test_script.sh b/.github/workflows/scripts/func_test_script.sh deleted file mode 100755 index 
2bcffe40..00000000 --- a/.github/workflows/scripts/func_test_script.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash -# coding=utf-8 - -set -mveuo pipefail - -pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulp_file.tests.functional -m "parallel and not nightly" -n 8 -pytest -v -r sx --color=yes --pyargs pulp_file.tests.functional -m "not parallel and not nightly" - -if [ "${GITHUB_REF##refs/tags/}" != "${GITHUB_REF}" ] -then - PULPCORE_VERSION=$(http http://pulp/pulp/api/v3/status/ | jq -r '.versions'[0].version) - cd ../pulpcore - git fetch origin refs/tags/${PULPCORE_VERSION} - git checkout FETCH_HEAD - cd ../pulp_file - if [ ${PULPCORE_VERSION::3} == "3.9" ] - then - # Temporarily need to downgrade pulp-smash to run pulpcore 3.9 tests - pip install 'pulp-smash==1!0.12.0' - fi -fi - -pip install -r ../pulpcore/functest_requirements.txt - -pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m "parallel and not nightly" -n 8 -pytest -v -r sx --color=yes --pyargs pulpcore.tests.functional -m "not parallel and not nightly" diff --git a/.github/workflows/scripts/install.sh b/.github/workflows/scripts/install.sh index fc834117..48a2e5d8 100755 --- a/.github/workflows/scripts/install.sh +++ b/.github/workflows/scripts/install.sh @@ -22,18 +22,15 @@ if [[ "$TEST" = "docs" || "$TEST" = "publish" ]]; then pip install -r doc_requirements.txt fi -pip install -e ../pulpcore -e ../pulp-certguard -pip install -r functest_requirements.txt - cd .ci/ansible/ TAG=ci_build - if [ -e $REPO_ROOT/../pulp-certguard ]; then PULP_CERTGUARD=./pulp-certguard else PULP_CERTGUARD=git+https://github.com/pulp/pulp-certguard.git@main fi +PULPCORE=./pulpcore if [[ "$TEST" == "plugin-from-pypi" ]]; then PLUGIN_NAME=pulp_file elif [[ "${RELEASE_WORKFLOW:-false}" == "true" ]]; then @@ -55,6 +52,8 @@ plugins: source: "${PLUGIN_NAME}" - name: pulp-certguard source: pulp-certguard + - name: pulp-smash + source: ./pulp-smash VARSYAML 
else cat >> vars/main.yaml << VARSYAML @@ -67,7 +66,9 @@ plugins: - name: pulp-certguard source: $PULP_CERTGUARD - name: pulpcore - source: ./pulpcore + source: "${PULPCORE}" + - name: pulp-smash + source: ./pulp-smash VARSYAML fi @@ -78,6 +79,10 @@ services: volumes: - ./settings:/etc/pulp - ./ssh:/keys/ + - ~/.config:/root/.config + - ../../../pulp-openapi-generator:/root/pulp-openapi-generator + env: + PULP_WORKERS: "4" VARSYAML cat >> vars/main.yaml << VARSYAML @@ -88,11 +93,7 @@ pulp_container_tag: https VARSYAML -if [ "$TEST" = "upgrade" ]; then - sed -i "/^pulp_container_tag:.*/s//pulp_container_tag: upgrade-https/" vars/main.yaml -fi - -SCENARIOS=("pulp" "performance" "upgrade" "azure" "s3" "stream" "plugin-from-pypi" "generate-bindings") +SCENARIOS=("pulp" "performance" "azure" "s3" "stream" "plugin-from-pypi" "generate-bindings") if [[ " ${SCENARIOS[*]} " =~ " ${TEST} " ]]; then sed -i -e '/^services:/a \ - name: pulp-fixtures\ @@ -106,7 +107,9 @@ if [ "$TEST" == 'stream' ]; then volumes:\ - ./ssh/id_ed25519.pub:/home/foo/.ssh/keys/id_ed25519.pub\ command: "foo::::storage"' vars/main.yaml - sed -i -e '$a stream_test: true' vars/main.yaml + sed -i -e '$a stream_test: true\ +pulp_scenario_settings: null\ +' vars/main.yaml fi if [ "$TEST" = "s3" ]; then @@ -121,7 +124,9 @@ if [ "$TEST" = "s3" ]; then command: "server /data"' vars/main.yaml sed -i -e '$a s3_test: true\ minio_access_key: "'$MINIO_ACCESS_KEY'"\ -minio_secret_key: "'$MINIO_SECRET_KEY'"' vars/main.yaml +minio_secret_key: "'$MINIO_SECRET_KEY'"\ +pulp_scenario_settings: {"hide_guarded_distributions": true}\ +' vars/main.yaml export PULP_API_ROOT="/rerouted/djnd/" fi @@ -138,7 +143,9 @@ if [ "$TEST" = "azure" ]; then volumes:\ - ./azurite:/etc/pulp\ command: "azurite-blob --blobHost 0.0.0.0 --cert /etc/pulp/azcert.pem --key /etc/pulp/azkey.pem"' vars/main.yaml - sed -i -e '$a azure_test: true' vars/main.yaml + sed -i -e '$a azure_test: true\ +pulp_scenario_settings: null\ +' vars/main.yaml fi echo 
"PULP_API_ROOT=${PULP_API_ROOT}" >> "$GITHUB_ENV" diff --git a/.github/workflows/scripts/install_python_client.sh b/.github/workflows/scripts/install_python_client.sh index 4a99439e..947322fd 100755 --- a/.github/workflows/scripts/install_python_client.sh +++ b/.github/workflows/scripts/install_python_client.sh @@ -7,7 +7,7 @@ # # For more info visit https://github.com/pulp/plugin_template -set -euv +set -mveuo pipefail export PULP_URL="${PULP_URL:-https://pulp}" @@ -30,7 +30,7 @@ export response=$(curl --write-out %{http_code} --silent --output /dev/null http if [ "$response" == "200" ]; then echo "pulp_file client $VERSION has already been released. Installing from PyPI." - pip install pulp-file-client==$VERSION + docker exec pulp pip3 install pulp-file-client==$VERSION mkdir -p dist tar cvf python-client.tar ./dist exit @@ -41,7 +41,7 @@ rm -rf pulp_file-client ./generate.sh pulp_file python $VERSION cd pulp_file-client python setup.py sdist bdist_wheel --python-tag py3 -find . -name "*.whl" -exec pip install {} \; +find . 
-name "*.whl" -exec docker exec pulp pip3 install /root/pulp-openapi-generator/pulp_file-client/{} \; tar cvf ../../pulp_file/python-client.tar ./dist find ./docs/* -exec sed -i 's/Back to README/Back to HOME/g' {} \; diff --git a/.github/workflows/scripts/script.sh b/.github/workflows/scripts/script.sh index 96725267..6083b3bc 100755 --- a/.github/workflows/scripts/script.sh +++ b/.github/workflows/scripts/script.sh @@ -61,121 +61,46 @@ if [[ "$TEST" == "plugin-from-pypi" ]]; then git checkout ${COMPONENT_VERSION} -- pulp_file/tests/ fi +echo "machine pulp +login admin +password password +" | cmd_stdin_prefix bash -c "cat > /root/.netrc" +cmd_stdin_prefix bash -c "chmod og-rw /root/.netrc" + +cat unittest_requirements.txt | cmd_stdin_prefix bash -c "cat > /tmp/unittest_requirements.txt" +cat functest_requirements.txt | cmd_stdin_prefix bash -c "cat > /tmp/functest_requirements.txt" +cmd_prefix pip3 install -r /tmp/unittest_requirements.txt +cmd_prefix pip3 install -r /tmp/functest_requirements.txt +cmd_prefix pip3 install --upgrade ../pulp-smash + cd ../pulp-openapi-generator +./generate.sh pulp_file python +cmd_prefix pip3 install /root/pulp-openapi-generator/pulp_file-client +sudo rm -rf ./pulp_file-client ./generate.sh pulpcore python -pip install ./pulpcore-client -rm -rf ./pulpcore-client -if [[ "$TEST" = 'bindings' ]]; then - ./generate.sh pulpcore ruby 0 - cd pulpcore-client - gem build pulpcore_client.gemspec - gem install --both ./pulpcore_client-0.gem -fi +cmd_prefix pip3 install /root/pulp-openapi-generator/pulpcore-client +sudo rm -rf ./pulpcore-client ./generate.sh pulp_certguard python -pip install ./pulp_certguard-client -rm -rf ./pulp_certguard-client -if [[ "$TEST" = 'bindings' ]]; then - ./generate.sh pulp-certguard ruby 0 - cd pulp-certguard-client - gem build pulp-certguard_client.gemspec - gem install --both ./pulp-certguard_client-0.gem - cd .. 
-fi +cmd_prefix pip3 install /root/pulp-openapi-generator/pulp_certguard-client +sudo rm -rf ./pulp_certguard-client cd $REPO_ROOT -if [[ "$TEST" = 'bindings' ]]; then - if [ -f $REPO_ROOT/.ci/assets/bindings/test_bindings.py ]; then - python $REPO_ROOT/.ci/assets/bindings/test_bindings.py - fi - if [ -f $REPO_ROOT/.ci/assets/bindings/test_bindings.rb ]; then - ruby $REPO_ROOT/.ci/assets/bindings/test_bindings.rb - fi - exit -fi - -cat unittest_requirements.txt | cmd_stdin_prefix bash -c "cat > /tmp/unittest_requirements.txt" -cmd_prefix pip3 install -r /tmp/unittest_requirements.txt +CERTIFI=$(cmd_prefix python3 -c 'import certifi; print(certifi.where())') +cmd_prefix bash -c "cat /etc/pulp/certs/pulp_webserver.crt | tee -a "$CERTIFI" > /dev/null" # check for any uncommitted migrations echo "Checking for uncommitted migrations..." cmd_prefix bash -c "django-admin makemigrations --check --dry-run" -if [[ "$TEST" != "upgrade" ]]; then - # Run unit tests. - cmd_prefix bash -c "PULP_DATABASES__default__USER=postgres pytest -v -r sx --color=yes -p no:pulpcore --pyargs pulp_file.tests.unit" -fi +# Run unit tests. +cmd_prefix bash -c "PULP_DATABASES__default__USER=postgres pytest -v -r sx --color=yes -p no:pulpcore --pyargs pulp_file.tests.unit" # Run functional tests -export PYTHONPATH=$REPO_ROOT/../pulp-certguard${PYTHONPATH:+:${PYTHONPATH}} -export PYTHONPATH=$REPO_ROOT${PYTHONPATH:+:${PYTHONPATH}} - - -if [[ "$TEST" == "upgrade" ]]; then - # Handle app label change: - sed -i "/require_pulp_plugins(/d" pulp_file/tests/functional/utils.py - - # Running pre upgrade tests: - pytest -v -r sx --color=yes --pyargs --capture=no pulp_file.tests.upgrade.pre - - # Checking out ci_upgrade_test branch and upgrading plugins - cmd_prefix bash -c "cd pulpcore; git checkout -f ci_upgrade_test; pip install --upgrade --force-reinstall ." - cmd_prefix bash -c "cd pulp-certguard; git checkout -f ci_upgrade_test; pip install ." 
- cmd_prefix bash -c "cd pulp_file; git checkout -f ci_upgrade_test; pip install ." - - # Migrating - cmd_prefix bash -c "django-admin migrate --no-input" - - # Restarting single container services - cmd_prefix bash -c "s6-svc -r /var/run/s6/services/pulpcore-api" - cmd_prefix bash -c "s6-svc -r /var/run/s6/services/pulpcore-content" - cmd_prefix bash -c "s6-svc -d /var/run/s6/services/pulpcore-resource-manager" - cmd_prefix bash -c "s6-svc -d /var/run/s6/services/pulpcore-worker@1" - cmd_prefix bash -c "s6-svc -d /var/run/s6/services/pulpcore-worker@2" - cmd_prefix bash -c "s6-svc -u /var/run/s6/services/new-pulpcore-resource-manager" - cmd_prefix bash -c "s6-svc -u /var/run/s6/services/new-pulpcore-worker@1" - cmd_prefix bash -c "s6-svc -u /var/run/s6/services/new-pulpcore-worker@2" - - echo "Restarting in 60 seconds" - sleep 60 - - # Let's reinstall pulpcore so we can ensure we have the correct dependencies - cd ../pulpcore - git checkout -f ci_upgrade_test - pip install --upgrade --force-reinstall . 
../pulp-cli ../pulp-smash - # Hack: adding pulp CA to certifi.where() - CERTIFI=$(python -c 'import certifi; print(certifi.where())') - cat /usr/local/share/ca-certificates/pulp_webserver.crt | sudo tee -a "$CERTIFI" > /dev/null - # CLI commands to display plugin versions and content data - pulp status - pulp content list - CONTENT_LENGTH=$(pulp content list | jq length) - if [[ "$CONTENT_LENGTH" == "0" ]]; then - echo "Empty content list" - exit 1 - fi - - # Rebuilding bindings - cd ../pulp-openapi-generator - ./generate.sh pulpcore python - pip install ./pulpcore-client - ./generate.sh pulp_file python - pip install ./pulp_file-client - ./generate.sh pulp_certguard python - pip install ./pulp_certguard-client - cd $REPO_ROOT - - # Running post upgrade tests - git checkout ci_upgrade_test -- pulp_file/tests/ - pytest -v -r sx --color=yes --pyargs --capture=no pulp_file.tests.upgrade.post - exit -fi - - if [[ "$TEST" == "performance" ]]; then if [[ -z ${PERFORMANCE_TEST+x} ]]; then - pytest -vv -r sx --color=yes --pyargs --capture=no --durations=0 pulp_file.tests.performance + cmd_prefix bash -c "pytest -vv -r sx --color=yes --pyargs --capture=no --durations=0 pulp_file.tests.performance" else - pytest -vv -r sx --color=yes --pyargs --capture=no --durations=0 pulp_file.tests.performance.test_$PERFORMANCE_TEST + cmd_prefix bash -c "pytest -vv -r sx --color=yes --pyargs --capture=no --durations=0 pulp_file.tests.performance.test_$PERFORMANCE_TEST" fi exit fi @@ -184,27 +109,28 @@ if [ -f $FUNC_TEST_SCRIPT ]; then source $FUNC_TEST_SCRIPT else - if [[ "$GITHUB_WORKFLOW" == "File Nightly CI/CD" ]]; then - pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulp_file.tests.functional -m parallel -n 8 - pytest -v -r sx --color=yes --pyargs pulp_file.tests.functional -m "not parallel" + if [[ "$GITHUB_WORKFLOW" == "File Nightly CI/CD" ]] || [[ "${RELEASE_WORKFLOW:-false}" == "true" ]]; then + cmd_prefix bash -c "pytest -v -r sx --color=yes 
--suppress-no-test-exit-code --pyargs pulp_file.tests.functional -m parallel -n 8 --nightly" + cmd_prefix bash -c "pytest -v -r sx --color=yes --pyargs pulp_file.tests.functional -m 'not parallel' --nightly" - pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m "from_pulpcore_for_all_plugins and parallel" -n 8 - pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m "from_pulpcore_for_all_plugins and not parallel" + cmd_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m 'from_pulpcore_for_all_plugins and parallel' -n 8 --nightly" + cmd_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m 'from_pulpcore_for_all_plugins and not parallel' --nightly" else - pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulp_file.tests.functional -m "parallel and not nightly" -n 8 - pytest -v -r sx --color=yes --pyargs pulp_file.tests.functional -m "not parallel and not nightly" + cmd_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulp_file.tests.functional -m parallel -n 8" + cmd_prefix bash -c "pytest -v -r sx --color=yes --pyargs pulp_file.tests.functional -m 'not parallel'" - pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m "from_pulpcore_for_all_plugins and not nightly and parallel" -n 8 - pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m "from_pulpcore_for_all_plugins and not nightly and not parallel" + cmd_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m 'from_pulpcore_for_all_plugins and parallel' -n 8" + cmd_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs pulpcore.tests.functional -m 'from_pulpcore_for_all_plugins and not parallel'" fi fi 
export PULP_FIXTURES_URL="http://pulp-fixtures:8080" pushd ../pulp-cli +pip install -r test_requirements.txt pytest -v -m pulp_file popd diff --git a/.github/workflows/update_ci.yml b/.github/workflows/update_ci.yml index 3fbdb6c1..3f80e545 100644 --- a/.github/workflows/update_ci.yml +++ b/.github/workflows/update_ci.yml @@ -54,12 +54,6 @@ jobs: id: vars run: echo ::set-output name=short_ref::${GITHUB_REF#refs/*/} - - name: Dispatching - if: github.event_name == 'schedule' || github.event.inputs.all_branches == 'yes' - run: python .ci/scripts/update_ci_branches.py None - env: - GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} - - name: Run update run: | .github/workflows/scripts/update_ci.sh diff --git a/CHANGES.rst b/CHANGES.rst index c0a39579..910ca832 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -13,6 +13,85 @@ Changelog .. towncrier release notes start +1.11.1 (2022-08-01) +=================== + + +No significant changes. + + +---- + + +1.11.0 (2022-07-28) +=================== + + +Features +-------- + +- Added Role Based Access Control for each endpoint. + * New default roles (creator, owner, viewer) have been added for ``FileRepository``, ``FileRemote``, + ``FileDistribution``, ``FilePublication``, and ``FileAlternateContentSource``. + * New detail role management endpoints (``my_permissions``, ``list_roles``, ``add_role``, + ``remove_role``) have been added to each endpoint. + `#626 `__ +- File Content is now scoped based on repositories the user has permission to see. + `#724 `__ +- Added new condition on file uploads to require ``repository`` field if user is not an admin. + `#729 `__ + + +Bugfixes +-------- + +- Improved the error message shown when a user specifies an invalid path to the manifest file, or the manifest file is in the incorrect format. + `#605 `__ +- The relative_path field of PULP_MANIFEST can now contain commas, since they are valid filename characters in both Linux and Windows filesystems. 
+ `#630 `__ +- Fixed a bug where publish used /tmp/ instead of the worker working directory. + `#676 `__ + + +Misc +---- + +- `#691 `__ + + +---- + + +1.10.5 (2022-08-16) +=================== + + +No significant changes. + + +---- + + +1.10.4 (2022-08-15) +=================== + + +No significant changes. + + +---- + + +1.10.3 (2022-06-22) +=================== + + +No significant changes. + + +---- + + 1.10.2 (2022-02-23) =================== diff --git a/CHANGES/626.feature b/CHANGES/626.feature deleted file mode 100644 index 39ab937c..00000000 --- a/CHANGES/626.feature +++ /dev/null @@ -1,5 +0,0 @@ -Added Role Based Access Control for each endpoint. -* New default roles (creator, owner, viewer) have been added for ``FileRepository``, ``FileRemote``, -``FileDistribution``, ``FilePublication``, and ``FileAlternateContentSource``. -* New detail role management endpoints (``my_permissions``, ``list_roles``, ``add_role``, -``remove_role``) have been added to each endpoint. diff --git a/CHANGES/676.bugfix b/CHANGES/676.bugfix deleted file mode 100644 index 18c8e2b1..00000000 --- a/CHANGES/676.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug where publish used /tmp/ instead of the worker working directory. diff --git a/CHANGES/691.misc b/CHANGES/691.misc deleted file mode 100644 index f84de8e6..00000000 --- a/CHANGES/691.misc +++ /dev/null @@ -1 +0,0 @@ -Added random file repository generation for pytest server fixtures. diff --git a/CHANGES/747.feature b/CHANGES/747.feature new file mode 100644 index 00000000..4659bd3f --- /dev/null +++ b/CHANGES/747.feature @@ -0,0 +1 @@ +Added a permission check on the used upload in the single shot content creation call. diff --git a/CHANGES/774.feature b/CHANGES/774.feature new file mode 100644 index 00000000..1e71113a --- /dev/null +++ b/CHANGES/774.feature @@ -0,0 +1 @@ +The upload feature was changed to accept already existing file content. This allows multiple users to own identical files. 
diff --git a/docs/conf.py b/docs/conf.py index 3f1e258b..4838f564 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,9 @@ # built documents. # # The short X.Y version. -version = "1.11.0.dev" +version = "1.12.0.dev" # The full version, including alpha/beta/rc tags. -release = "1.11.0.dev" +release = "1.12.0.dev" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/role-based-access-control.rst b/docs/role-based-access-control.rst index fb9cd769..bbe01308 100644 --- a/docs/role-based-access-control.rst +++ b/docs/role-based-access-control.rst @@ -178,11 +178,12 @@ Content and RepositoryVersions Permissions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ File Content and RepositoryVersions are unique as they do not have any default roles on their -viewsets. Content's access policy allows any authenticated user to view and create file -content. RepositoryVersions' access policy requires the user to have permissions on the parent -repository in order to perform actions on the repository version. Both objects have CRD permissions -in the database that can be assigned to users, but currently their access policies do not use them -for authorization. +viewsets. Content's access policy allows any authenticated user to create file content, however +they must specify the repository to upload to since viewing content is scoped by the repositories +the user has permission for. RepositoryVersions' access policy requires the user to have +permissions on the parent repository in order to perform actions on the repository version. Both +objects have CRD permissions in the database that can be assigned to users, but currently their +access policies do not use them for authorization. 
Creating New Roles ------------------ diff --git a/docs/workflows/alternate-content-source.rst b/docs/workflows/alternate-content-source.rst index bfc325c5..46482b15 100644 --- a/docs/workflows/alternate-content-source.rst +++ b/docs/workflows/alternate-content-source.rst @@ -6,7 +6,7 @@ ACS. .. code-block:: bash - pulp file remote create --name remoteForACS --policy on_demand --url http://fixtures.pulpproject.org/ + pulp file remote create --name remoteForACS --policy on_demand --url https://fixtures.pulpproject.org/file-manifest/PULP_MANIFEST Create Alternate Content Source ------------------------------- diff --git a/lint_requirements.txt b/lint_requirements.txt new file mode 100644 index 00000000..2ed62278 --- /dev/null +++ b/lint_requirements.txt @@ -0,0 +1,6 @@ +# python packages handy for developers, but not required by pulp +black +check-manifest +flake8 +flake8-black + diff --git a/pulp_file/app/__init__.py b/pulp_file/app/__init__.py index cd846eed..297a0c50 100644 --- a/pulp_file/app/__init__.py +++ b/pulp_file/app/__init__.py @@ -8,4 +8,5 @@ class PulpFilePluginAppConfig(PulpPluginAppConfig): name = "pulp_file.app" label = "file" - version = "1.11.0.dev" + version = "1.12.0.dev" + python_package_name = "pulp-file" diff --git a/pulp_file/app/serializers.py b/pulp_file/app/serializers.py index c1752e9d..f4ff08a9 100644 --- a/pulp_file/app/serializers.py +++ b/pulp_file/app/serializers.py @@ -35,21 +35,13 @@ def deferred_validate(self, data): data["digest"] = data["artifact"].sha256 + return data + + def retrieve(self, validated_data): content = FileContent.objects.filter( - digest=data["digest"], relative_path=data["relative_path"] + digest=validated_data["digest"], relative_path=validated_data["relative_path"] ) - - if content.exists(): - content.get().touch() - - raise serializers.ValidationError( - _( - "There is already a file content with relative path '{path}' and digest " - "'{digest}'." 
- ).format(path=data["relative_path"], digest=data["digest"]) - ) - - return data + return content.first() class Meta: fields = ( diff --git a/pulp_file/app/tasks/publishing.py b/pulp_file/app/tasks/publishing.py index 8082d171..13949d44 100644 --- a/pulp_file/app/tasks/publishing.py +++ b/pulp_file/app/tasks/publishing.py @@ -40,7 +40,7 @@ def publish(manifest, repository_version_pk): with FilePublication.create(repo_version, pass_through=True) as publication: publication.manifest = manifest manifest = Manifest(manifest) - manifest.write(populate(publication)) + manifest.write(yield_entries_for_version(repo_version)) PublishedMetadata.create_from_file( file=File(open(manifest.relative_path, "rb")), publication=publication ) @@ -50,23 +50,22 @@ def publish(manifest, repository_version_pk): return publication -def populate(publication): +def yield_entries_for_version(repo_version): """ - Populate a publication. - - Create published artifacts and yield a Manifest Entry for each. + Yield a Manifest Entry for every content in the repository version. Args: - publication (pulpcore.plugin.models.Publication): A Publication to populate. + repo_version (pulpcore.plugin.models.RepositoryVersion): + A RepositoryVersion to manifest entries for. Yields: Entry: Each manifest entry. 
""" - content_artifacts = ContentArtifact.objects.filter( - content__in=publication.repository_version.content - ).order_by("-content__pulp_created") + content_artifacts = ContentArtifact.objects.filter(content__in=repo_version.content).order_by( + "-content__pulp_created" + ) for content_artifact in content_artifacts.select_related("artifact").iterator(): if content_artifact.artifact: diff --git a/pulp_file/app/viewsets.py b/pulp_file/app/viewsets.py index c8919800..ef48b715 100644 --- a/pulp_file/app/viewsets.py +++ b/pulp_file/app/viewsets.py @@ -77,11 +77,22 @@ class FileContentViewSet(SingleArtifactContentUploadViewSet): DEFAULT_ACCESS_POLICY = { "statements": [ { - "action": ["list", "retrieve", "create"], + "action": ["list", "retrieve"], + "principal": "authenticated", + "effect": "allow", + }, + { + "action": ["create"], "principal": "authenticated", "effect": "allow", + "condition": [ + "has_required_repo_perms_on_upload:file.modify_filerepository", + "has_required_repo_perms_on_upload:file.view_filerepository", + "has_upload_param_model_or_obj_perms:core.change_upload", + ], }, ], + "queryset_scoping": {"function": "scope_queryset"}, } @@ -255,7 +266,6 @@ class FileRepositoryVersionViewSet(RepositoryVersionViewSet): ], }, ], - "queryset_scoping": {"function": "scope_queryset"}, } diff --git a/pulp_file/manifest.py b/pulp_file/manifest.py index 4da24495..146542ea 100644 --- a/pulp_file/manifest.py +++ b/pulp_file/manifest.py @@ -2,6 +2,8 @@ from gettext import gettext as _ +from re import fullmatch + Line = namedtuple("Line", ("number", "content")) @@ -49,14 +51,24 @@ def parse(line): ValueError: on parsing error. 
""" - part = [s.strip() for s in line.content.split(",")] - if len(part) != 3: + all_parts = line.content.count(",") >= 2 + if all_parts: + relative_path, digest, size = [s.strip() for s in line.content.rsplit(",", maxsplit=2)] + if ( + not all_parts + or not fullmatch(r"^[^/]+(/[^/]+)*$", relative_path) + or not fullmatch(r"^[0-9a-fA-F]+$", digest) + or not size.isdigit() + ): raise ValueError( - _("Error: manifest line:{n}: " "must be: ,,").format( - n=line.number - ) + _( + "Error: Parsing of the manifest file failed on line:{n}.\n" + "Please make sure the remote URL is pointing to a valid manifest file.\n" + "The manifest file should be " + "composed of lines in the following format: ,,." + ).format(n=line.number) ) - return Entry(relative_path=part[0], digest=part[1], size=int(part[2])) + return Entry(relative_path=relative_path, digest=digest, size=int(size)) def __str__(self): """ diff --git a/pulp_file/tests/functional/__init__.py b/pulp_file/tests/functional/__init__.py index 8b2696c7..ace249f0 100644 --- a/pulp_file/tests/functional/__init__.py +++ b/pulp_file/tests/functional/__init__.py @@ -1,2 +1 @@ -# coding=utf-8 """Tests for file plugin.""" diff --git a/pulp_file/tests/functional/api/__init__.py b/pulp_file/tests/functional/api/__init__.py index 44743b3d..a60f4760 100644 --- a/pulp_file/tests/functional/api/__init__.py +++ b/pulp_file/tests/functional/api/__init__.py @@ -1,2 +1 @@ -# coding=utf-8 """Tests that communicate with file plugin via the v3 API.""" diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_acs.py b/pulp_file/tests/functional/api/from_pulpcore/test_acs.py deleted file mode 100644 index 445b95b7..00000000 --- a/pulp_file/tests/functional/api/from_pulpcore/test_acs.py +++ /dev/null @@ -1,107 +0,0 @@ -import unittest - -from pulp_smash import config -from pulp_smash.pulp3.bindings import delete_orphans, monitor_task - -from pulpcore.client.pulp_file import AcsFileApi, RemotesFileApi -from 
pulpcore.client.pulp_file.exceptions import ApiException - -from pulp_file.tests.functional.utils import ( - gen_file_client, - gen_file_remote, -) - - -class AlternateContentSourceTestCase(unittest.TestCase): - @classmethod - def setUpClass(cls): - """ - Create class-wide variables. - - Variables 'paths' and 'paths_updated' are defined as strings. - In same way data are send from user. - """ - cls.cfg = config.get_config() - cls.file_client = gen_file_client() - cls.file_remote_api = RemotesFileApi(cls.file_client) - cls.file_acs_api = AcsFileApi(cls.file_client) - cls.paths = ["backupone/PULP_MANIFEST", "backuptwo/manifest"] - cls.paths_updated = ["backupone/test", "anotherbackup/PULP_MANIFEST"] - - @classmethod - def tearDownClass(cls): - delete_orphans() - - def test_create(self): - """ - Basic ACS create. - - 1. Try and fail to create ACS with remote with immediate policy - 2. Create ACS and check it exists - """ - remote_bad = self.file_remote_api.create(gen_file_remote()) - remote = self.file_remote_api.create(gen_file_remote(policy="on_demand")) - self.addCleanup(self.file_remote_api.delete, remote_bad.pulp_href) - self.addCleanup(self.file_remote_api.delete, remote.pulp_href) - - acs_data = { - "name": "alternatecontentsource", - "remote": remote_bad.pulp_href, - "paths": self.paths, - } - with self.assertRaises(ApiException) as ctx: - self.file_acs_api.create(acs_data) - self.assertEqual(ctx.exception.status, 400) - - acs_data["remote"] = remote.pulp_href - - acs = self.file_acs_api.create(acs_data) - self.addCleanup(self.file_acs_api.delete, acs.pulp_href) - - self.assertEqual(len(self.file_acs_api.list(name="alternatecontentsource").results), 1) - - def test_acs_update(self): - """ - ACS update. - - Test of update name and paths. 
- """ - remote = self.file_remote_api.create(gen_file_remote(policy="on_demand")) - self.addCleanup(self.file_remote_api.delete, remote.pulp_href) - - acs_data = { - "name": "alternatecontentsource", - "remote": remote.pulp_href, - "paths": self.paths, - } - acs = self.file_acs_api.create(acs_data) - self.addCleanup(self.file_acs_api.delete, acs.pulp_href) - - # update name - new_name = "acs" - response = self.file_acs_api.update(acs.pulp_href, {"name": new_name, "remote": acs.remote}) - monitor_task(response.task) - acs = self.file_acs_api.read(acs.pulp_href) - - self.assertEqual(acs.name, new_name) - # assert paths were not silently removed during name update - self.assertEqual(sorted(acs.paths), sorted(self.paths)) - - # partial update name - new_name = "new_acs" - response = self.file_acs_api.partial_update( - acs.pulp_href, {"name": new_name, "remote": acs.remote} - ) - monitor_task(response.task) - acs = self.file_acs_api.read(acs.pulp_href) - - self.assertEqual(acs.name, new_name) - - # update paths - response = self.file_acs_api.update( - acs.pulp_href, {"name": acs.name, "remote": acs.remote, "paths": self.paths_updated} - ) - monitor_task(response.task) - acs = self.file_acs_api.read(acs.pulp_href) - - self.assertEqual(sorted(acs.paths), sorted(self.paths_updated)) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py index 17b0fd25..d1f4ece0 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_cache.py @@ -1,179 +1,139 @@ """Tests related to content cache.""" -import requests -import unittest +import pytest from urllib.parse import urljoin -from pulp_smash.pulp3.bindings import monitor_task, PulpTaskError -from pulp_smash.pulp3.utils import gen_distribution, gen_repo +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_distribution 
from pulpcore.client.pulp_file import ( - ContentFilesApi, RepositoryAddRemoveContent, RepositorySyncURL, - RepositoriesFileApi, - RemotesFileApi, - PublicationsFileApi, FileFilePublication, - DistributionsFileApi, PatchedfileFileDistribution, ) -from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote, get_redis_status +from pulp_file.tests.functional.utils import ( + get_redis_status, + get_url, +) from .constants import PULP_CONTENT_BASE_URL is_redis_connected = get_redis_status() -@unittest.skipUnless(is_redis_connected, "Could not connect to the Redis server") -class ContentCacheTestCache(unittest.TestCase): - """Test content cache""" - - @classmethod - def setUpClass(cls): - """Sets up class""" - client = gen_file_client() - cls.cont_api = ContentFilesApi(client) - cls.repo_api = RepositoriesFileApi(client) - cls.remote_api = RemotesFileApi(client) - cls.pub_api = PublicationsFileApi(client) - cls.dis_api = DistributionsFileApi(client) - - def setUp(self): - self.repo = self.repo_api.create(gen_repo(autopublish=True)) - self.remote = self.remote_api.create(gen_file_remote()) - - body = RepositorySyncURL(remote=self.remote.pulp_href) - created = monitor_task(self.repo_api.sync(self.repo.pulp_href, body).task).created_resources - self.repo = self.repo_api.read(self.repo.pulp_href) - self.pub1 = self.pub_api.read(created[1]) - body = FileFilePublication(repository=self.repo.pulp_href) - self.pub2 = self.pub_api.read( - monitor_task(self.pub_api.create(body).task).created_resources[0] - ) - self.pub3 = [] - response = self.dis_api.create(gen_distribution(repository=self.repo.pulp_href)) - self.distro = self.dis_api.read(monitor_task(response.task).created_resources[0]) - self.distro2 = [] - self.url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro.base_path}/") - - def tearDown(self): - tasks = [] - tasks.append(self.remote_api.delete(self.remote.pulp_href).task) - tasks.append(self.dis_api.delete(self.distro.pulp_href).task) - try: - 
tasks.append(self.repo_api.delete(self.repo.pulp_href).task) - except: - pass - for task_href in tasks: - try: - monitor_task(task_href) - except PulpTaskError: - pass - - def test_content_cache_workflow(self): - self._basic_cache_access() - self._remove_repository_invalidates() - self._restore_repository() - self._multiple_distributions() - self._invalidate_multiple_distributions() - self._delete_distribution_invalidates_one() - self._delete_extra_pub_doesnt_invalidate() - self._delete_served_pub_does_invalidate() - self._delete_repo_invalidates() - self._no_error_when_accessing_invalid_file() - - def _basic_cache_access(self): - """Checks responses are cached for content""" - files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] - for i, file in enumerate(files): - self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) - - def _remove_repository_invalidates(self): - """Checks removing repository from distribution invalidates the cache""" - body = PatchedfileFileDistribution(repository="") - monitor_task(self.dis_api.partial_update(self.distro.pulp_href, body).task) - files = ["", "PULP_MANIFEST", "1.iso"] - for file in files: - self.assertEqual((404, None), self._check_cache(file), file) - - def _restore_repository(self): - """Checks that responses are cacheable after repository is added back""" - body = PatchedfileFileDistribution(repository=self.repo.pulp_href) - monitor_task(self.dis_api.partial_update(self.distro.pulp_href, body).task) - files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] - for i, file in enumerate(files): - self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) - - def _multiple_distributions(self): - """Add a new distribution and check that its responses are cached separately""" - response = self.dis_api.create(gen_distribution(repository=self.repo.pulp_href)) - 
self.distro2.append(self.dis_api.read(monitor_task(response.task).created_resources[0])) - url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro2[0].base_path}/") - files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] - for i, file in enumerate(files): - self.assertEqual( - (200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file, url), file - ) - - def _invalidate_multiple_distributions(self): - """Test that updating a repository pointed by multiple distributions invalidates all""" - url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro2[0].base_path}/") - cfile = self.cont_api.list( - relative_path="1.iso", repository_version=self.repo.latest_version_href - ).results[0] - body = RepositoryAddRemoveContent(remove_content_units=[cfile.pulp_href]) - response = monitor_task(self.repo_api.modify(self.repo.pulp_href, body).task) - self.pub3.append(self.pub_api.read(response.created_resources[1])) - files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"] - for i, file in enumerate(files): - self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) - self.assertEqual( - (200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file, url), file - ) - - def _delete_distribution_invalidates_one(self): - """Tests that deleting one distribution sharing a repository only invalidates its cache""" - url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro2[0].base_path}/") - monitor_task(self.dis_api.delete(self.distro2[0].pulp_href).task) - files = ["", "PULP_MANIFEST", "2.iso"] - for file in files: - self.assertEqual((200, "HIT"), self._check_cache(file), file) - self.assertEqual((404, None), self._check_cache(file, url), file) - - def _delete_extra_pub_doesnt_invalidate(self): - """Test that deleting a publication not being served doesn't invalidate cache""" - self.pub_api.delete(self.pub2.pulp_href) - files = ["", "PULP_MANIFEST", "2.iso"] - for file in files: - self.assertEqual((200, "HIT"), self._check_cache(file), 
file) - - def _delete_served_pub_does_invalidate(self): - """Test that deleting the serving publication does invalidate the cache""" - # Reverts back to serving self.pub1 - self.pub_api.delete(self.pub3[0].pulp_href) - files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"] - for i, file in enumerate(files): - self.assertEqual((200, "HIT" if i % 2 == 1 else "MISS"), self._check_cache(file), file) - - def _delete_repo_invalidates(self): - """Tests that deleting a repository invalidates the cache""" - monitor_task(self.repo_api.delete(self.repo.pulp_href).task) - files = ["", "PULP_MANIFEST", "2.iso"] - for file in files: - self.assertEqual((404, None), self._check_cache(file), file) - - def _no_error_when_accessing_invalid_file(self): - """Tests that accessing a file that doesn't exist on content app gives 404""" - files = ["invalid", "another/bad-one", "DNE/"] - url = PULP_CONTENT_BASE_URL - for file in files: - self.assertEqual((404, None), self._check_cache(file, url=url), file) - - def _check_cache(self, file, url=None): +@pytest.mark.parallel +@pytest.mark.skipif(is_redis_connected is False, reason="Could not connect to the Redis server") +def test_full_workflow( + file_repo_with_auto_publish, + basic_manifest_path, + file_fixture_gen_remote, + file_repo_api_client, + file_pub_api_client, + file_distro_api_client, + file_content_api_client, + gen_object_with_cleanup, +): + def _check_cache(url): """Helper to check if cache miss or hit""" - url = urljoin(url or self.url, file) - r = requests.get(url) + r = get_url(url) if r.history: r = r.history[0] - return 200 if r.status_code == 302 else r.status_code, r.headers.get("X-PULP-CACHE") - return r.status_code, r.headers.get("X-PULP-CACHE") + return 200 if r.status == 302 else r.status, r.headers.get("X-PULP-CACHE") + return r.status, r.headers.get("X-PULP-CACHE") + + # Sync from the remote and assert that a new repository version is created + remote = 
file_fixture_gen_remote(manifest_path=basic_manifest_path, policy="immediate") + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo_with_auto_publish.pulp_href, body).task) + repo = file_repo_api_client.read(file_repo_with_auto_publish.pulp_href) + assert repo.latest_version_href.endswith("/versions/1/") + + body = FileFilePublication(repository=repo.pulp_href) + pub2 = file_pub_api_client.read( + monitor_task(file_pub_api_client.create(body).task).created_resources[0] + ) + distro = gen_object_with_cleanup( + file_distro_api_client, gen_distribution(repository=repo.pulp_href) + ) + + # Checks responses are cached for content + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] + for i, file in enumerate(files): + url = urljoin(distro.base_url, file) + assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file + + # Check that removing the repository from the distribution invalidates the cache + body = PatchedfileFileDistribution(repository="") + monitor_task(file_distro_api_client.partial_update(distro.pulp_href, body).task) + files = ["", "PULP_MANIFEST", "1.iso"] + for file in files: + url = urljoin(distro.base_url, file) + assert (404, None) == _check_cache(url), file + + # Check that responses are cacheable after a repository is added back + body = PatchedfileFileDistribution(repository=repo.pulp_href) + monitor_task(file_distro_api_client.partial_update(distro.pulp_href, body).task) + files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"] + for i, file in enumerate(files): + url = urljoin(distro.base_url, file) + assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file + + # Add a new distribution and check that its responses are cached separately + response = file_distro_api_client.create(gen_distribution(repository=repo.pulp_href)) + distro2 = file_distro_api_client.read(monitor_task(response.task).created_resources[0]) + url = 
urljoin(PULP_CONTENT_BASE_URL, f"{distro2.base_path}/")
+    files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "1.iso", "1.iso"]
+    for i, file in enumerate(files):
+        url = urljoin(distro2.base_url, file)
+        assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file
+
+    # Test that updating a repository pointed by multiple distributions invalidates all
+    cfile = file_content_api_client.list(
+        relative_path="1.iso", repository_version=repo.latest_version_href
+    ).results[0]
+    body = RepositoryAddRemoveContent(remove_content_units=[cfile.pulp_href])
+    response = monitor_task(file_repo_api_client.modify(repo.pulp_href, body).task)
+    pub3 = file_pub_api_client.read(response.created_resources[1])
+    files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"]
+    for i, file in enumerate(files):
+        url = urljoin(distro.base_url, file)
+        assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file
+        url = urljoin(distro2.base_url, file)
+        assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file
+
+    # Tests that deleting one distribution sharing a repository only invalidates its cache
+    monitor_task(file_distro_api_client.delete(distro2.pulp_href).task)
+    files = ["", "PULP_MANIFEST", "2.iso"]
+    for file in files:
+        url = urljoin(distro.base_url, file)
+        assert (200, "HIT") == _check_cache(url), file
+        url = urljoin(distro2.base_url, file)
+        assert (404, None) == _check_cache(url), file
+
+    # Test that deleting a publication not being served doesn't invalidate cache
+    file_pub_api_client.delete(pub2.pulp_href)
+    files = ["", "PULP_MANIFEST", "2.iso"]
+    for file in files:
+        url = urljoin(distro.base_url, file)
+        assert (200, "HIT") == _check_cache(url), file
+
+    # Test that deleting the serving publication does invalidate the cache
+    # Reverts back to serving the publication created by auto-publish
+    file_pub_api_client.delete(pub3.pulp_href)
+    files = ["", "", "PULP_MANIFEST", "PULP_MANIFEST", "2.iso", "2.iso"]
+    for i, file in enumerate(files):
+        url = 
urljoin(distro.base_url, file) + assert (200, "HIT" if i % 2 == 1 else "MISS") == _check_cache(url), file + + # Tests that deleting a repository invalidates the cache""" + monitor_task(file_repo_api_client.delete(repo.pulp_href).task) + files = ["", "PULP_MANIFEST", "2.iso"] + for file in files: + url = urljoin(distro.base_url, file) + assert (404, None) == _check_cache(url), file + + # Tests that accessing a file that doesn't exist on content app gives 404 + files = ["invalid", "another/bad-one", "DNE/"] + for file in files: + url = urljoin(PULP_CONTENT_BASE_URL, file) + assert (404, None) == _check_cache(url), file diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py index 94a27cb2..900188d2 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_delivery.py @@ -1,181 +1,110 @@ """Tests related to content delivery.""" +from aiohttp.client_exceptions import ClientResponseError import hashlib -import unittest -from random import choice +import pytest from urllib.parse import urljoin -from pulp_smash import api, config, utils -from pulp_smash.pulp3.bindings import delete_orphans, monitor_task, PulpTestCase -from pulp_smash.pulp3.constants import ON_DEMAND_DOWNLOAD_POLICIES +from pulp_smash.pulp3.bindings import monitor_task from pulp_smash.pulp3.utils import ( - download_content_unit, gen_distribution, - gen_repo, - get_content, - sync, ) -from requests import HTTPError from pulpcore.client.pulp_file import ( - PublicationsFileApi, - RemotesFileApi, - RepositoriesFileApi, RepositorySyncURL, - DistributionsFileApi, ) from pulp_file.tests.functional.utils import ( - create_file_publication, - gen_file_remote, - gen_file_client, + get_files_in_manifest, + download_file, ) -from .constants import ( - FILE_CONTENT_NAME, - FILE_DISTRIBUTION_PATH, - FILE_FIXTURE_URL, - 
FILE_FIXTURE_MANIFEST_URL, - FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL, - FILE_REMOTE_PATH, - FILE_REPO_PATH, -) - - -class ContentDeliveryTestCase(unittest.TestCase): - """Content delivery breaks when delete remote - lazy download policy. - - Deleting a remote that was used in a sync with either the on_demand or - streamed options can break published data. Specifically, clients who want - to fetch content that a remote was providing access to would begin to - 404. Recreating a remote and re-triggering a sync will cause these broken - units to recover again. - - This test targets the following issue: - - * `Pulp #4464 `_ - """ - - def test_content_remote_delete(self): - """Assert that an HTTP error is raised when remote is deleted. - - Also verify that the content can be downloaded from Pulp once the - remote is recreated and another sync is triggered. - """ - cfg = config.get_config() - delete_orphans() - client = api.Client(cfg, api.page_handler) - - repo = client.post(FILE_REPO_PATH, gen_repo()) - self.addCleanup(client.delete, repo["pulp_href"]) - - body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES)) - remote = client.post(FILE_REMOTE_PATH, body) - - # Sync the repository using a lazy download policy. - sync(cfg, remote, repo) - repo = client.get(repo["pulp_href"]) - - publication = create_file_publication(cfg, repo) - self.addCleanup(client.delete, publication["pulp_href"]) - - # Delete the remote. - client.delete(remote["pulp_href"]) - - body = gen_distribution() - body["publication"] = publication["pulp_href"] - distribution = client.using_handler(api.task_handler).post(FILE_DISTRIBUTION_PATH, body) - self.addCleanup(client.delete, distribution["pulp_href"]) - - unit_path = choice( - [content_unit["relative_path"] for content_unit in get_content(repo)[FILE_CONTENT_NAME]] - ) - - # Assert that an HTTP error is raised when one to fetch content from - # the distribution once the remote was removed. 
- with self.assertRaises(HTTPError) as ctx: - download_content_unit(cfg, distribution, unit_path) - for key in ("not", "found"): - self.assertIn(key, ctx.exception.response.reason.lower()) - - # Recreating a remote and re-triggering a sync will cause these broken - # units to recover again. - body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES)) - remote = client.post(FILE_REMOTE_PATH, body) - self.addCleanup(client.delete, remote["pulp_href"]) - - sync(cfg, remote, repo) - - content = download_content_unit(cfg, distribution, unit_path) - pulp_hash = hashlib.sha256(content).hexdigest() - - fixtures_hash = hashlib.sha256( - utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path)) - ).hexdigest() - - self.assertEqual(pulp_hash, fixtures_hash) - - -class RemoteArtifactUpdateTestCase(PulpTestCase): - @classmethod - def setUpClass(cls): - """Clean out Pulp before testing.""" - delete_orphans() - client = gen_file_client() - cls.repo_api = RepositoriesFileApi(client) - cls.remote_api = RemotesFileApi(client) - cls.publication_api = PublicationsFileApi(client) - cls.distributions_api = DistributionsFileApi(client) - cls.cfg = config.get_config() - - def tearDown(self): - """Clean up Pulp after testing.""" - self.doCleanups() - delete_orphans() - - def test_remote_artifact_url_update(self): - """Test that downloading on_demand content works after a repository layout change.""" - - FILE_NAME = "1.iso" - - # 1. 
Create a remote, repository and distribution - remote URL has links that should 404 - remote_config = gen_file_remote( - policy="on_demand", url=FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL - ) - remote = self.remote_api.create(remote_config) - self.addCleanup(self.remote_api.delete, remote.pulp_href) - - repo = self.repo_api.create(gen_repo(autopublish=True, remote=remote.pulp_href)) - self.addCleanup(self.repo_api.delete, repo.pulp_href) - - body = gen_distribution(repository=repo.pulp_href) - distribution_response = self.distributions_api.create(body) - created_resources = monitor_task(distribution_response.task).created_resources - distribution = self.distributions_api.read(created_resources[0]) - self.addCleanup(self.distributions_api.delete, distribution.pulp_href) - - # 2. Sync the repository, verify that downloading artifacts fails - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - - with self.assertRaises(HTTPError): - download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME) - # 3. Update the remote URL with one that works, sync again, check that downloading - # artifacts works. 
- update_response = self.remote_api.update( - remote.pulp_href, gen_file_remote(policy="on_demand", url=FILE_FIXTURE_MANIFEST_URL) - ) - monitor_task(update_response.task) - - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - - content = download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME) - pulp_hash = hashlib.sha256(content).hexdigest() - fixtures_hash = hashlib.sha256( - utils.http_get(urljoin(FILE_FIXTURE_URL, FILE_NAME)) - ).hexdigest() - - self.assertEqual(pulp_hash, fixtures_hash) +@pytest.mark.parallel +def test_delete_remote_on_demand( + file_repo_with_auto_publish, + file_fixture_gen_remote_ssl, + file_remote_api_client, + file_repo_api_client, + file_distro_api_client, + basic_manifest_path, + gen_object_with_cleanup, +): + # Create a remote with on_demand download policy + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + + # Sync from the remote + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo_with_auto_publish.pulp_href, body).task) + repo = file_repo_api_client.read(file_repo_with_auto_publish.pulp_href) + + # Create a distribution pointing to the repository + distribution = gen_object_with_cleanup( + file_distro_api_client, gen_distribution(repository=repo.pulp_href) + ) + + # Download the manifest from the remote + expected_file_list = list(get_files_in_manifest(remote.url)) + + # Delete the remote and assert that downloading content returns a 404 + monitor_task(file_remote_api_client.delete(remote.pulp_href).task) + with pytest.raises(ClientResponseError) as exc: + url = urljoin(distribution.base_url, expected_file_list[0][0]) + download_file(url) + assert exc.value.status == 404 + + # Recreate the remote and sync into the repository using it + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + body = 
RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(repo.pulp_href, body).task) + + # Assert that files can now be downloaded from the distribution + content_unit_url = urljoin(distribution.base_url, expected_file_list[0][0]) + downloaded_file = download_file(content_unit_url) + actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() + expected_checksum = expected_file_list[0][1] + assert expected_checksum == actual_checksum + + +@pytest.mark.parallel +def test_remote_artifact_url_update( + file_repo_with_auto_publish, + file_fixture_gen_remote_ssl, + file_repo_api_client, + file_distro_api_client, + basic_manifest_path, + basic_manifest_only_path, + gen_object_with_cleanup, +): + # Create a remote that points to a repository that only has the manifest, but no content + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_only_path, policy="on_demand") + + # Sync from the remote + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo_with_auto_publish.pulp_href, body).task) + repo = file_repo_api_client.read(file_repo_with_auto_publish.pulp_href) + + # Create a distribution from the publication + distribution = gen_object_with_cleanup( + file_distro_api_client, gen_distribution(repository=repo.pulp_href) + ) + + # Download the manifest from the remote + expected_file_list = list(get_files_in_manifest(remote.url)) + + # Assert that trying to download content raises a 404 + with pytest.raises(ClientResponseError) as exc: + url = urljoin(distribution.base_url, expected_file_list[0][0]) + download_file(url) + assert exc.value.status == 404 + + # Create a new remote that points to a repository that does have the missing content + remote2 = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + + # Sync from the remote and assert that content can now be downloaded + body = RepositorySyncURL(remote=remote2.pulp_href) + 
monitor_task(file_repo_api_client.sync(file_repo_with_auto_publish.pulp_href, body).task) + content_unit_url = urljoin(distribution.base_url, expected_file_list[0][0]) + downloaded_file = download_file(content_unit_url) + actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() + expected_checksum = expected_file_list[0][1] + assert expected_checksum == actual_checksum diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py index 21d281bc..9bfa39cf 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_guard.py @@ -1,139 +1,92 @@ -import requests -import unittest +from aiohttp import BasicAuth +import pytest +import uuid -from urllib.parse import urljoin - -from pulp_smash import config, utils from pulp_smash.pulp3.bindings import monitor_task from pulp_smash.pulp3.utils import gen_distribution -from pulpcore.client.pulpcore import ( - ApiClient as CoreApiClient, - GroupsApi, - GroupsUsersApi, - ContentguardsRbacApi, -) from pulpcore.client.pulp_file import ( - DistributionsFileApi, PatchedfileFileDistribution, ) from pulp_file.tests.functional.utils import ( - gen_file_client, - gen_user_rest, - del_user_rest, + get_url, ) -from .constants import PULP_CONTENT_BASE_URL - - -class RBACContentGuardTestCase(unittest.TestCase): - """Test RBAC enabled content guard""" - - CREATOR_ROLE = "core.rbaccontentguard_creator" - DOWNLOAD_ROLE = "core.rbaccontentguard_downloader" - - @classmethod - def setUpClass(cls): - cls.client = gen_file_client() # This is admin client, following apis are for admin user - cls.api_config = config.get_config().get_bindings_config() - core_client = CoreApiClient(config.get_config().get_bindings_config()) - cls.groups_api = GroupsApi(core_client) - cls.group_users_api = GroupsUsersApi(core_client) - cls.distro_api = DistributionsFileApi(cls.client) - - def 
setUp(self): - response = monitor_task(self.distro_api.create(gen_distribution()).task) - self.distro = self.distro_api.read(response.created_resources[0]) - self.rbac_guard_api = ContentguardsRbacApi(self.client) - - self.admin = { - "username": self.client.configuration.username, - "password": self.client.configuration.password, - } - user = gen_user_rest(model_roles=["core.rbaccontentguard_creator"]) - self.api_config.username = user["username"] - self.api_config.password = user["password"] - user["rbac_guard_api"] = ContentguardsRbacApi(CoreApiClient(self.api_config)) - self.creator_user = user - self.user_a = gen_user_rest() - self.user_b = gen_user_rest() - self.all_users = [self.creator_user, self.user_a, self.user_a, self.admin, None] - - self.group = self.groups_api.create({"name": utils.uuid4()}) - self.group_users_api.create(self.group.pulp_href, {"username": self.user_b["username"]}) - self.group_users_api.create(self.group.pulp_href, {"username": self.user_a["username"]}) - self.url = urljoin(PULP_CONTENT_BASE_URL, f"{self.distro.base_path}/") - def tearDown(self): - self.distro_api.delete(self.distro.pulp_href) - self.rbac_guard_api.delete(self.distro.content_guard) - self.groups_api.delete(self.group.pulp_href) - del_user_rest(self.creator_user["pulp_href"]) - del_user_rest(self.user_a["pulp_href"]) - del_user_rest(self.user_b["pulp_href"]) - - def test_workflow(self): - self._all_users_access() - self._content_guard_creation() - self._only_creator_access() - self._add_users() - self._remove_users() - self._add_group() - self._remove_group() - - def _all_users_access(self): - """Sanity check that all users can access distribution with no content guard""" - self._assert_access(self.all_users) - - def _content_guard_creation(self): - """Checks that RBAC ContentGuard can be created and assigned to a distribution""" - guard = self.creator_user["rbac_guard_api"].create({"name": self.distro.name}) +@pytest.mark.parallel +def 
test_rbac_content_guard_full_workflow( + rbac_contentguard_api_client, + groups_api_client, + groups_users_api_client, + file_distro_api_client, + pulp_admin_user, + anonymous_user, + gen_user, + gen_object_with_cleanup, +): + # Create all of the users and groups + creator_user = gen_user( + model_roles=["core.rbaccontentguard_creator", "file.filedistribution_creator"] + ) + user_a = gen_user() + user_b = gen_user() + + all_users = [creator_user, user_a, user_b, pulp_admin_user, anonymous_user] + group = gen_object_with_cleanup(groups_api_client, {"name": str(uuid.uuid4())}) + groups_users_api_client.create(group.pulp_href, {"username": user_b.username}) + groups_users_api_client.create(group.pulp_href, {"username": user_a.username}) + + # Create a distribution + with creator_user: + distro = gen_object_with_cleanup(file_distro_api_client, gen_distribution()) + + def _assert_access(authorized_users): + """Asserts that only authorized users have access to the distribution's base_url.""" + for user in all_users: + if user is not anonymous_user: + auth = BasicAuth(login=user.username, password=user.password) + else: + auth = None + response = get_url(distro.base_url, auth=auth) + expected_status = 404 if user in authorized_users else 403 + assert response.status == expected_status, f"Failed on {user.username=}" + + # Make sure all users can access the distribution URL without a content guard + _assert_access(all_users) + + # Check that RBAC ContentGuard can be created and assigned to a distribution + with creator_user: + guard = gen_object_with_cleanup(rbac_contentguard_api_client, {"name": distro.name}) body = PatchedfileFileDistribution(content_guard=guard.pulp_href) - monitor_task(self.distro_api.partial_update(self.distro.pulp_href, body).task) - self.distro = self.distro_api.read(self.distro.pulp_href) - self.assertEqual(guard.pulp_href, self.distro.content_guard) - - def _only_creator_access(self): - """Checks that now only the creator and admin user can access 
the distribution""" - self._assert_access([self.creator_user, self.admin]) - - def _add_users(self): - """Use the /add/ endpoint to give the users permission to access distribution""" - body = { - "users": (self.user_a["username"], self.user_b["username"]), - "role": self.DOWNLOAD_ROLE, - } - self.creator_user["rbac_guard_api"].add_role(self.distro.content_guard, body) - self._assert_access([self.creator_user, self.user_b, self.user_a, self.admin]) - - def _remove_users(self): - """Use the /remove/ endpoint to remove users permission to access distribution""" - body = { - "users": (self.user_a["username"], self.user_b["username"]), - "role": self.DOWNLOAD_ROLE, - } - self.creator_user["rbac_guard_api"].remove_role(self.distro.content_guard, body) - self._assert_access([self.creator_user, self.admin]) - - def _add_group(self): - """Use the /add/ endpoint to add group""" - body = {"groups": [self.group.name], "role": self.DOWNLOAD_ROLE} - self.creator_user["rbac_guard_api"].add_role(self.distro.content_guard, body) - self._assert_access([self.creator_user, self.user_b, self.user_a, self.admin]) - - def _remove_group(self): - """Use the /remove/ endpoint to remove group""" - body = {"groups": [self.group.name], "role": self.DOWNLOAD_ROLE} - self.creator_user["rbac_guard_api"].remove_role(self.distro.content_guard, body) - self._assert_access([self.creator_user, self.admin]) - - def _assert_access(self, auth_users): - """Helper for asserting functionality and correct permissions on the content guard""" - for user in self.all_users: - auth = (user["username"], user["password"]) if user else None - r = requests.session() - r.trust_env = False # Don't read the .netrc file - response = r.get(self.url, auth=auth) - expected_status = 404 if user in auth_users else 403 - self.assertEqual(response.status_code, expected_status, f"Failed on {user=}") + monitor_task(file_distro_api_client.partial_update(distro.pulp_href, body).task) + distro = 
file_distro_api_client.read(distro.pulp_href) + assert guard.pulp_href == distro.content_guard + + # Check that now only the creator and admin user can access the distribution + _assert_access([creator_user, pulp_admin_user]) + + # Use the /add/ endpoint to give the users permission to access distribution + body = { + "users": (user_a.username, user_b.username), + "role": "core.rbaccontentguard_downloader", + } + with creator_user: + rbac_contentguard_api_client.add_role(distro.content_guard, body) + _assert_access([creator_user, user_b, user_a, pulp_admin_user]) + + # Use the /remove/ endpoint to remove users permission to access distribution + with creator_user: + rbac_contentguard_api_client.remove_role(distro.content_guard, body) + _assert_access([creator_user, pulp_admin_user]) + + # Use the /add/ endpoint to add group + body = {"groups": [group.name], "role": "core.rbaccontentguard_downloader"} + with creator_user: + rbac_contentguard_api_client.add_role(distro.content_guard, body) + _assert_access([creator_user, user_b, user_a, pulp_admin_user]) + + # Use the /remove/ endpoint to remove group + with creator_user: + rbac_contentguard_api_client.remove_role(distro.content_guard, body) + _assert_access([creator_user, pulp_admin_user]) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py index 11535c94..2b246c5d 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_path.py @@ -1,57 +1,52 @@ """Tests related to content path.""" -import unittest - -from pulp_smash import api, config -from pulp_smash.pulp3.bindings import delete_orphans -from pulp_smash.pulp3.utils import gen_remote, gen_repo, sync - -from pulp_file.tests.functional.utils import create_file_publication -from .constants import ( - FILE_FIXTURE_MANIFEST_URL, - FILE_REMOTE_PATH, - FILE_REPO_PATH, -) - - -class 
SyncPublishContentPathTestCase(unittest.TestCase): - """Test whether sync/publish for content already in Pulp. - - Different code paths are used in Pulp for the cases when artifacts are - already present on the filesystem during sync and when they are not - downloaded yet - - This test targets the following issue: - - `Pulp #4442 `_ - - Does the following: - - 1. Assure that no content from repository A is downloaded. - 2. Sync/publish repository A with download policy immediate. - 3. Sync/publish repository A again with download policy immediate. - 4. No failure in 2 shows that sync went fine when content was - not present on the disk and in the database. - 5. No failure in 3 shows that sync went fine when content was already - present on the disk and in the database. - - """ - - def test_all(self): - """Test whether sync/publish for content already in Pulp.""" - cfg = config.get_config() - client = api.Client(cfg, api.page_handler) - - # step 1. delete orphans to assure that no content is present on disk, - # or database. 
- delete_orphans() - - remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)) - self.addCleanup(client.delete, remote["pulp_href"]) - - repo = client.post(FILE_REPO_PATH, gen_repo()) - self.addCleanup(client.delete, repo["pulp_href"]) - - for _ in range(2): - sync(cfg, remote, repo) - repo = client.get(repo["pulp_href"]) - create_file_publication(cfg, repo) +import pytest +import uuid + +from pulp_smash import utils +from pulp_smash.pulp3.utils import gen_distribution +from urllib.parse import urljoin + +from pulpcore.app import settings +from .constants import PULP_CONTENT_BASE_URL + + +@pytest.mark.parallel +def test_content_directory_listing( + file_distro_api_client, + gen_object_with_cleanup, + tls_certificate_authority_cert, + x509_content_guards_api_client, +): + """Checks that distributions are grouped by base-path when listing content directories.""" + + HIDE_GUARDED_DISTRIBUTIONS = getattr(settings, "HIDE_GUARDED_DISTRIBUTIONS", False) + + content_guard1 = gen_object_with_cleanup( + x509_content_guards_api_client, + {"name": str(uuid.uuid4()), "ca_certificate": tls_certificate_authority_cert}, + ) + + base_path = str(uuid.uuid4()) + for path, content_guard in [ + ("/foo1", None), + ("/foo2", content_guard1.pulp_href), + ("/boo1/foo1", None), + ("/boo2/foo1", content_guard1.pulp_href), + ]: + gen_object_with_cleanup( + file_distro_api_client, + gen_distribution(base_path=base_path + path, content_guard=content_guard), + ) + + response = utils.http_get(PULP_CONTENT_BASE_URL).decode("utf-8") + assert response.count(f'a href="{base_path}/"') == 1 + + url = urljoin(PULP_CONTENT_BASE_URL, base_path + "/") + response = utils.http_get(url).decode("utf-8") + assert response.count('a href="foo1/"') == 1 + assert response.count('a href="foo2/"') == (0 if HIDE_GUARDED_DISTRIBUTIONS else 1) + assert response.count('a href="boo1/"') == 1 + assert response.count('a href="boo2/"') == (0 if HIDE_GUARDED_DISTRIBUTIONS else 1) + + response = 
utils.http_get(urljoin(url, "boo1/")).decode("utf-8") + assert response.count('a href="foo1/"') == 1 diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py b/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py index 42ea27c0..fd9cd16c 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_content_promotion.py @@ -1,101 +1,68 @@ """Tests related to content promotion.""" import hashlib -import unittest +import pytest from urllib.parse import urljoin -from pulp_smash import api, config -from pulp_smash.pulp3.utils import gen_distribution, gen_remote, gen_repo, get_added_content, sync +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_distribution -from pulp_file.tests.functional.utils import create_file_publication -from .constants import ( - FILE_CONTENT_NAME, - FILE_DISTRIBUTION_PATH, - FILE_FIXTURE_MANIFEST_URL, - FILE_REMOTE_PATH, - FILE_REPO_PATH, - PULP_CONTENT_BASE_URL, +from pulp_file.tests.functional.utils import ( + get_files_in_manifest, + get_url, + download_file, ) - -class ContentPromotionTestCase(unittest.TestCase): - """Test content promotion.""" - - def test_all(self): - """Test content promotion for a distribution. - - This test targets the following issue: - - * `Pulp #4186 `_ - * `Pulp #8475 `_ - * `Pulp #8760 `_ - - Do the following: - - 1. Create a repository that has at least one repository version. - 2. Create a publication. - 3. Create 2 distributions - using the same publication. Those - distributions will have different ``base_path``. - 4. Assert that distributions have the same publication. - 5. Create another distribution using same repository version. - 5. Assert that distributions are viewable from base url - 6. Assert that content in distributions are viewable - 7. Select a content unit. Download that content unit from Pulp using - the three different distributions. 
- Assert that content unit has the same checksum when fetched from - different distributions. - """ - cfg = config.get_config() - client = api.Client(cfg, api.json_handler) - - repo = client.post(FILE_REPO_PATH, gen_repo()) - self.addCleanup(client.delete, repo["pulp_href"]) - - remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)) - self.addCleanup(client.delete, remote["pulp_href"]) - - sync(cfg, remote, repo) - repo = client.get(repo["pulp_href"]) - - publication = create_file_publication(cfg, repo) - self.addCleanup(client.delete, publication["pulp_href"]) - - distributions = [] - for _ in range(2): - body = gen_distribution() - body["publication"] = publication["pulp_href"] - distribution = client.using_handler(api.task_handler).post(FILE_DISTRIBUTION_PATH, body) - distributions.append(distribution) - self.addCleanup(client.delete, distribution["pulp_href"]) - - self.assertEqual( - distributions[0]["publication"], distributions[1]["publication"], distributions - ) - - body = gen_distribution() - body["repository"] = repo["pulp_href"] - distribution = client.using_handler(api.task_handler).post(FILE_DISTRIBUTION_PATH, body) - distributions.append(distribution) - self.addCleanup(client.delete, distribution["pulp_href"]) - - client.response_handler = api.safe_handler - self.assertEqual(client.get(PULP_CONTENT_BASE_URL).status_code, 200) - - for distribution in distributions: - self.assertEqual(client.get(distribution["base_url"]).status_code, 200) - - unit_urls = [] - unit_path = get_added_content(repo)[FILE_CONTENT_NAME][0]["relative_path"] - for distribution in distributions: - unit_url = distribution["base_url"] - unit_urls.append(urljoin(unit_url, unit_path)) - - self.assertEqual( - hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(), - hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(), - unit_urls, - ) - self.assertEqual( - hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(), - 
hashlib.sha256(client.get(unit_urls[2]).content).hexdigest(), - unit_urls, - ) +from pulpcore.client.pulp_file import RepositorySyncURL + + +@pytest.mark.parallel +def test_content_promotion( + file_repo_with_auto_publish, + file_fixture_gen_remote_ssl, + file_repo_api_client, + file_pub_api_client, + file_distro_api_client, + basic_manifest_path, + gen_object_with_cleanup, +): + # Create a repository, publication, and 2 distributions + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + file_repo = file_repo_api_client.read(file_repo_with_auto_publish.pulp_href) + + # Check what content and artifacts are in the fixture repository + expected_files = get_files_in_manifest(remote.url) + + # Sync from the remote and assert that a new repository version is created + body = RepositorySyncURL(remote=remote.pulp_href) + created = monitor_task( + file_repo_api_client.sync(file_repo.pulp_href, body).task + ).created_resources + pub = file_pub_api_client.read(created[1]) + + # Create two Distributions pointing to the publication + distribution1 = gen_object_with_cleanup( + file_distro_api_client, gen_distribution(publication=pub.pulp_href) + ) + distribution2 = gen_object_with_cleanup( + file_distro_api_client, gen_distribution(publication=pub.pulp_href) + ) + assert distribution1.publication == pub.pulp_href + assert distribution2.publication == pub.pulp_href + + # Create a Distribution using the repository + distribution3 = gen_object_with_cleanup( + file_distro_api_client, gen_distribution(repository=file_repo.pulp_href) + ) + + for distro in [distribution1, distribution2, distribution3]: + # Assert that all 3 distributions can be accessed + r = get_url(distro.base_url) + assert r.status == 200 + # Download one of the files from the distribution and assert it has the correct checksum + expected_files_list = list(expected_files) + content_unit = expected_files_list[0] + content_unit_url = urljoin(distro.base_url, content_unit[0]) + 
downloaded_file = download_file(content_unit_url) + actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() + expected_checksum = content_unit[1] + assert expected_checksum == actual_checksum diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_crd_publications.py b/pulp_file/tests/functional/api/from_pulpcore/test_crd_publications.py deleted file mode 100644 index f0608b68..00000000 --- a/pulp_file/tests/functional/api/from_pulpcore/test_crd_publications.py +++ /dev/null @@ -1,213 +0,0 @@ -"""Tests that perform actions over publications.""" -import unittest -from itertools import permutations - -from pulp_smash import api, config -from pulp_smash.pulp3.utils import gen_repo, get_content, modify_repo, sync -from requests.exceptions import HTTPError - -from pulp_file.tests.functional.utils import ( - create_file_publication, - gen_file_remote, - parse_date_from_string, -) -from .constants import ( - FILE_CONTENT_NAME, - FILE_PUBLICATION_PATH, - FILE_REMOTE_PATH, - FILE_REPO_PATH, -) - - -class PublicationsTestCase(unittest.TestCase): - """Perform actions over publications.""" - - def setUp(self): - """Create class-wide variables.""" - self.cfg = config.get_config() - self.client = api.Client(self.cfg, api.page_handler) - self.client_echo = api.Client(self.cfg, api.echo_handler) - self.remote = {} - self.publication = {} - self.repo = {} - try: - self.repo.update(self.client.post(FILE_REPO_PATH, gen_repo())) - self.repo_initial_version = self.repo["latest_version_href"] - body = gen_file_remote() - self.remote.update(self.client.post(FILE_REMOTE_PATH, body)) - sync(self.cfg, self.remote, self.repo) - # update to get latest_version_href - self.repo.update(self.client.get(self.repo["pulp_href"])) - except Exception: - self.tearDown() - raise - - def tearDown(self): - """Clean class-wide variables.""" - for resource in (self.remote, self.repo): - if resource: - self.client.delete(resource["pulp_href"]) - - def test_workflow(self): - 
self._create_file_publication() - self._read_publication() - self._read_publication_with_specific_fields() - self._read_publication_without_specific_fields() - self._read_publications_filter_repo_version() - self._read_publications_filter_repo_version_no_match() - self._read_publications_filter_repo_version_invalid() - self._read_publications_filter_created_time() - self._read_publications_filter_created_time_no_match() - self._publication_create_order() - self._delete() - - def _create_file_publication(self): - """Create a publication.""" - self.publication.update(create_file_publication(self.cfg, self.repo)) - - def _read_publication(self): - """Read a publication by its href.""" - publication = self.client.get(self.publication["pulp_href"]) - for key, val in self.publication.items(): - with self.subTest(key=key): - self.assertEqual(publication[key], val) - - def _read_publication_with_specific_fields(self): - """Read a publication by its href providing specific field list. - - Permutate field list to ensure different combinations on result. - """ - fields = ("pulp_href", "pulp_created", "distributions") - for field_pair in permutations(fields, 2): - # ex: field_pair = ('pulp_href', 'pulp_created) - with self.subTest(field_pair=field_pair): - publication = self.client.get( - self.publication["pulp_href"], params={"fields": ",".join(field_pair)} - ) - self.assertEqual(sorted(field_pair), sorted(publication.keys())) - - def _read_publication_without_specific_fields(self): - """Read a publication by its href excluding specific fields.""" - # requests doesn't allow the use of != in parameters. 
- url = "{}?exclude_fields=distributions".format(self.publication["pulp_href"]) - publication = self.client.get(url) - self.assertNotIn("distributions", publication.keys()) - - def _read_publications_filter_repo_version(self): - """Read a publication by its repository version.""" - publications = self.client.get( - FILE_PUBLICATION_PATH, params={"repository_version": self.repo["latest_version_href"]} - ) - self.assertEqual(len(publications), 1, publications) - for key, val in self.publication.items(): - with self.subTest(key=key): - self.assertEqual(publications[0][key], val) - - def _read_publications_filter_repo_version_no_match(self): - """Filter by repo version for which no publication exists.""" - publications = self.client.get( - FILE_PUBLICATION_PATH, params={"repository_version": self.repo_initial_version} - ) - self.assertFalse(publications) - - def _read_publications_filter_repo_version_invalid(self): - """Filter by a repo version that does not exist.""" - invalid_repo_version = self.repo["versions_href"] + "123456789/" - response = self.client_echo.get( - FILE_PUBLICATION_PATH, params={"repository_version": invalid_repo_version} - ) - self.assertEqual(response.status_code, 400) - self.assertIn("not found for repositoryversion", response.text) - - def _read_publications_filter_created_time(self): - """Read a publication by its created time.""" - publications = self.client.get( - FILE_PUBLICATION_PATH, params={"pulp_created": self.publication["pulp_created"]} - ) - self.assertEqual(len(publications), 1, publications) - for key, val in self.publication.items(): - with self.subTest(key=key): - self.assertEqual(publications[0][key], val) - - def _read_publications_filter_created_time_no_match(self): - """Filter for created time for which no publication exists.""" - publications = self.client.get( - FILE_PUBLICATION_PATH, params={"pulp_created": self.repo["pulp_created"]} - ) - self.assertFalse(publications) - - def _publication_create_order(self): - """Assert 
that publications are ordered by created time.""" - # Create more 2 publications for the same repo - for _ in range(2): - create_file_publication(self.cfg, self.repo) - - # Read publications - publications = self.client.get(FILE_PUBLICATION_PATH) - self.assertEqual(len(publications), 3) - - # Assert publications are ordered by pulp_created field in descending order - for i, publication in enumerate(publications[:-1]): - self.assertGreater( - parse_date_from_string(publication["pulp_created"]), # Current - parse_date_from_string(publications[i + 1]["pulp_created"]), # Prev - ) - - def _delete(self): - """Delete a publication.""" - self.client.delete(self.publication["pulp_href"]) - with self.assertRaises(HTTPError): - self.client.get(self.publication["pulp_href"]) - - -class PublicationRepositoryParametersTestCase(unittest.TestCase): - """Explore publication creation using repository and repository version.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = api.Client(cls.cfg) - - def test_create_only_using_repoversion(self): - """Create a publication only using repository version.""" - repo = self.create_sync_repo() - for file_content in get_content(repo)[FILE_CONTENT_NAME]: - modify_repo(self.cfg, repo, remove_units=[file_content]) - version_href = self.client.get(repo["versions_href"])[1]["pulp_href"] - publication = create_file_publication(self.cfg, repo, version_href) - self.addCleanup(self.client.delete, publication["pulp_href"]) - - self.assertEqual(publication["repository_version"], version_href, publication) - - def test_create_repo_repoversion(self): - """Create a publication using repository and repository version.""" - repo = self.create_sync_repo() - version_href = self.client.get(repo["versions_href"])[0]["pulp_href"] - - with self.assertRaises(HTTPError) as ctx: - self.client.using_handler(api.json_handler).post( - FILE_PUBLICATION_PATH, - {"repository_version": version_href, 
"repository": repo["pulp_href"]}, - ) - - for key in ("repository", "repository_version", "not", "both"): - self.assertIn( - key, - ctx.exception.response.json()["non_field_errors"][0].lower(), - ctx.exception.response, - ) - - def create_sync_repo(self): - """Create and sync a repository. - - Given the number of times to be synced. - """ - repo = self.client.post(FILE_REPO_PATH, gen_repo()) - self.addCleanup(self.client.delete, repo["pulp_href"]) - - remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote()) - self.addCleanup(self.client.delete, remote["pulp_href"]) - - sync(self.cfg, remote, repo) - return self.client.get(repo["pulp_href"]) diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py b/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py index 915cbafb..079a8850 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_crud_repos.py @@ -1,216 +1,130 @@ """Tests that CRUD repositories.""" import json +import pytest import re -import time -import unittest -from itertools import permutations +from aiohttp import BasicAuth +from subprocess import run from urllib.parse import urljoin -from pulp_smash import api, cli, config, utils +from pulp_smash import utils from pulp_smash.pulp3.bindings import monitor_task from pulp_smash.pulp3.utils import gen_repo -from requests.exceptions import HTTPError -import pytest - from pulpcore.client.pulp_file.exceptions import ApiException -from pulpcore.client.pulp_file import ( - ApiClient as FileApiClient, - FileFileRemote, - RemotesFileApi, -) - -from pulp_file.tests.functional.utils import gen_file_remote -from .constants import ( - FILE_FIXTURE_MANIFEST_URL, - FILE_REMOTE_PATH, - FILE_REPO_PATH, -) - - -class CRUDRepoTestCase(unittest.TestCase): - """CRUD repositories.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = 
api.Client(cls.cfg, api.json_handler) - - def setUp(self): - self.repo = {} - - def test_workflow(self): - self._create_repo() - self._create_same_name() - self._read_repo() - self._read_repo_with_specific_fields() - self._read_repo_without_specific_fields() - self._read_repos() - self._read_all_repos() - self._fully_update_name() - self._fully_update_desc() - self._partially_update_name() - self._partially_update_desc() - self._set_remote_on_repository() - self._delete_repo() - - def _create_repo(self): - """Create repository.""" - self.repo = self.client.post(FILE_REPO_PATH, gen_repo()) - - def _create_same_name(self): - """Try to create a second reIpository with an identical name.""" - with self.assertRaises(HTTPError) as exc: - self.client.post(FILE_REPO_PATH, gen_repo(name=self.repo["name"])) - self.assertIn("unique", exc.exception.response.text) - self.assertEqual(exc.exception.response.status_code, 400) - - def _read_repo(self): - """Read a repository by its href.""" - repo = self.client.get(self.repo["pulp_href"]) - for key, val in self.repo.items(): - with self.subTest(key=key): - self.assertEqual(repo[key], val) - - def _read_repo_with_specific_fields(self): - """Read a repository by its href providing specific field list. - - Permutate field list to ensure different combinations on result. - """ - fields = ( - "pulp_href", - "pulp_created", - "versions_href", - "latest_version_href", - "name", - "description", - ) - for field_pair in permutations(fields, 2): - # ex: field_pair = ('pulp_href', 'created') - with self.subTest(field_pair=field_pair): - repo = self.client.get( - self.repo["pulp_href"], params={"fields": ",".join(field_pair)} - ) - self.assertEqual(sorted(field_pair), sorted(repo.keys())) - - def _read_repo_without_specific_fields(self): - """Read a repo by its href excluding specific fields.""" - # requests doesn't allow the use of != in parameters. 
- url = "{}?exclude_fields=created,name".format(self.repo["pulp_href"]) - repo = self.client.get(url) - response_fields = repo.keys() - self.assertNotIn("created", response_fields) - self.assertNotIn("name", response_fields) - - def _read_repos(self): - """Read the repository by its name.""" - page = self.client.get(FILE_REPO_PATH, params={"name": self.repo["name"]}) - self.assertEqual(len(page["results"]), 1) - for key, val in self.repo.items(): - with self.subTest(key=key): - self.assertEqual(page["results"][0][key], val) - - def _read_all_repos(self): - """Ensure name is displayed when listing repositories.""" - for repo in self.client.get(FILE_REPO_PATH)["results"]: - self.assertIsNotNone(repo["name"]) - - def _fully_update_name(self): - """Update a repository's name using HTTP PUT.""" - self._do_fully_update_attr("name") - - def _fully_update_desc(self): - """Update a repository's description using HTTP PUT.""" - self._do_fully_update_attr("description") - - def _do_fully_update_attr(self, attr): - """Update a repository attribute using HTTP PUT. - - :param attr: The name of the attribute to update. For example, - "description." The attribute to update must be a string. - """ - repo = self.client.get(self.repo["pulp_href"]) - string = utils.uuid4() - repo[attr] = string - self.client.put(repo["pulp_href"], repo) - - # verify the update - repo = self.client.get(repo["pulp_href"]) - self.assertEqual(string, repo[attr]) - - def _partially_update_name(self): - """Update a repository's name using HTTP PATCH.""" - self._do_partially_update_attr("name") +from pulp_file.tests.functional.utils import gen_file_remote, download_file, post_url +from pulp_file.tests.functional.api.from_pulpcore.constants import FILE_REMOTE_PATH - def _partially_update_desc(self): - """Update a repository's description using HTTP PATCH.""" - self._do_partially_update_attr("description") - def _do_partially_update_attr(self, attr): - """Update a repository attribute using HTTP PATCH. 
- - :param attr: The name of the attribute to update. For example, - "description." The attribute to update must be a string. - """ +@pytest.mark.parallel +def test_crud_repo_full_workflow( + file_repo_api_client, file_remote_api_client, gen_object_with_cleanup +): + # Create repository + repo = file_repo_api_client.create(gen_repo()) + + # Try to create another with the same name + with pytest.raises(ApiException) as e: + file_repo_api_client.create(gen_repo(name=repo.name)) + assert e.value.status == 400 + assert e.value.reason == "This field must be unique." + + # Test reading the repository + read_repo = file_repo_api_client.read(repo.pulp_href).to_dict() + for key, val in repo.to_dict().items(): + assert key in read_repo + assert getattr(repo, key) == read_repo[key] + + # Read a repository by its href providing specific field list. + config = file_repo_api_client.api_client.configuration + auth = BasicAuth(login=config.username, password=config.password) + full_href = urljoin(config.host, repo.pulp_href) + for fields in [ + ("pulp_href", "pulp_created"), + ("pulp_href", "name"), + ("pulp_created", "versions_href", "name"), + ]: + response = download_file(f"{full_href}?fields={','.join(fields)}", auth=auth) + assert sorted(fields) == sorted(json.loads(response.body).keys()) + + # Read a repo by its href excluding specific fields. + response = download_file(f"{full_href}?exclude_fields=created,name", auth=auth) + response_fields = json.loads(response.body).keys() + assert "created" not in response_fields + assert "name" not in response_fields + + # Read the repository by its name. + page = file_repo_api_client.list(name=repo.name) + assert len(page.results) == 1 + for key, val in repo.to_dict().items(): + assert getattr(page.results[0], key) == val + + # Ensure name is displayed when listing repositories. 
+ for read_repo in file_repo_api_client.list().results: + assert read_repo.name is not None + + def _do_update_attr(attr, partial=False): + """Update a repository attribute.""" + body = {} if partial else repo.to_dict() + function = getattr(file_repo_api_client, "partial_update" if partial else "update") string = utils.uuid4() - self.client.patch(self.repo["pulp_href"], {attr: string}) - + body[attr] = string + response = function(repo.pulp_href, body) + monitor_task(response.task) # verify the update - repo = self.client.get(self.repo["pulp_href"]) - self.assertEqual(repo[attr], string) + read_repo = file_repo_api_client.read(repo.pulp_href) + assert string == getattr(read_repo, attr) + + # Update a repository's name using HTTP PUT. + _do_update_attr("name") - def _set_remote_on_repository(self): - """Test setting remotes on repositories.""" - body = gen_file_remote() - remote = self.client.post(FILE_REMOTE_PATH, body) - self.addCleanup(self.client.delete, remote["pulp_href"]) + # Update a repository's description using HTTP PUT. + _do_update_attr("description") - # verify that syncing with no remote raises an error - with self.assertRaises(HTTPError): - self.client.post(urljoin(self.repo["pulp_href"], "sync/")) + # Update a repository's name using HTTP PATCH. + _do_update_attr("name", partial=True) - # test setting the remote on the repo - self.client.patch(self.repo["pulp_href"], {"remote": remote["pulp_href"]}) + # Update a repository's description using HTTP PATCH. + _do_update_attr("description", partial=True) - # test syncing without a remote - self.client.post(urljoin(self.repo["pulp_href"], "sync/")) + # Test setting remotes on repositories. 
+ remote = gen_object_with_cleanup(file_remote_api_client, gen_file_remote()) - repo = self.client.get(self.repo["pulp_href"]) - self.assertEqual(repo["latest_version_href"], f"{repo['pulp_href']}versions/1/") + # verify that syncing with no remote raises an error + with pytest.raises(ApiException): + file_repo_api_client.sync(repo.pulp_href, {}) - def _delete_repo(self): - """Delete a repository.""" - self.client.delete(self.repo["pulp_href"]) + # test setting the remote on the repo + response = file_repo_api_client.partial_update(repo.pulp_href, {"remote": remote.pulp_href}) + monitor_task(response.task) - # verify the delete - with self.assertRaises(HTTPError): - self.client.get(self.repo["pulp_href"]) + # test syncing without a remote + response = file_repo_api_client.sync(repo.pulp_href, {}) + monitor_task(response.task) - def test_negative_create_repo_with_invalid_parameter(self): - """Attempt to create repository passing extraneous invalid parameter. + read_repo = file_repo_api_client.read(repo.pulp_href) + assert read_repo.latest_version_href == f"{repo.pulp_href}versions/1/" - Assert response returns an error 400 including ["Unexpected field"]. - """ - response = api.Client(self.cfg, api.echo_handler).post(FILE_REPO_PATH, gen_repo(foo="bar")) - assert response.status_code == 400 - assert response.json()["foo"] == ["Unexpected field"] + # Delete a repository. + response = file_repo_api_client.delete(repo.pulp_href) + monitor_task(response.task) + # verify the delete + with pytest.raises(ApiException): + file_repo_api_client.read(repo.pulp_href) -class CRUDRemoteTestCase(unittest.TestCase): - """CRUD remotes.""" + # Attempt to create repository passing extraneous invalid parameter. + # Assert response returns an error 400 including ["Unexpected field"]. 
+ with pytest.raises(ApiException) as e: + file_repo_api_client.create(gen_repo(foo="bar")) + assert e.value.status == 400 + assert e.value.body["foo"] == ["Unexpected field"] - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = FileApiClient(cls.cfg.get_bindings_config()) - cls.remotes_api = RemotesFileApi(cls.client) - def setUp(self): - self.remote_attrs = { +@pytest.mark.parallel +def test_crud_remotes_full_workflow(file_remote_api_client, gen_object_with_cleanup): + remote_attrs = gen_file_remote( + **{ "name": utils.uuid4(), - "url": FILE_FIXTURE_MANIFEST_URL, "ca_cert": None, "client_cert": None, "client_key": None, @@ -225,19 +139,11 @@ def setUp(self): "sock_connect_timeout": None, "sock_read_timeout": None, } - self.remote = self.remotes_api.create(self.remote_attrs) + ) + remote = file_remote_api_client.create(remote_attrs) - def tearDown(self): - try: - response = self.remotes_api.delete(self.remote.pulp_href) - except ApiException as exc: - # The test_delete test will cause this to not be here - assert exc.status == 404 - else: - monitor_task(response.task) - - def _compare_results(self, data, received): - self.assertFalse(hasattr(received, "password")) + def _compare_results(data, received): + assert not hasattr(received, "password") # handle write only fields data.pop("username", None) @@ -245,214 +151,220 @@ def _compare_results(self, data, received): data.pop("client_key", None) for k in data: - self.assertEqual(getattr(received, k), data[k]) - - def test_read(self): - # Compare initial-attrs vs remote created in setUp - self._compare_results(self.remote_attrs, self.remote) - - def test_update(self): - data = {"download_concurrency": 23, "policy": "immediate"} - self.remotes_api.partial_update(self.remote.pulp_href, data) - time.sleep(1) # without this, the read returns the pre-patch values - new_remote = self.remotes_api.read(self.remote.pulp_href) - self._compare_results(data, 
new_remote) - - def test_password_writeable(self): - """Test that a password can be updated with a PUT request.""" - cli_client = cli.Client(self.cfg) - remote = self.remotes_api.create({"name": "test_pass", "url": "http://", "password": "new"}) - href = remote.pulp_href - uuid = re.search(r"/api/v3/remotes/file/file/([\w-]+)/", href).group(1) - shell_cmd = ( - f"import pulpcore; print(pulpcore.app.models.Remote.objects.get(pk='{uuid}').password)" - ) - - self.addCleanup(self.remotes_api.delete, href) - - # test a PUT request with a new password - remote_update = FileFileRemote(name="test_pass", url="http://", password="changed") - response = self.remotes_api.update(href, remote_update) - monitor_task(response.task) - exc = cli_client.run(["pulpcore-manager", "shell", "-c", shell_cmd]) - self.assertEqual(exc.stdout.rstrip("\n"), "changed") - - def test_password_not_unset(self): - """Test that password doesn't get unset when not passed with a PUT request.""" - cli_client = cli.Client(self.cfg) - remote = self.remotes_api.create({"name": "test_pass", "url": "http://", "password": "new"}) - href = remote.pulp_href - uuid = re.search(r"/api/v3/remotes/file/file/([\w-]+)/", href).group(1) - shell_cmd = ( - f"import pulpcore; print(pulpcore.app.models.Remote.objects.get(pk='{uuid}').password)" - ) - - self.addCleanup(self.remotes_api.delete, href) - - # test a PUT request without a password - remote_update = FileFileRemote(name="pass_test", url="http://") - response = self.remotes_api.update(href, remote_update) - monitor_task(response.task) - exc = cli_client.run(["pulpcore-manager", "shell", "-c", shell_cmd]) - self.assertEqual(exc.stdout.rstrip("\n"), "new") - - def test_timeout_attributes(self): - # Test valid timeout settings (float >= 0) - data = { - "total_timeout": 1.0, - "connect_timeout": 66.0, - "sock_connect_timeout": 0.0, - "sock_read_timeout": 3.1415926535, - } - self.remotes_api.partial_update(self.remote.pulp_href, data) - time.sleep(1) - new_remote = 
self.remotes_api.read(self.remote.pulp_href) - self._compare_results(data, new_remote) - - def test_timeout_attributes_float_lt_zero(self): - # Test invalid float < 0 - data = { - "total_timeout": -1.0, - } - with self.assertRaises(ApiException): - self.remotes_api.partial_update(self.remote.pulp_href, data) - - def test_timeout_attributes_non_float(self): - # Test invalid non-float - data = { - "connect_timeout": "abc", - } - with self.assertRaises(ApiException): - self.remotes_api.partial_update(self.remote.pulp_href, data) - - def test_timeout_attributes_reset_to_empty(self): - # Test reset to empty - data = { - "total_timeout": False, - "connect_timeout": None, - "sock_connect_timeout": False, - "sock_read_timeout": None, - } - response = self.remotes_api.partial_update(self.remote.pulp_href, data) - monitor_task(response.task) - new_remote = self.remotes_api.read(self.remote.pulp_href) - self._compare_results(data, new_remote) - - def test_delete(self): - response = self.remotes_api.delete(self.remote.pulp_href) - monitor_task(response.task) - # verify the delete - with self.assertRaises(ApiException): - self.remotes_api.read(self.remote.pulp_href) - - def test_headers(self): - # Test that headers value must be a list of dicts - data = {"headers": {"Connection": "keep-alive"}} - with self.assertRaises(ApiException): - self.remotes_api.partial_update(self.remote.pulp_href, data) - data = {"headers": [1, 2, 3]} - with self.assertRaises(ApiException): - self.remotes_api.partial_update(self.remote.pulp_href, data) - data = {"headers": [{"Connection": "keep-alive"}]} - self.remotes_api.partial_update(self.remote.pulp_href, data) + assert getattr(received, k) == data[k] + + # Compare initial-attrs vs remote created in setUp + _compare_results(remote_attrs, remote) + + # Test updating remote + data = {"download_concurrency": 23, "policy": "immediate"} + response = file_remote_api_client.partial_update(remote.pulp_href, data) + monitor_task(response.task) + new_remote 
= file_remote_api_client.read(remote.pulp_href) + _compare_results(data, new_remote) + + # Test that a password can be updated with a PUT request. + temp_remote = gen_object_with_cleanup( + file_remote_api_client, gen_file_remote(url="http://", password="new") + ) + href = temp_remote.pulp_href + uuid = re.search(r"/api/v3/remotes/file/file/([\w-]+)/", href).group(1) + shell_cmd = ( + f"import pulpcore; print(pulpcore.app.models.Remote.objects.get(pk='{uuid}').password)" + ) + + # test a PUT request with a new password + remote_update = gen_file_remote(name=temp_remote.name, url="http://", password="changed") + response = file_remote_api_client.update(href, remote_update) + monitor_task(response.task) + exc = run(["pulpcore-manager", "shell", "-c", shell_cmd], text=True, capture_output=True) + assert exc.stdout.rstrip("\n") == "changed" + + # Test that password doesn't get unset when not passed with a PUT request. + temp_remote = gen_object_with_cleanup( + file_remote_api_client, gen_file_remote(url="http://", password="new") + ) + href = temp_remote.pulp_href + uuid = re.search(r"/api/v3/remotes/file/file/([\w-]+)/", href).group(1) + shell_cmd = ( + f"import pulpcore; print(pulpcore.app.models.Remote.objects.get(pk='{uuid}').password)" + ) + + # test a PUT request without a password + remote_update = gen_file_remote(name=temp_remote.name, url="http://") + response = file_remote_api_client.update(href, remote_update) + monitor_task(response.task) + exc = run(["pulpcore-manager", "shell", "-c", shell_cmd], text=True, capture_output=True) + assert exc.stdout.rstrip("\n") == "new" + + # Test valid timeout settings (float >= 0) + data = { + "total_timeout": 1.0, + "connect_timeout": 66.0, + "sock_connect_timeout": 0.0, + "sock_read_timeout": 3.1415926535, + } + response = file_remote_api_client.partial_update(remote.pulp_href, data) + monitor_task(response.task) + new_remote = file_remote_api_client.read(remote.pulp_href) + _compare_results(data, new_remote) + + # Test 
invalid float < 0 + data = { + "total_timeout": -1.0, + } + with pytest.raises(ApiException): + file_remote_api_client.partial_update(remote.pulp_href, data) + + # Test invalid non-float + data = { + "connect_timeout": "abc", + } + with pytest.raises(ApiException): + file_remote_api_client.partial_update(remote.pulp_href, data) + + # Test reset to empty + data = { + "total_timeout": False, + "connect_timeout": None, + "sock_connect_timeout": False, + "sock_read_timeout": None, + } + response = file_remote_api_client.partial_update(remote.pulp_href, data) + monitor_task(response.task) + new_remote = file_remote_api_client.read(remote.pulp_href) + _compare_results(data, new_remote) + + # Test that headers value must be a list of dicts + data = {"headers": {"Connection": "keep-alive"}} + with pytest.raises(ApiException): + file_remote_api_client.partial_update(remote.pulp_href, data) + data = {"headers": [1, 2, 3]} + with pytest.raises(ApiException): + file_remote_api_client.partial_update(remote.pulp_href, data) + data = {"headers": [{"Connection": "keep-alive"}]} + response = file_remote_api_client.partial_update(remote.pulp_href, data) + monitor_task(response.task) + + # Test deleting a remote + response = file_remote_api_client.delete(remote.pulp_href) + monitor_task(response.task) + # verify the delete + with pytest.raises(ApiException): + file_remote_api_client.read(remote.pulp_href) @pytest.mark.parallel -class CreatePulpLabelsRemoteTestCase(unittest.TestCase): +def test_remote_pulp_labels(file_remote_api_client, gen_object_with_cleanup): """A test case for verifying whether pulp_labels are correctly assigned to a new remote.""" - @classmethod - def setUpClass(cls): - """Initialize class-wide variables""" - cls.cfg = config.get_config() + pulp_labels = {"environment": "dev"} - cls.api_client = api.Client(cls.cfg, api.json_handler) - cls.file_client = FileApiClient(cls.cfg.get_bindings_config()) - cls.remotes_api = RemotesFileApi(cls.file_client) + # Test if a 
created remote contains pulp_labels when passing JSON data. + remote = gen_object_with_cleanup( + file_remote_api_client, gen_file_remote(pulp_labels=pulp_labels) + ) - cls.pulp_labels = {"environment": "dev"} + assert remote.pulp_labels == pulp_labels - def test_create_remote(self): - """Test if a created remote contains pulp_labels when passing JSON data.""" - remote_attrs = { - "name": utils.uuid4(), - "url": FILE_FIXTURE_MANIFEST_URL, - "pulp_labels": self.pulp_labels, - } - remote = self.remotes_api.create(remote_attrs) - self.addCleanup(self.remotes_api.delete, remote.pulp_href) + # Test if a created remote contains pulp_labels when passing form data. + config = file_remote_api_client.api_client.configuration + auth = BasicAuth(login=config.username, password=config.password) + url = urljoin(config.host, remote.pulp_href[:-37]) # Cut off the UUID + remote = post_url( + url, gen_file_remote(pulp_labels=json.dumps(pulp_labels)), return_body=True, auth=auth + ) - self.assertEqual(remote.pulp_labels, self.pulp_labels) - - def test_create_remote_using_form(self): - """Test if a created remote contains pulp_labels when passing form data.""" - remote_attrs = { - "name": utils.uuid4(), - "url": FILE_FIXTURE_MANIFEST_URL, - "pulp_labels": json.dumps(self.pulp_labels), - } - remote = self.api_client.post(FILE_REMOTE_PATH, data=remote_attrs) - self.addCleanup(self.remotes_api.delete, remote["pulp_href"]) - self.assertEqual(remote["pulp_labels"], self.pulp_labels) + assert json.loads(remote)["pulp_labels"] == pulp_labels @pytest.mark.parallel -class RemoteFileURLsValidationTestCase(unittest.TestCase): +def test_file_remote_url_validation(file_remote_api_client, gen_object_with_cleanup): """A test case that verifies the validation of remotes' URLs.""" - @classmethod - def setUpClass(cls): - """Initialize class-wide variables""" - cls.cfg = config.get_config() - - cls.api_client = api.Client(cls.cfg, api.json_handler) - cls.file_client = 
FileApiClient(cls.cfg.get_bindings_config()) - cls.remotes_api = RemotesFileApi(cls.file_client) - - def test_invalid_absolute_pathname(self): - """Test the validation of an invalid absolute pathname.""" - remote_attrs = { - "name": utils.uuid4(), - "url": "file://error/path/name", - } - self.raise_for_invalid_request(remote_attrs) - - def test_invalid_import_path(self): - """Test the validation of an invalid import pathname.""" - remote_attrs = { - "name": utils.uuid4(), - "url": "file:///error/path/name", - } - self.raise_for_invalid_request(remote_attrs) - - def raise_for_invalid_request(self, remote_attrs): + def raise_for_invalid_request(remote_attrs): """Check if Pulp returns HTTP 400 after issuing an invalid request.""" - with self.assertRaises(ApiException) as ae: - remote = self.remotes_api.create(remote_attrs) - self.addCleanup(self.remotes_api.delete, remote.pulp_href) - - self.assertEqual(ae.exception.status, 400) - - def test_valid_import_path(self): - """Test the creation of a remote after passing a valid URL.""" - remote_attrs = { - "name": utils.uuid4(), - "url": "file:///tmp/good", - } + with pytest.raises(ApiException) as ae: + file_remote_api_client.create(remote_attrs) + assert ae.value.status == 400 + + # Test the validation of an invalid absolute pathname. + remote_attrs = { + "name": utils.uuid4(), + "url": "file://tmp/good", + } + raise_for_invalid_request(remote_attrs) + + # Test the validation of an invalid import pathname. + remote_attrs = { + "name": utils.uuid4(), + "url": "file:///error/path/name", + } + raise_for_invalid_request(remote_attrs) + + # Test the creation of a remote after passing a valid URL. + remote_attrs = { + "name": utils.uuid4(), + "url": "file:///tmp/good", + } + gen_object_with_cleanup(file_remote_api_client, remote_attrs) + + # Test that the remote url can't contain username/password. 
+ remote_attrs = { + "name": utils.uuid4(), + "url": "http://elladan@rivendell.org", + } + raise_for_invalid_request(remote_attrs) + + remote_attrs = { + "name": utils.uuid4(), + "url": "http://elladan:pass@rivendell.org", + } + raise_for_invalid_request(remote_attrs) - remote = self.remotes_api.create(remote_attrs) - self.addCleanup(self.remotes_api.delete, remote.pulp_href) - def test_no_username_password(self): - """Test that the remote url can't contain username/password.""" - remote_attrs = { - "name": utils.uuid4(), - "url": "http://elladan@rivendell.org", - } - self.raise_for_invalid_request(remote_attrs) - - remote_attrs = { - "name": utils.uuid4(), - "url": "http://elladan:pass@rivendell.org", - } - self.raise_for_invalid_request(remote_attrs) +@pytest.mark.parallel +def test_repository_remote_filter( + file_repo_api_client, file_remote_api_client, gen_object_with_cleanup +): + """Test repository's remote filter and full functionality of a HREF filter.""" + + remote1 = gen_object_with_cleanup(file_remote_api_client, gen_file_remote()) + remote2 = gen_object_with_cleanup(file_remote_api_client, gen_file_remote()) + remote3 = gen_object_with_cleanup(file_remote_api_client, gen_file_remote()) + + repo1 = gen_object_with_cleanup(file_repo_api_client, gen_repo()) + repo2 = gen_object_with_cleanup(file_repo_api_client, gen_repo(remote=remote1.pulp_href)) + repo3 = gen_object_with_cleanup(file_repo_api_client, gen_repo(remote=remote2.pulp_href)) + repo4 = gen_object_with_cleanup(file_repo_api_client, gen_repo(remote=remote2.pulp_href)) + name_in = [repo1.name, repo2.name, repo3.name, repo4.name] + + # Check that name__in filter is working + response = file_repo_api_client.list(name__in=name_in) + assert response.count == 4 + + # Test that supplying a specific remote only returns repositories with that remote + response = file_repo_api_client.list(remote=remote1.pulp_href) + assert response.count == 1 + assert response.results[0].pulp_href == repo2.pulp_href + + 
response = file_repo_api_client.list(remote=remote2.pulp_href) + assert response.count == 2 + assert {r.pulp_href for r in response.results} == {repo3.pulp_href, repo4.pulp_href} + + response = file_repo_api_client.list(remote=remote3.pulp_href) + assert response.count == 0 + + # Test that supplying 'null' will only show repositories without a remote + response = file_repo_api_client.list(remote="null", name__in=name_in) + assert response.count == 1 + assert response.results[0].pulp_href == repo1.pulp_href + + # Test that supplying a base URI of a remote will show all repositories with similar remotes + response = file_repo_api_client.list(remote=FILE_REMOTE_PATH, name__in=name_in) + assert response.count == 3 + assert {r.pulp_href for r in response.results} == { + repo2.pulp_href, + repo3.pulp_href, + repo4.pulp_href, + } diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py b/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py index 626794c6..7e18ade1 100644 --- a/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py +++ b/pulp_file/tests/functional/api/from_pulpcore/test_distributions.py @@ -1,513 +1,278 @@ """Tests that perform actions over distributions.""" -import csv -import hashlib import pytest -from time import sleep -import unittest -from urllib.parse import urljoin +import json +from uuid import uuid4 -from pulp_smash import api, cli, config, utils -from pulp_smash.pulp3.bindings import delete_orphans, monitor_task +from pulp_smash.pulp3.bindings import monitor_task from pulp_smash.pulp3.utils import ( - download_content_unit, - download_content_unit_return_requests_response, gen_distribution, gen_repo, - get_content, - get_versions, - modify_repo, - sync, - utils as pulp3_utils, ) -from requests.exceptions import HTTPError - -from pulpcore.client.pulpcore import ApiException, StatusApi from pulpcore.client.pulp_file import ( - ContentFilesApi, - DistributionsFileApi, - FileFilePublication, - 
PublicationsFileApi, - RemotesFileApi, - RepositoriesFileApi, RepositorySyncURL, + FileFileDistribution, + FileFilePublication, ) - -from pulp_file.tests.functional.utils import ( - create_file_publication, - gen_file_remote, - gen_file_client, - gen_pulpcore_client, -) -from .constants import ( - BASE_DISTRIBUTION_PATH, - FILE_CHUNKED_FIXTURE_MANIFEST_URL, - FILE_CONTENT_NAME, - FILE_DISTRIBUTION_PATH, - FILE_FIXTURE_COUNT, - FILE_REMOTE_PATH, - FILE_URL, - FILE_REPO_PATH, -) - - -class CRUDPublicationDistributionTestCase(unittest.TestCase): - """CRUD Publication Distribution.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = api.Client(cls.cfg) - - def setUp(self): - """Arrange the test.""" - self.attr = ( - "name", - "base_path", - ) - self.distribution = {} - self.publication = {} - self.remote = {} - self.repo = {} - - def tearDown(self): - """Clean variables.""" - for resource in (self.publication, self.remote, self.repo): - if resource: - self.client.delete(resource["pulp_href"]) - - def test_crud_workflow(self): - self._create() - self._read() - self._partially_update() - self._fully_update() - self._list() - self._delete_distribution() - - def _create(self): - """Create a publication distribution. - - Do the following: - - 1. Create a repository and 3 repository versions with at least 1 file - content in it. Create a publication using the second repository - version. - 2. Create a distribution with 'publication' field set to - the publication from step (1). - 3. Assert the distribution got created correctly with the correct - base_path, name, and publication. Assert that content guard is - unset. - 4. Assert that publication has a 'distributions' reference to the - distribution (it's backref). 
- - """ - self.repo.update(self.client.post(FILE_REPO_PATH, gen_repo())) - self.remote.update(self.client.post(FILE_REMOTE_PATH, gen_file_remote())) - # create 3 repository versions - sync(self.cfg, self.remote, self.repo) - self.repo = self.client.get(self.repo["pulp_href"]) - for file_content in get_content(self.repo)[FILE_CONTENT_NAME]: - modify_repo(self.cfg, self.repo, remove_units=[file_content]) - - self.repo = self.client.get(self.repo["pulp_href"]) - - versions = get_versions(self.repo) - - self.publication.update( - create_file_publication(self.cfg, self.repo, versions[1]["pulp_href"]) - ) - - self.distribution.update( - self.client.post( - FILE_DISTRIBUTION_PATH, gen_distribution(publication=self.publication["pulp_href"]) - ) - ) - - self.publication = self.client.get(self.publication["pulp_href"]) - - # content_guard and repository parameters unset. - for key, val in self.distribution.items(): - if key in ["content_guard", "repository"]: - self.assertIsNone(val, self.distribution) - else: - self.assertIsNotNone(val, self.distribution) - - self.assertEqual( - self.distribution["publication"], self.publication["pulp_href"], self.distribution - ) - - self.assertEqual( - self.publication["distributions"][0], self.distribution["pulp_href"], self.publication +from pulpcore.client.pulp_file.exceptions import ApiException + + +@pytest.mark.parallel +def test_crud_publication_distribution( + file_content_api_client, + file_repo, + file_fixture_gen_remote_ssl, + file_repo_api_client, + file_repo_ver_api_client, + file_pub_api_client, + basic_manifest_path, + gen_object_with_cleanup, + file_distro_api_client, +): + # Create a remote and sync from it to create the first repository version + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + + # Remove content to create two more repository versions + 
first_repo_version_href = file_repo_api_client.read(file_repo.pulp_href).latest_version_href + v1_content = file_content_api_client.list(repository_version=first_repo_version_href).results + + for i in range(2): + monitor_task( + file_repo_api_client.modify( + file_repo.pulp_href, {"remove_content_units": [v1_content[i].pulp_href]} + ).task ) - def _read(self): - """Read distribution by its href.""" - distribution = self.client.get(self.distribution["pulp_href"]) - for key, val in self.distribution.items(): - with self.subTest(key=key): - self.assertEqual(distribution[key], val) - - def _partially_update(self): - """Update a distribution using PATCH.""" - for key in self.attr: - with self.subTest(key=key): - self._do_partially_update_attr(key) - - def _fully_update(self): - """Update a distribution using PUT.""" - for key in self.attr: - with self.subTest(key=key): - self._do_fully_update_attr(key) - - def _list(self): - """Test the generic distribution list endpoint.""" - distributions = self.client.get(BASE_DISTRIBUTION_PATH) - assert self.distribution["pulp_href"] in [distro["pulp_href"] for distro in distributions] - - def _delete_distribution(self): - """Delete a distribution.""" - self.client.delete(self.distribution["pulp_href"]) - with self.assertRaises(HTTPError): - self.client.get(self.distribution["pulp_href"]) - - def _do_fully_update_attr(self, attr): - """Update a distribution attribute using HTTP PUT. - - :param attr: The name of the attribute to update. - """ - distribution = self.client.get(self.distribution["pulp_href"]) - string = utils.uuid4() - distribution[attr] = string - self.client.put(distribution["pulp_href"], distribution) - - # verify the update - distribution = self.client.get(distribution["pulp_href"]) - self.assertEqual(string, distribution[attr], distribution) - - def _do_partially_update_attr(self, attr): - """Update a distribution using HTTP PATCH. - - :param attr: The name of the attribute to update. 
- """ - string = utils.uuid4() - self.client.patch(self.distribution["pulp_href"], {attr: string}) - - # Verify the update - distribution = self.client.get(self.distribution["pulp_href"]) - self.assertEqual(string, distribution[attr], self.distribution) - - -class DistributionBasePathTestCase(unittest.TestCase): - """Test possible values for ``base_path`` on a distribution.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = api.Client(cls.cfg) - - def setUp(self): - """Set up resources.""" - body = gen_distribution() - body["base_path"] = body["base_path"].replace("-", "/") - self.distribution = self.client.post(FILE_DISTRIBUTION_PATH, body) - - def tearDown(self): - """Clean up resources.""" - response = self.client.delete(self.distribution["pulp_href"]) - monitor_task(response["pulp_href"]) - - def test_negative_create_using_spaces(self): - """Test that spaces can not be part of ``base_path``.""" - self.try_create_distribution(base_path=utils.uuid4().replace("-", " ")) - self.try_update_distribution(base_path=utils.uuid4().replace("-", " ")) - - def test_negative_create_using_begin_slash(self): - """Test that slash cannot be in the begin of ``base_path``.""" - self.try_create_distribution(base_path="/" + utils.uuid4()) - self.try_update_distribution(base_path="/" + utils.uuid4()) - - def test_negative_create_using_end_slash(self): - """Test that slash cannot be in the end of ``base_path``.""" - self.try_create_distribution(base_path=utils.uuid4() + "/") - self.try_update_distribution(base_path=utils.uuid4() + "/") - - def test_negative_create_using_non_unique_base_path(self): - """Test that ``base_path`` can not be duplicated.""" - self.try_create_distribution(base_path=self.distribution["base_path"]) - - def test_negative_create_using_overlapping_base_path(self): - """Test that distributions can't have overlapping ``base_path``. - - See: `Pulp #2987`_. 
- """ - base_path = self.distribution["base_path"].rsplit("/", 1)[0] - self.try_create_distribution(base_path=base_path) - - base_path = "/".join((self.distribution["base_path"], utils.uuid4().replace("-", "/"))) - self.try_create_distribution(base_path=base_path) - - def try_create_distribution(self, **kwargs): - """Unsuccessfully create a distribution. - - Merge the given kwargs into the body of the request. - """ - body = gen_distribution() - body.update(kwargs) - with self.assertRaises(HTTPError) as ctx: - self.client.post(FILE_DISTRIBUTION_PATH, body) - - self.assertIsNotNone( - ctx.exception.response.json()["base_path"], ctx.exception.response.json() - ) - - def try_update_distribution(self, **kwargs): - """Unsuccessfully update a distribution with HTTP PATCH. - - Use the given kwargs as the body of the request. - """ - with self.assertRaises(HTTPError) as ctx: - self.client.patch(self.distribution["pulp_href"], kwargs) - - self.assertIsNotNone( - ctx.exception.response.json()["base_path"], ctx.exception.response.json() - ) - - -class ContentServePublicationDistributionTestCase(unittest.TestCase): - """Verify that content is served from a publication distribution. - - Assert that published metadata and content is served from a publication - distribution. 
- - This test targets the following issue: - - `Pulp #4847 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = gen_file_client() - - cls.content_api = ContentFilesApi(cls.client) - cls.repo_api = RepositoriesFileApi(cls.client) - cls.remote_api = RemotesFileApi(cls.client) - cls.publications_api = PublicationsFileApi(cls.client) - cls.distributions_api = DistributionsFileApi(cls.client) - - def setUp(self): - delete_orphans() - - def test_nonpublished_content_not_served(self): - """Verify content that hasn't been published is not served.""" - self.setup_download_test("immediate", publish=False) - files = ["", "1.iso", "2.iso", "3.iso"] - for file in files: - with self.assertRaises(HTTPError, msg=f"{file}") as cm: - download_content_unit(self.cfg, self.distribution.to_dict(), file) - self.assertEqual(cm.exception.response.status_code, 404, f"{file}") - - def test_content_served_on_demand(self): - """Assert that on_demand content can be properly downloaded.""" - self.setup_download_test("on_demand") - self.do_test_content_served() - - def test_content_served_immediate(self): - """Assert that downloaded content can be properly downloaded.""" - self.setup_download_test("immediate") - self.do_test_content_served() - - def test_content_served_streamed(self): - """Assert that streamed content can be properly downloaded.""" - self.setup_download_test("streamed") - self.do_test_content_served() - - def test_content_served_immediate_with_range_request_inside_one_chunk(self): - """Assert that downloaded content can be properly downloaded with range requests.""" - self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - range_headers = {"Range": "bytes=1048586-1049586"} - num_bytes = 1001 - self.do_range_request_download_test(range_headers, num_bytes) - - def test_content_served_immediate_with_range_request_over_three_chunks(self): - """Assert that downloaded content can be 
properly downloaded with range requests.""" - self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - range_headers = {"Range": "bytes=1048176-2248576"} - num_bytes = 1200401 - self.do_range_request_download_test(range_headers, num_bytes) - - def test_content_served_on_demand_with_range_request_over_three_chunks(self): - """Assert that on_demand content can be properly downloaded with range requests.""" - self.setup_download_test("on_demand", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - range_headers = {"Range": "bytes=1048176-2248576"} - num_bytes = 1200401 - self.do_range_request_download_test(range_headers, num_bytes) - - def test_content_served_streamed_with_range_request_over_three_chunks(self): - """Assert that streamed content can be properly downloaded with range requests.""" - self.setup_download_test("streamed", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - range_headers = {"Range": "bytes=1048176-2248576"} - num_bytes = 1200401 - self.do_range_request_download_test(range_headers, num_bytes) - - def test_content_served_immediate_with_multiple_different_range_requests(self): - """Assert that multiple requests with different Range header values work as expected.""" - self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - range_headers = {"Range": "bytes=1048176-2248576"} - num_bytes = 1200401 - self.do_range_request_download_test(range_headers, num_bytes) - range_headers = {"Range": "bytes=2042176-3248576"} - num_bytes = 1206401 - self.do_range_request_download_test(range_headers, num_bytes) - - def test_content_served_immediate_with_range_request_invalid_start_value(self): - """Assert that range requests with a negative start value errors as expected.""" - cfg = config.get_config() - cli_client = cli.Client(cfg) - storage = utils.get_pulp_setting(cli_client, "DEFAULT_FILE_STORAGE") - if storage != "pulpcore.app.models.storage.FileSystem": - self.skipTest("The S3 test API project doesn't handle invalid Range values 
correctly") - self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - with self.assertRaises(HTTPError) as cm: - download_content_unit_return_requests_response( - self.cfg, self.distribution.to_dict(), "1.iso", headers={"Range": "bytes=-1-11"} - ) - self.assertEqual(cm.exception.response.status_code, 416) - - def test_content_served_immediate_with_range_request_too_large_end_value(self): - """Assert that a range request with a end value that is larger than the data works still.""" - self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - range_headers = {"Range": "bytes=10485260-10485960"} - num_bytes = 500 - self.do_range_request_download_test(range_headers, num_bytes) - - def test_content_served_immediate_with_range_request_start_value_larger_than_content(self): - """Assert that a range request with a start value larger than the content errors.""" - self.setup_download_test("immediate", url=FILE_CHUNKED_FIXTURE_MANIFEST_URL) - with self.assertRaises(HTTPError) as cm: - download_content_unit_return_requests_response( - self.cfg, - self.distribution.to_dict(), - "1.iso", - headers={"Range": "bytes=10485860-10485870"}, - ) - self.assertEqual(cm.exception.response.status_code, 416) - - @pytest.mark.skip("Sometimes PostgreSQL doesn't restart properly in CI.") - def test_content_served_after_db_restart(self): - """ - Assert that content can be downloaded after the database has been restarted. - This test also check that the HTML page with a list of distributions is also - available after the connection to the database has been closed. - """ - cfg = config.get_config() - pulp_host = cfg.hosts[0] - svc_mgr = cli.ServiceManager(cfg, pulp_host) - if svc_mgr._svc_mgr == "s6": - postgresql_service_name = "postgresql" - else: - postgresql_service_name = "*postgresql*" - postgresql_found = svc_mgr.is_active([postgresql_service_name]) - self.assertTrue( - postgresql_found, "PostgreSQL service not found or is not active. 
Can't restart it." - ) - svc_mgr.restart([postgresql_service_name]) - # Wait for postgres to come back and pulpcore-api to recover - status_api = StatusApi(gen_pulpcore_client()) - for i in range(5): - sleep(2) - try: - status_api.status_read() - break - except ApiException: - if i == 4: - raise - self.setup_download_test("immediate") - self.do_test_content_served() - url_fragments = [ - cfg.get_content_host_base_url(), - "pulp/content", - ] - content_app_root = "/".join(url_fragments) - pulp3_utils.http_get(content_app_root) - - def setup_download_test(self, policy, url=None, publish=True): - # Create a repository - self.repo = self.repo_api.create(gen_repo()) - self.addCleanup(self.repo_api.delete, self.repo.pulp_href) - - # Create a remote - remote_options = {"policy": policy} - if url: - remote_options["url"] = url - - self.remote = self.remote_api.create(gen_file_remote(**remote_options)) - self.addCleanup(self.remote_api.delete, self.remote.pulp_href) - - # Sync the repository. - repository_sync_data = RepositorySyncURL(remote=self.remote.pulp_href) - sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - - if publish: - # Create a publication. - publish_data = FileFilePublication(repository=self.repo.pulp_href) - publish_response = self.publications_api.create(publish_data) - publication_href = monitor_task(publish_response.task).created_resources[0] - self.addCleanup(self.publications_api.delete, publication_href) - serve, served_href = "publication", publication_href - else: - serve, served_href = "repository", self.repo.pulp_href - - # Create a distribution. 
- response = self.distributions_api.create(gen_distribution(**{serve: served_href})) - distribution_href = monitor_task(response.task).created_resources[0] - self.distribution = self.distributions_api.read(distribution_href) - self.addCleanup(self.distributions_api.delete, self.distribution.pulp_href) - - def do_test_content_served(self): - file_path = "1.iso" - - req1 = download_content_unit(self.cfg, self.distribution.to_dict(), file_path) - req2 = download_content_unit(self.cfg, self.distribution.to_dict(), file_path) - fixtures_hash = hashlib.sha256(utils.http_get(urljoin(FILE_URL, file_path))).hexdigest() - - first_dl_hash = hashlib.sha256(req1).hexdigest() - second_dl_hash = hashlib.sha256(req2).hexdigest() - - self.assertEqual(first_dl_hash, fixtures_hash) - self.assertEqual(first_dl_hash, second_dl_hash) - - manifest = download_content_unit(self.cfg, self.distribution.to_dict(), "PULP_MANIFEST") - pulp_manifest = list( - csv.DictReader(manifest.decode("utf-8").splitlines(), ("name", "checksum", "size")) - ) - - self.assertEqual(len(pulp_manifest), FILE_FIXTURE_COUNT, pulp_manifest) - - def do_range_request_download_test(self, range_header, expected_bytes): - file_path = "1.iso" - - req1_reponse = download_content_unit_return_requests_response( - self.cfg, self.distribution.to_dict(), file_path, headers=range_header - ) - req2_response = download_content_unit_return_requests_response( - self.cfg, self.distribution.to_dict(), file_path, headers=range_header - ) - - self.assertEqual(expected_bytes, len(req1_reponse.content)) - self.assertEqual(expected_bytes, len(req2_response.content)) - self.assertEqual(req1_reponse.content, req2_response.content) - - self.assertEqual(req1_reponse.status_code, 206) - self.assertEqual(req1_reponse.status_code, req2_response.status_code) - - self.assertEqual(str(expected_bytes), req1_reponse.headers["Content-Length"]) - self.assertEqual(str(expected_bytes), req2_response.headers["Content-Length"]) - - self.assertEqual( - 
req1_reponse.headers["Content-Range"], req2_response.headers["Content-Range"] - ) + # Create a publication from version 2 + repo_versions = file_repo_ver_api_client.list(file_repo.pulp_href).results + publish_data = FileFilePublication(repository_version=repo_versions[2].pulp_href) + publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + distribution_data = gen_distribution(publication=publication.pulp_href) + distribution = gen_object_with_cleanup(file_distro_api_client, distribution_data) + + # Refresh the publication data + publication = file_pub_api_client.read(publication.pulp_href) + + # Assert on all the field values + assert distribution.content_guard is None + assert distribution.repository is None + assert distribution.publication == publication.pulp_href + assert distribution.base_path == distribution_data["base_path"] + assert distribution.name == distribution_data["name"] + + # Assert that the publication has a reference to the distribution + assert publication.distributions[0] == distribution.pulp_href + + # Test updating name with 'partial_update' + new_name = str(uuid4()) + monitor_task( + file_distro_api_client.partial_update(distribution.pulp_href, {"name": new_name}).task + ) + distribution = file_distro_api_client.read(distribution.pulp_href) + assert distribution.name == new_name + + # Test updating base_path with 'partial_update' + new_base_path = str(uuid4()) + monitor_task( + file_distro_api_client.partial_update( + distribution.pulp_href, {"base_path": new_base_path} + ).task + ) + distribution = file_distro_api_client.read(distribution.pulp_href) + assert distribution.base_path == new_base_path + + # Test updating name with 'update' + new_name = str(uuid4()) + distribution.name = new_name + monitor_task(file_distro_api_client.update(distribution.pulp_href, distribution).task) + distribution = file_distro_api_client.read(distribution.pulp_href) + assert distribution.name == new_name + + # Test updating base_path with 
'update' + new_base_path = str(uuid4()) + distribution.base_path = new_base_path + monitor_task(file_distro_api_client.update(distribution.pulp_href, distribution).task) + distribution = file_distro_api_client.read(distribution.pulp_href) + assert distribution.base_path == new_base_path + + # Test the generic distribution list endpoint. + distributions = file_distro_api_client.list() + assert distribution.pulp_href in [distro.pulp_href for distro in distributions.results] + + # Delete a distribution. + file_distro_api_client.delete(distribution.pulp_href) + with pytest.raises(ApiException): + file_distro_api_client.read(distribution.pulp_href) + + +def _create_distribution_and_assert(client, data): + with pytest.raises(ApiException) as exc: + client.create(data) + assert json.loads(exc.value.body)["base_path"] is not None + + +def _update_distribution_and_assert(client, distribution_href, data): + with pytest.raises(ApiException) as exc: + client.update(distribution_href, data) + assert json.loads(exc.value.body)["base_path"] is not None + + +@pytest.mark.parallel +def test_distribution_base_path( + gen_object_with_cleanup, + file_distro_api_client, +): + distribution_data = gen_distribution(base_path=str(uuid4()).replace("-", "/")) + distribution = gen_object_with_cleanup(file_distro_api_client, distribution_data) + + # Test that spaces can not be part of ``base_path``. + _create_distribution_and_assert( + file_distro_api_client, gen_distribution(base_path=str(uuid4()).replace("-", " ")) + ) + + # Test that slash cannot be at the beginning of ``base_path``. 
+ _create_distribution_and_assert( + file_distro_api_client, gen_distribution(base_path=f"/{str(uuid4())}") + ) + _update_distribution_and_assert( + file_distro_api_client, + distribution.pulp_href, + gen_distribution(base_path=f"/{str(uuid4())}"), + ) + + # Test that slash cannot be at the end of ``base_path``. + _create_distribution_and_assert( + file_distro_api_client, gen_distribution(base_path=f"{str(uuid4())}/") + ) + + _update_distribution_and_assert( + file_distro_api_client, + distribution.pulp_href, + gen_distribution(base_path=f"{str(uuid4())}/"), + ) + + # Test that ``base_path`` can not be duplicated. + _create_distribution_and_assert( + file_distro_api_client, gen_distribution(base_path=distribution.base_path) + ) + + # Test that distributions can't have overlapping ``base_path``. + base_path = distribution.base_path.rsplit("/", 1)[0] + _create_distribution_and_assert(file_distro_api_client, gen_distribution(base_path=base_path)) + + base_path = "/".join((distribution.base_path, str(uuid4()).replace("-", "/"))) + _create_distribution_and_assert(file_distro_api_client, gen_distribution(base_path=base_path)) + + +@pytest.mark.parallel +def test_distribution_filtering( + file_content_api_client, + file_distro_api_client, + file_fixture_gen_remote, + file_random_content_unit, + file_repo_api_client, + file_pub_api_client, + gen_object_with_cleanup, + write_3_iso_file_fixture_data_factory, +): + """Test distribution filtering based on the content exposed from the distribution.""" + + def generate_repo_with_content(): + repo = gen_object_with_cleanup(file_repo_api_client, gen_repo()) + repo_manifest_path = write_3_iso_file_fixture_data_factory(str(uuid4())) + remote = file_fixture_gen_remote(manifest_path=repo_manifest_path, policy="on_demand") + body = RepositorySyncURL(remote=remote.pulp_href) + task_response = file_repo_api_client.sync(repo.pulp_href, body).task + version_href = monitor_task(task_response).created_resources[0] + content = 
file_content_api_client.list(repository_version_added=version_href).results[0] + return repo, content + + repo1, content1 = generate_repo_with_content() + + publish_data = FileFilePublication(repository=repo1.pulp_href) + publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + + # test if a publication attached to a distribution exposes the published content + data = FileFileDistribution( + name=str(uuid4()), base_path=str(uuid4()), publication=publication.pulp_href + ) + distribution_pub1 = gen_object_with_cleanup(file_distro_api_client, data) + + results = file_distro_api_client.list(with_content=content1.pulp_href).results + assert [distribution_pub1] == results + + # test if a publication pointing to repository version no. 0 does not expose any content + publish_data = FileFilePublication(repository_version=repo1.versions_href + "0/") + publication_version_0 = gen_object_with_cleanup(file_pub_api_client, publish_data) + data = FileFileDistribution( + name=str(uuid4()), base_path=str(uuid4()), publication=publication_version_0.pulp_href + ) + gen_object_with_cleanup(file_distro_api_client, data) + + results = file_distro_api_client.list(with_content=content1.pulp_href).results + assert [distribution_pub1] == results + + # test if a repository assigned to a distribution exposes the content available in the latest + # publication for that repository's versions + data = FileFileDistribution( + name=str(uuid4()), base_path=str(uuid4()), repository=repo1.pulp_href + ) + distribution_repopub = gen_object_with_cleanup(file_distro_api_client, data) + results = set( + d.pulp_href for d in file_distro_api_client.list(with_content=content1.pulp_href).results + ) + assert {distribution_pub1.pulp_href, distribution_repopub.pulp_href} == results + + repo2, content2 = generate_repo_with_content() + + # add new content to the first repository to see whether the distribution filtering correctly + # traverses to the latest publication concerning the repository 
under the question that should + # contain the content + response = file_repo_api_client.modify( + repo1.pulp_href, + {"remove_content_units": [], "add_content_units": [content2.pulp_href]}, + ) + monitor_task(response.task) + assert [] == file_distro_api_client.list(with_content=content2.pulp_href).results + + publish_data = FileFilePublication(repository=repo1.pulp_href) + new_publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + + # test later (20 lines below) if the publication now exposes the recently added content in the + # affected distributions (i.e., the distribution with the reference to a repository and the + # new one) + data = FileFileDistribution( + name="pub3", base_path="pub3", publication=new_publication.pulp_href + ) + distribution_pub3 = gen_object_with_cleanup(file_distro_api_client, data) + + # test if a repository without any attached publication does not expose any kind of content + # to a user even though the content is still present in the latest repository version + data = FileFileDistribution( + name=str(uuid4()), base_path=str(uuid4()), repository=repo2.pulp_href + ) + distribution_repo_only = gen_object_with_cleanup(file_distro_api_client, data) + + results = set( + d.pulp_href for d in file_distro_api_client.list(with_content=content2.pulp_href).results + ) + assert {distribution_pub3.pulp_href, distribution_repopub.pulp_href} == results + + # create a publication to see whether the content of the second repository is now served or not + publish_data = FileFilePublication(repository=repo2.pulp_href) + gen_object_with_cleanup(file_pub_api_client, publish_data) + + results = set( + d.pulp_href for d in file_distro_api_client.list(with_content=content2.pulp_href).results + ) + assert { + distribution_pub3.pulp_href, + distribution_repopub.pulp_href, + distribution_repo_only.pulp_href, + } == results + + # test if a random content unit is not accessible from any distribution + results = 
file_distro_api_client.list(with_content=file_random_content_unit.pulp_href).results + assert [] == results diff --git a/pulp_file/tests/functional/api/from_pulpcore/test_remove_plugin.py b/pulp_file/tests/functional/api/from_pulpcore/test_remove_plugin.py deleted file mode 100644 index f52a235e..00000000 --- a/pulp_file/tests/functional/api/from_pulpcore/test_remove_plugin.py +++ /dev/null @@ -1,40 +0,0 @@ -import pytest - -from pulp_smash.pulp3.utils import gen_repo - - -# Marking test trylast to ensure other tests run even if this fails. -@pytest.mark.nightly -@pytest.mark.trylast -def test_remove_plugin( - cli_client, - delete_orphans_pre, - file_fixture_gen_file_repo, - file_repo_api_client, - start_and_check_services, - stop_and_check_services, -): - repo_name = "repo for plugin removal test" - file_repo_pre_removal = file_repo_api_client.create(gen_repo(name=repo_name)) - - assert stop_and_check_services() is True - - res = cli_client.run(["pulpcore-manager", "remove-plugin", "file"]) - assert "Successfully removed" in res.stdout - num_migrations = res.stdout.count("Unapplying file.") - num_models = res.stdout.count("Removing model") - - # Without uninstalling the package just run migrations again to mimic the reinstallation - # of a plugin at least from pulp's perspective - res = cli_client.run(["pulpcore-manager", "migrate", "file"]) - assert res.stdout.count("Applying file.") == num_migrations - # This assumes each model gets its own access policy plus FileRepositoryVersion - assert res.stdout.count("created.") == num_models + 1 - - assert start_and_check_services() is True - - # create a repo with the same name as before the removal - file_repo_post_reinstall = file_fixture_gen_file_repo(name=repo_name) - - assert file_repo_pre_removal.name == file_repo_post_reinstall.name - assert file_repo_pre_removal.pulp_href != file_repo_post_reinstall.pulp_href diff --git a/pulp_file/tests/functional/api/test_acs.py b/pulp_file/tests/functional/api/test_acs.py 
index 7a448b34..3f2681df 100644 --- a/pulp_file/tests/functional/api/test_acs.py +++ b/pulp_file/tests/functional/api/test_acs.py @@ -1,166 +1,268 @@ import hashlib -import unittest -from random import choice +import pytest +import uuid from urllib.parse import urljoin -from pulp_smash import config, utils from pulp_smash.pulp3.bindings import ( - PulpTaskError, - delete_orphans, monitor_task, monitor_task_group, ) -from pulp_smash.pulp3.utils import download_content_unit, gen_distribution -from pulpcore.client.pulp_file import ( - AcsFileApi, - DistributionsFileApi, - PublicationsFileApi, - RemotesFileApi, - RepositoriesFileApi, - RepositorySyncURL, -) +from pulp_smash.pulp3.utils import gen_distribution + +from pulpcore.client.pulp_file import RepositorySyncURL from pulpcore.client.pulp_file.exceptions import ApiException -from pulp_file.tests.functional.constants import ( - FILE_FIXTURE_MANIFEST_URL, - FILE_FIXTURE_URL, - FILE_MANIFEST_ONLY_FIXTURE_URL, - PULP_FIXTURES_BASE_URL, -) from pulp_file.tests.functional.utils import ( - gen_file_client, - gen_file_remote, - gen_repo, - get_file_content_paths, + download_file, + get_files_in_manifest, ) -class AlternateContentSourceTestCase(unittest.TestCase): - """Test File ACS.""" - - @classmethod - def setUpClass(cls): - """ - Create class-wide variables. - - Variables 'paths' and 'paths_updated' are defined as strings. - In same way data are send from user. 
- """ - cls.cfg = config.get_config() - cls.file_client = gen_file_client() - cls.repo_api = RepositoriesFileApi(cls.file_client) - cls.file_remote_api = RemotesFileApi(cls.file_client) - cls.file_acs_api = AcsFileApi(cls.file_client) - cls.publication_api = PublicationsFileApi(cls.file_client) - cls.distribution_api = DistributionsFileApi(cls.file_client) - cls.paths = ["goodpath/PULP_MANIFEST", "test", "whatever/test"] - delete_orphans() - - def _create_acs(self, name="file_acs", paths=None, remote_url=FILE_FIXTURE_MANIFEST_URL): - remote = self.file_remote_api.create(gen_file_remote(remote_url, policy="on_demand")) - self.addCleanup(self.file_remote_api.delete, remote.pulp_href) - - acs_data = { - "name": name, - "remote": remote.pulp_href, - } - if paths: - acs_data["paths"] = paths - - acs = self.file_acs_api.create(acs_data) - self.addCleanup(self.file_acs_api.delete, acs.pulp_href) - - return acs - - def test_path_validation(self): - """Test the validation of paths.""" - # path is wrong, begins with / - with self.assertRaises(ApiException) as ctx: - self._create_acs(paths=(self.paths + ["/bad_path"])) - self.assertEqual(ctx.exception.status, 400) - - # use valid paths - acs = self._create_acs(paths=self.paths) - self.assertEqual(sorted(acs.paths), sorted(self.paths)) - - def test_acs_sync(self): - """Test syncing from an ACS.""" - delete_orphans() - repo = self.repo_api.create(gen_repo()) - self.addCleanup(self.repo_api.delete, repo.pulp_href) - - remote = self.file_remote_api.create(gen_file_remote(FILE_MANIFEST_ONLY_FIXTURE_URL)) - self.addCleanup(self.file_remote_api.delete, remote.pulp_href) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - - # sync should fail as the repo has metadata only (no files) - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - with self.assertRaises(PulpTaskError) as ctx: - monitor_task(sync_response.task) - self.assertIn("404", ctx.exception.task.error["description"]) - - # create 
an acs and pull in its remote artifacts - acs = self._create_acs() - resp = self.file_acs_api.refresh(acs.pulp_href) - monitor_task_group(resp.task_group) - - # the sync should now work as the files are being pulled from ACS remote - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - - def test_acs_sync_with_paths(self): - """Test syncing from an ACS using different paths.""" - repo = self.repo_api.create(gen_repo()) - self.addCleanup(self.repo_api.delete, repo.pulp_href) - - remote = self.file_remote_api.create(gen_file_remote(FILE_MANIFEST_ONLY_FIXTURE_URL)) - self.addCleanup(self.file_remote_api.delete, remote.pulp_href) - - acs = self._create_acs( - paths=("file/PULP_MANIFEST", "file2/PULP_MANIFEST"), - remote_url=PULP_FIXTURES_BASE_URL, +@pytest.fixture +def generate_server_and_remote( + gen_fixture_server, file_fixtures_root, file_remote_api_client, gen_object_with_cleanup +): + def _generate_server_and_remote(*, manifest_path, policy): + server = gen_fixture_server(file_fixtures_root, None) + url = server.make_url(manifest_path) + remote = gen_object_with_cleanup( + file_remote_api_client, + {"name": str(uuid.uuid4()), "url": str(url), "policy": policy}, ) - resp = self.file_acs_api.refresh(acs.pulp_href) - task_group = monitor_task_group(resp.task_group) - self.assertEquals(len(task_group.tasks), 2) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - - def test_serving_acs_content(self): - """Test serving of ACS content through the content app.""" - cfg = config.get_config() - acs = self._create_acs() - resp = self.file_acs_api.refresh(acs.pulp_href) - monitor_task_group(resp.task_group) - - remote = self.file_remote_api.create( - gen_file_remote(FILE_MANIFEST_ONLY_FIXTURE_URL, policy="on_demand") - ) - self.addCleanup(self.file_remote_api.delete, remote.pulp_href) + 
return server, remote - repo = self.repo_api.create(gen_repo(remote=remote.pulp_href, autopublish=True)) - self.addCleanup(self.repo_api.delete, repo.pulp_href) + yield _generate_server_and_remote - distribution_response = self.distribution_api.create( - gen_distribution(repository=repo.pulp_href) - ) - created_resources = monitor_task(distribution_response.task).created_resources - distribution = self.distribution_api.read(created_resources[0]) - self.addCleanup(self.distribution_api.delete, distribution.pulp_href) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = self.repo_api.read(repo.pulp_href) - - unit_path = choice(get_file_content_paths(repo.to_dict())) - fixtures_hash = hashlib.sha256( - utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path)) - ).hexdigest() - content = download_content_unit(cfg, distribution.to_dict(), unit_path) - pulp_hash = hashlib.sha256(content).hexdigest() - - self.assertEqual(fixtures_hash, pulp_hash) + +@pytest.mark.parallel +def test_acs_validation_and_update( + file_acs_api_client, file_fixture_gen_remote, basic_manifest_path, gen_object_with_cleanup +): + # Test that a remote with "immediate" download policy can't be used with an ACS + immediate_remote = file_fixture_gen_remote( + manifest_path=basic_manifest_path, policy="immediate" + ) + acs_data = { + "name": str(uuid.uuid4()), + "remote": immediate_remote.pulp_href, + "paths": [], + } + with pytest.raises(ApiException) as exc: + file_acs_api_client.create(acs_data) + assert exc.value.status == 400 + assert "remote" in exc.value.body + + # Assert that paths starting with "/" are not accepted by ACS API. 
+ on_demand_remote = file_fixture_gen_remote( + manifest_path=basic_manifest_path, policy="on_demand" + ) + acs_data = { + "name": str(uuid.uuid4()), + "remote": on_demand_remote.pulp_href, + "paths": ["good/path", "/bad/path"], + } + with pytest.raises(ApiException) as exc: + file_acs_api_client.create(acs_data) + assert exc.value.status == 400 + assert "paths" in exc.value.body + + # Assert that an ACS can be created with valid paths + acs_data["paths"] = ["good/path", "valid"] + acs = gen_object_with_cleanup(file_acs_api_client, acs_data) + assert set(acs.paths) == set(acs_data["paths"]) + + # Test that an ACS's name can be updated without clobbering the paths + new_name = str(uuid.uuid4()) + monitor_task( + file_acs_api_client.update(acs.pulp_href, {"name": new_name, "remote": acs.remote}).task + ) + acs = file_acs_api_client.read(acs.pulp_href) + assert acs.name == new_name + assert sorted(acs.paths) == sorted(acs_data["paths"]) + + # Test that you can do a partial update of an ACS + new_name = str(uuid.uuid4()) + monitor_task(file_acs_api_client.partial_update(acs.pulp_href, {"name": new_name}).task) + acs = file_acs_api_client.read(acs.pulp_href) + assert acs.name == new_name + assert sorted(acs.paths) == sorted(acs_data["paths"]) + + # Test that paths can be updated + updated_paths = ["foo"] + monitor_task(file_acs_api_client.partial_update(acs.pulp_href, {"paths": updated_paths}).task) + acs = file_acs_api_client.read(acs.pulp_href) + assert acs.paths == updated_paths + + +@pytest.mark.parallel +def test_acs_sync( + file_repo, + file_repo_api_client, + file_acs_api_client, + basic_manifest_path, + gen_object_with_cleanup, + generate_server_and_remote, +): + # Create the main server and remote pointing to it + main_server, main_remote = generate_server_and_remote( + manifest_path=basic_manifest_path, policy="immediate" + ) + + # Create an ACS server and a remote pointing to it + acs_server, acs_remote = generate_server_and_remote( + 
manifest_path=basic_manifest_path, policy="on_demand" + ) + + # Create the ACS that uses the remote from above + acs = gen_object_with_cleanup( + file_acs_api_client, + {"remote": acs_remote.pulp_href, "paths": [], "name": str(uuid.uuid4())}, + ) + + # Refresh ACS and assert that only the PULP_MANIFEST was downloaded + monitor_task_group(file_acs_api_client.refresh(acs.pulp_href).task_group) + assert len(acs_server.requests_record) == 1 + assert acs_server.requests_record[0].path == basic_manifest_path + + # Sync the repository + repository_sync_data = RepositorySyncURL(remote=main_remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, repository_sync_data).task) + + # Assert that only the PULP_MANIFEST was downloaded from the main remote + assert len(main_server.requests_record) == 1 + assert main_server.requests_record[0].path == basic_manifest_path + + # Assert that the files were downloaded from the ACS remote + expected_request_paths = { + basic_manifest_path, + "/basic/1.iso", + "/basic/2.iso", + "/basic/3.iso", + } + actual_requested_paths = set([request.path for request in acs_server.requests_record]) + assert len(acs_server.requests_record) == 4 + assert actual_requested_paths == expected_request_paths + + +@pytest.mark.parallel +def test_acs_sync_with_paths( + file_repo, + file_repo_api_client, + file_acs_api_client, + basic_manifest_path, + large_manifest_path, + gen_object_with_cleanup, + generate_server_and_remote, +): + # Create the main server and remote pointing to it + main_server, main_remote = generate_server_and_remote( + manifest_path=basic_manifest_path, policy="immediate" + ) + + # Create an ACS server and a remote pointing to it + acs_server, acs_remote = generate_server_and_remote(manifest_path="/", policy="on_demand") + + # Create the ACS that uses the remote from above + acs = gen_object_with_cleanup( + file_acs_api_client, + { + "remote": acs_remote.pulp_href, + "paths": [basic_manifest_path[1:], 
large_manifest_path[1:]], + "name": str(uuid.uuid4()), + }, + ) + + # Refresh ACS and assert that only the PULP_MANIFEST was downloaded + task_group = monitor_task_group(file_acs_api_client.refresh(acs.pulp_href).task_group) + expected_request_paths = {basic_manifest_path, large_manifest_path} + actual_requested_paths = set([request.path for request in acs_server.requests_record]) + assert len(task_group.tasks) == 2 + assert len(acs_server.requests_record) == 2 + assert expected_request_paths == actual_requested_paths + + # Sync the repository + repository_sync_data = RepositorySyncURL(remote=main_remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, repository_sync_data).task) + + # Assert that only the PULP_MANIFEST was downloaded from the main remote + assert len(main_server.requests_record) == 1 + assert main_server.requests_record[0].path == basic_manifest_path + + # Assert that the files were downloaded from the ACS remote + expected_request_paths = { + basic_manifest_path, + large_manifest_path, + "/basic/1.iso", + "/basic/2.iso", + "/basic/3.iso", + } + actual_requested_paths = set([request.path for request in acs_server.requests_record]) + assert len(acs_server.requests_record) == 5 + assert actual_requested_paths == expected_request_paths + + +@pytest.mark.parallel +def test_serving_acs_content( + file_repo, + file_repo_api_client, + file_acs_api_client, + file_distro_api_client, + basic_manifest_path, + gen_object_with_cleanup, + generate_server_and_remote, +): + # Create the main server and remote pointing to it + main_server, main_remote = generate_server_and_remote( + manifest_path=basic_manifest_path, policy="on_demand" + ) + + # Create an ACS server and a remote pointing to it + acs_server, acs_remote = generate_server_and_remote( + manifest_path=basic_manifest_path, policy="on_demand" + ) + + # Create the ACS that uses the remote from above + acs = gen_object_with_cleanup( + file_acs_api_client, + {"remote": 
acs_remote.pulp_href, "paths": [], "name": str(uuid.uuid4())}, + ) + + # Refresh ACS + monitor_task_group(file_acs_api_client.refresh(acs.pulp_href).task_group) + + # Create a distribution + distribution_href = monitor_task( + file_distro_api_client.create(gen_distribution(repository=file_repo.pulp_href)).task + ).created_resources[0] + distribution = file_distro_api_client.read(distribution_href) + + # Turn on auto-publish on the repository + monitor_task( + file_repo_api_client.partial_update( + file_repo.pulp_href, {"autopublish": True, "remote": main_remote.pulp_href} + ).task + ) + + # Sync the repository + repository_sync_data = RepositorySyncURL(remote=main_remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, repository_sync_data).task) + + # Assert that only the PULP_MANIFEST was downloaded from the main remote + assert len(main_server.requests_record) == 1 + assert main_server.requests_record[0].path == basic_manifest_path + + # Check what content and artifacts are in the fixture repository + expected_files = get_files_in_manifest(main_remote.url) + + # Download one of the files and assert that it has the right checksum and that it is downloaded + # from the ACS server. 
+ content_unit = list(expected_files)[0] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + downloaded_file = download_file(content_unit_url) + actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() + expected_checksum = content_unit[1] + assert expected_checksum == actual_checksum + for request in main_server.requests_record: + assert content_unit[0] not in request.path + assert len(acs_server.requests_record) == 2 + assert content_unit[0] in acs_server.requests_record[1].path diff --git a/pulp_file/tests/functional/api/test_auto_publish.py b/pulp_file/tests/functional/api/test_auto_publish.py index ca60bee6..df672cb4 100644 --- a/pulp_file/tests/functional/api/test_auto_publish.py +++ b/pulp_file/tests/functional/api/test_auto_publish.py @@ -1,111 +1,86 @@ -# coding=utf-8 """Tests that sync file plugin repositories.""" -import unittest +import pytest -from pulp_smash import config -from pulp_smash.pulp3.bindings import delete_orphans, monitor_task -from pulp_smash.pulp3.utils import delete_version, download_content_unit, gen_repo +from pulp_smash.pulp3.bindings import monitor_task +from pulp_smash.pulp3.utils import gen_repo -from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote +from pulp_file.tests.functional.utils import get_files_in_manifest from pulpcore.client.pulp_file import ( - ContentFilesApi, - DistributionsFileApi, - PublicationsFileApi, - RepositoriesFileApi, RepositorySyncURL, - RemotesFileApi, ) -class AutoPublishDistributeTestCase(unittest.TestCase): - """Test auto-publish and auto-distribution""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = gen_file_client() - - cls.content_api = ContentFilesApi(cls.client) - cls.repo_api = RepositoriesFileApi(cls.client) - cls.remote_api = RemotesFileApi(cls.client) - cls.publications_api = PublicationsFileApi(cls.client) - cls.distributions_api = DistributionsFileApi(cls.client) 
- - cls.CUSTOM_MANIFEST = "TEST_MANIFEST" - - def setUp(self): - """Create remote, repo, and distribution.""" - delete_orphans() - self.remote = self.remote_api.create(gen_file_remote()) - self.repo = self.repo_api.create(gen_repo(manifest=self.CUSTOM_MANIFEST, autopublish=True)) - response = self.distributions_api.create( - {"name": "foo", "base_path": "bar/foo", "repository": self.repo.pulp_href} - ) - distribution_href = monitor_task(response.task).created_resources[0] - self.distribution = self.distributions_api.read(distribution_href) - - def tearDown(self): - """Clean up.""" - monitor_task(self.repo_api.delete(self.repo.pulp_href).task) - monitor_task(self.remote_api.delete(self.remote.pulp_href).task) - monitor_task(self.distributions_api.delete(self.distribution.pulp_href).task) - - def test_workflow(self): - self._sync() - self._modify() - - def _sync(self): - """Assert that syncing the repository triggers auto-publish and auto-distribution.""" - self.assertEqual(self.publications_api.list().count, 0) - self.assertTrue(self.distribution.publication is None) - - # Sync the repository. - repository_sync_data = RepositorySyncURL(remote=self.remote.pulp_href) - sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data) - task = monitor_task(sync_response.task) - - # Check that all the appropriate resources were created - self.assertGreater(len(task.created_resources), 1) - publications = self.publications_api.list() - self.assertEqual(publications.count, 1) - download_content_unit(self.cfg, self.distribution.to_dict(), self.CUSTOM_MANIFEST) - - # Check that the publish settings were used - publication = publications.results[0] - self.assertEqual(publication.manifest, self.CUSTOM_MANIFEST) - - # Sync the repository again. Since there should be no new repository version, there - # should be no new publications or distributions either. 
- sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data) - task = monitor_task(sync_response.task) - - self.assertEqual(len(task.created_resources), 0) - self.assertEqual(self.publications_api.list().count, 1) - - self.publications_api.delete(publication.pulp_href) - self.repo = self.repo_api.read(self.repo.pulp_href) - delete_version_task = delete_version(self.repo, version_href=self.repo.latest_version_href) - monitor_task(delete_version_task[0]["pulp_href"]) - - def _modify(self): - """Assert that modifying the repository triggers auto-publish and auto-distribution.""" - self.assertEqual(self.publications_api.list().count, 0) - self.assertTrue(self.distribution.publication is None) - - # Modify the repository by adding a content unit - content = self.content_api.list().results[0].pulp_href - modify_response = self.repo_api.modify( - self.repo.pulp_href, {"add_content_units": [content]} - ) - task = monitor_task(modify_response.task) - - # Check that all the appropriate resources were created - self.assertGreater(len(task.created_resources), 1) - publications = self.publications_api.list() - self.assertEqual(publications.count, 1) - - # Check that the publish settings were used - publication = publications.results[0] - self.assertEqual(publication.manifest, self.CUSTOM_MANIFEST) +@pytest.fixture +def file_repo_with_auto_publish(file_repo_api_client, gen_object_with_cleanup): + return gen_object_with_cleanup( + file_repo_api_client, gen_repo(autopublish=True, manifest="TEST_MANIFEST") + ) + + +@pytest.mark.parallel +def test_auto_publish_and_distribution( + file_repo_with_auto_publish, + file_fixture_gen_remote_ssl, + file_repo_api_client, + file_pub_api_client, + basic_manifest_path, + gen_object_with_cleanup, + file_distro_api_client, + file_random_content_unit, +): + """Tests auto-publish and auto-distribution""" + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + repo = 
file_repo_api_client.read(file_repo_with_auto_publish.pulp_href) + distribution = gen_object_with_cleanup( + file_distro_api_client, + {"name": "foo", "base_path": "bar/foo", "repository": repo.pulp_href}, + ) + + # Assert that the repository is at version 0 and that there are no publications associated with + # this Repository and that the distribution doesn't have a publication associated with it. + assert repo.latest_version_href.endswith("/versions/0/") + assert file_pub_api_client.list(repository=repo.pulp_href).count == 0 + assert file_pub_api_client.list(repository_version=repo.latest_version_href).count == 0 + assert distribution.publication is None + + # Check what content and artifacts are in the fixture repository + expected_files = get_files_in_manifest(remote.url) + + # Sync from the remote + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(repo.pulp_href, body).task) + repo = file_repo_api_client.read(repo.pulp_href) + + # Assert that a new repository version was created and a publication was created + assert repo.latest_version_href.endswith("/versions/1/") + assert file_pub_api_client.list(repository=repo.pulp_href).count == 1 + assert file_pub_api_client.list(repository_version=repo.latest_version_href).count == 1 + + # Assert that the publication has a custom manifest + publication = file_pub_api_client.list(repository_version=repo.latest_version_href).results[0] + assert publication.manifest == "TEST_MANIFEST" + + # Download the custom manifest + files_in_first_publication = get_files_in_manifest( + "{}{}".format(distribution.base_url, publication.manifest) + ) + assert files_in_first_publication == expected_files + + # Add a new content unit to the repository and assert that a publication gets created and the + # new content unit is in it + monitor_task( + file_repo_api_client.modify( + repo.pulp_href, {"add_content_units": [file_random_content_unit.pulp_href]} + ).task + ) + repo = 
file_repo_api_client.read(repo.pulp_href) + files_in_second_publication = get_files_in_manifest( + "{}{}".format(distribution.base_url, publication.manifest) + ) + files_added = files_in_second_publication - files_in_first_publication + assert repo.latest_version_href.endswith("/versions/2/") + assert file_pub_api_client.list(repository=repo.pulp_href).count == 2 + assert file_pub_api_client.list(repository_version=repo.latest_version_href).count == 1 + assert len(files_added) == 1 + assert list(files_added)[0][1] == file_random_content_unit.sha256 diff --git a/pulp_file/tests/functional/api/test_crud_content_unit.py b/pulp_file/tests/functional/api/test_crud_content_unit.py index 4e8ded0b..1fe3b139 100644 --- a/pulp_file/tests/functional/api/test_crud_content_unit.py +++ b/pulp_file/tests/functional/api/test_crud_content_unit.py @@ -1,343 +1,232 @@ -# coding=utf-8 """Tests that perform actions over content unit.""" -import unittest +import hashlib +import os +import pytest +import uuid -from pulp_smash import utils from pulp_smash.pulp3.bindings import ( - delete_orphans, monitor_task, PulpTaskError, ) -from pulp_smash.pulp3.utils import gen_repo - -from pulp_file.tests.functional.utils import ( - gen_artifact, - gen_file_client, - gen_file_content_attrs, - gen_file_content_upload_attrs, - gen_pulpcore_client, - skip_if, -) - -from pulpcore.client.pulpcore import TasksApi -from pulpcore.client.pulp_file import ( - ContentFilesApi, - RepositoriesFileApi, - RepositoriesFileVersionsApi, -) - - -class ContentUnitTestCase(unittest.TestCase): - """CRUD content unit. 
- - This test targets the following issues: - - * `Pulp #2872 `_ - * `Pulp #3445 `_ - * `Pulp Smash #870 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variable.""" - delete_orphans() - cls.content_unit = {} - cls.file_content_api = ContentFilesApi(gen_file_client()) - cls.artifact = gen_artifact() - - @classmethod - def tearDownClass(cls): - """Clean class-wide variable.""" - delete_orphans() - - def test_01_create_content_unit(self): - """Create content unit.""" - attrs = gen_file_content_attrs(self.artifact) - response = self.file_content_api.create(**attrs) - created_resources = monitor_task(response.task).created_resources - content_unit = self.file_content_api.read(created_resources[0]) - self.content_unit.update(content_unit.to_dict()) - for key, val in attrs.items(): - with self.subTest(key=key): - self.assertEqual(self.content_unit[key], val) - - @skip_if(bool, "content_unit", False) - def test_02_read_content_unit(self): - """Read a content unit by its href.""" - content_unit = self.file_content_api.read(self.content_unit["pulp_href"]).to_dict() - for key, val in self.content_unit.items(): - with self.subTest(key=key): - self.assertEqual(content_unit[key], val) - - @skip_if(bool, "content_unit", False) - def test_02_read_content_units(self): - """Read a content unit by its relative_path.""" - page = self.file_content_api.list(relative_path=self.content_unit["relative_path"]) - self.assertEqual(len(page.results), 1) - for key, val in self.content_unit.items(): - with self.subTest(key=key): - self.assertEqual(page.results[0].to_dict()[key], val) - - @skip_if(bool, "content_unit", False) - def test_03_partially_update(self): - """Attempt to update a content unit using HTTP PATCH. - - This HTTP method is not supported and a HTTP exception is expected. 
- """ - attrs = gen_file_content_attrs(self.artifact) - with self.assertRaises(AttributeError) as exc: - self.file_content_api.partial_update(self.content_unit["pulp_href"], attrs) - error_message = "'ContentFilesApi' object has no attribute 'partial_update'" - self.assertEqual(exc.exception.args[0], error_message) - - @skip_if(bool, "content_unit", False) - def test_03_fully_update(self): - """Attempt to update a content unit using HTTP PUT. - - This HTTP method is not supported and a HTTP exception is expected. - """ - attrs = gen_file_content_attrs(self.artifact) - with self.assertRaises(AttributeError) as exc: - self.file_content_api.update(self.content_unit["pulp_href"], attrs) - error_message = "'ContentFilesApi' object has no attribute 'update'" - self.assertEqual(exc.exception.args[0], error_message) - - @skip_if(bool, "content_unit", False) - def test_04_delete(self): - """Attempt to delete a content unit using HTTP DELETE. - - This HTTP method is not supported and a HTTP exception is expected. - """ - with self.assertRaises(AttributeError) as exc: - self.file_content_api.delete(self.content_unit["pulp_href"]) - error_message = "'ContentFilesApi' object has no attribute 'delete'" - self.assertEqual(exc.exception.args[0], error_message) - - -class ContentUnitUploadTestCase(unittest.TestCase): - """CRUD content unit with upload feature. 
- - This test targets the following issue: - - `Pulp #5403 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variable.""" - delete_orphans() - cls.content_unit = {} - cls.file_content_api = ContentFilesApi(gen_file_client()) - cls.attrs = gen_file_content_upload_attrs() - - @classmethod - def tearDownClass(cls): - """Clean class-wide variable.""" - delete_orphans() - - def test_01_create_content_unit(self): - """Create content unit.""" - response = self.file_content_api.create(**self.attrs, file=__file__) - created_resources = monitor_task(response.task).created_resources - content_unit = self.file_content_api.read(created_resources[0]) - self.content_unit.update(content_unit.to_dict()) - for key, val in self.attrs.items(): - with self.subTest(key=key): - self.assertEqual(self.content_unit[key], val) - - @skip_if(bool, "content_unit", False) - def test_02_read_content_unit(self): - """Read a content unit by its href.""" - content_unit = self.file_content_api.read(self.content_unit["pulp_href"]).to_dict() - for key, val in self.content_unit.items(): - with self.subTest(key=key): - self.assertEqual(content_unit[key], val) - - @skip_if(bool, "content_unit", False) - def test_02_read_content_units(self): - """Read a content unit by its relative_path.""" - page = self.file_content_api.list(relative_path=self.content_unit["relative_path"]) - self.assertEqual(len(page.results), 1) - for key, val in self.content_unit.items(): - with self.subTest(key=key): - self.assertEqual(page.results[0].to_dict()[key], val) - - @skip_if(bool, "content_unit", False) - def test_03_fail_duplicate_content_unit(self): - """Create content unit.""" - response = self.file_content_api.create(**self.attrs, file=__file__) - with self.assertRaises(PulpTaskError) as cm: - monitor_task(response.task) - task = cm.exception.task.to_dict() - self.assertEqual(task["state"], "failed") - error_description = task["error"]["description"] - for key in ("already", "relative", "path", 
"digest"): - self.assertIn(key, error_description.lower(), task["error"]) - - @skip_if(bool, "content_unit", False) - def test_03_duplicate_content_unit(self): - """Create content unit.""" - attrs = self.attrs.copy() - attrs["relative_path"] = utils.uuid4() - response = self.file_content_api.create(**attrs, file=__file__) - monitor_task(response.task) - - -class DuplicateContentUnit(unittest.TestCase): - """Attempt to create a duplicate content unit. - - This test targets the following issues: - - * `Pulp #4125 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.file_content_api = ContentFilesApi(gen_file_client()) - - @classmethod - def tearDownClass(cls): - """Clean created resources.""" - delete_orphans() - - def test_raise_error(self): - """Create a duplicate content unit using same relative_path. +from pulp_smash.pulp3.utils import get_content_summary, get_added_content_summary - Artifacts are unique by ``relative_path`` and ``file``. - - In order to raise an HTTP error, the same ``artifact`` and the same - ``relative_path`` should be used. - """ - delete_orphans() - artifact = gen_artifact() - attrs = gen_file_content_attrs(artifact) - - # create first content unit. - response = self.file_content_api.create(**attrs) - monitor_task(response.task) - - # using the same attrs used to create the first content unit. - response = self.file_content_api.create(**attrs) - with self.assertRaises(PulpTaskError) as cm: - monitor_task(response.task) - error = cm.exception.task.to_dict()["error"] - for key in ("already", "relative", "path", "digest"): - self.assertIn(key, error["description"].lower(), error) - - def test_non_error(self): - """Create a duplicate content unit with different relative_path. - - Artifacts are unique by ``relative_path`` and ``file``. - - In order to avoid an HTTP error, use the same ``artifact`` and - different ``relative_path``. 
- """ - tasks_api = TasksApi(gen_pulpcore_client()) - - delete_orphans() - artifact = gen_artifact() - - # create first content unit. - response = self.file_content_api.create(**gen_file_content_attrs(artifact)) - monitor_task(response.task) - - # create second content unit. - response = self.file_content_api.create(**gen_file_content_attrs(artifact)) - monitor_task(response.task) - task = tasks_api.read(response.task) - self.assertEqual(task.state, "completed") +from pulpcore.client.pulpcore.exceptions import ApiException as coreApiException -class DuplicateRelativePathsInRepo(unittest.TestCase): - """Associate different Content units with the same ``relative_path`` in one RepositoryVersion. +@pytest.mark.parallel +def test_crud_content_unit( + random_artifact, file_random_content_unit, file_content_api_client, gen_object_with_cleanup +): + artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": str(uuid.uuid4())} + content_unit = gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + assert content_unit.artifact == random_artifact.pulp_href + assert content_unit.relative_path == artifact_attrs["relative_path"] - This test targets the following issues: + response = file_content_api_client.list(relative_path=content_unit.relative_path) + assert response.count == 1 - * `Pulp #4028 `_ - """ + content_unit = file_content_api_client.read(content_unit.pulp_href) + assert content_unit.artifact == random_artifact.pulp_href + assert content_unit.relative_path == artifact_attrs["relative_path"] - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.client = gen_file_client() - - @classmethod - def tearDownClass(cls): - """Clean created resources.""" - delete_orphans() - - def test_second_unit_replaces_the_first(self): - """Create a duplicate content unit with different ``artifacts`` and same ``relative_path``. - - Artifacts are unique by ``relative_path`` and ``file``. 
- """ - delete_orphans() - content_api = ContentFilesApi(self.client) - repo_api = RepositoriesFileApi(self.client) - versions_api = RepositoriesFileVersionsApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - artifact = gen_artifact() - - # create first content unit. - content_attrs = gen_file_content_attrs(artifact) - content_attrs["repository"] = repo.pulp_href - response = content_api.create(**content_attrs) - monitor_task(response.task) - - artifact = gen_artifact(file=__file__) - - # create second content unit. - second_content_attrs = gen_file_content_attrs(artifact) - second_content_attrs["repository"] = repo.pulp_href - second_content_attrs["relative_path"] = content_attrs["relative_path"] - - response = content_api.create(**second_content_attrs) - monitor_task(response.task) - - repo_latest_version = versions_api.read(repo_api.read(repo.pulp_href).latest_version_href) - - self.assertEqual(repo_latest_version.content_summary.present["file.file"]["count"], 1) - - def test_second_unit_raises_error(self): - """Create a duplicate content unit with different ``artifacts`` and same ``relative_path``. - - Artifacts are unique by ``relative_path`` and ``file``. - """ - delete_orphans() - content_api = ContentFilesApi(self.client) - repo_api = RepositoriesFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - artifact = gen_artifact() - - # create first content unit. - content_attrs = gen_file_content_attrs(artifact) - response = content_api.create(**content_attrs) - monitor_task(response.task) - - artifact = gen_artifact(file=__file__) - - # create second content unit. 
-        second_content_attrs = gen_file_content_attrs(artifact)
-        second_content_attrs["relative_path"] = content_attrs["relative_path"]
-        response = content_api.create(**second_content_attrs)
+    with pytest.raises(AttributeError) as exc:
+        file_content_api_client.partial_update(
+            content_unit.pulp_href, relative_path=str(uuid.uuid4())
+        )
+    assert exc.value.args[0] == "'ContentFilesApi' object has no attribute 'partial_update'"
+
+    with pytest.raises(AttributeError) as exc:
+        file_content_api_client.update(content_unit.pulp_href, relative_path=str(uuid.uuid4()))
+    assert exc.value.args[0] == "'ContentFilesApi' object has no attribute 'update'"
+
+
+@pytest.mark.parallel
+def test_same_sha256_same_relative_path_no_repo(
+    random_artifact, file_content_api_client, gen_object_with_cleanup
+):
+    artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": str(uuid.uuid4())}
+
+    content1 = file_content_api_client.read(
+        monitor_task(file_content_api_client.create(**artifact_attrs).task).created_resources[0]
+    )
+    content2 = file_content_api_client.read(
+        monitor_task(file_content_api_client.create(**artifact_attrs).task).created_resources[0]
+    )
+    assert content1.pulp_href == content2.pulp_href
+    assert file_content_api_client.read(content1.pulp_href).pulp_href == content2.pulp_href
+
+
+@pytest.mark.parallel
+def test_same_sha256_same_relative_path_repo_specified(
+    random_artifact,
+    file_content_api_client,
+    file_repo_api_client,
+    gen_user,
+    file_fixture_gen_file_repo,
+):
+    max = gen_user(model_roles=["file.filerepository_creator"])
+    john = gen_user(model_roles=["file.filerepository_creator"])
+
+    with max:
+        repo1 = file_fixture_gen_file_repo(name=str(uuid.uuid4()))
+    with john:
+        repo2 = file_fixture_gen_file_repo(name=str(uuid.uuid4()))
+
+    artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": str(uuid.uuid4())}
+
+    artifact_attrs["repository"] = repo1.pulp_href
+    with max:
+        response1 = 
file_content_api_client.create(**artifact_attrs)
+        response2 = file_content_api_client.create(**artifact_attrs)
+
+    content1 = file_content_api_client.read(monitor_task(response1.task).created_resources[1])
+    content2 = file_content_api_client.read(monitor_task(response2.task).created_resources[0])
+    assert content1.pulp_href == content2.pulp_href
+    repo1 = file_repo_api_client.read(repo1.pulp_href)
+    assert repo1.latest_version_href.endswith("/versions/1/")
+    assert get_content_summary(repo1.to_dict()) == {"file.file": 1}
+    assert get_added_content_summary(repo1.to_dict()) == {"file.file": 1}
+
+    artifact_attrs["repository"] = repo2.pulp_href
+    with john:
+        ctask3 = file_content_api_client.create(**artifact_attrs).task
+
+    content3 = file_content_api_client.read(monitor_task(ctask3).created_resources[1])
+    assert content3.pulp_href == content1.pulp_href
+    repo2 = file_repo_api_client.read(repo2.pulp_href)
+    assert repo2.latest_version_href.endswith("/versions/1/")
+    assert get_content_summary(repo2.to_dict()) == {"file.file": 1}
+    assert get_added_content_summary(repo2.to_dict()) == {"file.file": 1}
+
+
+@pytest.mark.parallel
+def test_same_sha256_diff_relative_path(
+    random_artifact, file_content_api_client, gen_object_with_cleanup
+):
+    artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": str(uuid.uuid4())}
+    gen_object_with_cleanup(file_content_api_client, **artifact_attrs)
+
+    # Second unit: same artifact, different (fresh) relative path.
+    artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": str(uuid.uuid4())}
+    gen_object_with_cleanup(file_content_api_client, **artifact_attrs)
+
+    response = file_content_api_client.list(relative_path=artifact_attrs["relative_path"])
+    assert response.count == 1
+
+
+@pytest.mark.parallel
+def test_second_content_unit_with_same_rel_path_replaces_the_first(
+    file_repo,
+    random_artifact_factory,
+    file_content_api_client,
+    gen_object_with_cleanup,
+    file_repo_ver_api_client,
+    
file_repo_api_client, +): + latest_repo_version = file_repo_ver_api_client.read(file_repo.latest_version_href) + assert latest_repo_version.number == 0 + + artifact_attrs = { + "artifact": random_artifact_factory().pulp_href, + "relative_path": str(uuid.uuid4()), + "repository": file_repo.pulp_href, + } + gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + + file_repo = file_repo_api_client.read(file_repo.pulp_href) + latest_repo_version = file_repo_ver_api_client.read(file_repo.latest_version_href) + assert latest_repo_version.content_summary.present["file.file"]["count"] == 1 + assert latest_repo_version.number == 1 + + artifact_attrs["artifact"] = random_artifact_factory().pulp_href + gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + + file_repo = file_repo_api_client.read(file_repo.pulp_href) + latest_repo_version = file_repo_ver_api_client.read(file_repo.latest_version_href) + assert latest_repo_version.content_summary.present["file.file"]["count"] == 1 + assert latest_repo_version.number == 2 + + +@pytest.mark.parallel +def test_cannot_create_repo_version_with_two_relative_paths_the_same( + file_repo, + random_artifact_factory, + file_content_api_client, + gen_object_with_cleanup, + file_repo_ver_api_client, + file_repo_api_client, +): + latest_repo_version = file_repo_ver_api_client.read(file_repo.latest_version_href) + assert latest_repo_version.number == 0 + + artifact_attrs = { + "artifact": random_artifact_factory().pulp_href, + "relative_path": str(uuid.uuid4()), + } + first_content_unit = gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + + artifact_attrs["artifact"] = random_artifact_factory().pulp_href + second_content_unit = gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + + response = file_content_api_client.list(relative_path=first_content_unit.relative_path) + assert response.count == 2 + + data = {"add_content_units": [first_content_unit.pulp_href, 
second_content_unit.pulp_href]} + + with pytest.raises(PulpTaskError): + response = file_repo_api_client.modify(file_repo.pulp_href, data) monitor_task(response.task) - data = {"add_content_units": [c.pulp_href for c in content_api.list().results]} - response = repo_api.modify(repo.pulp_href, data) - with self.assertRaises(PulpTaskError) as cm: - monitor_task(response.task) - task = cm.exception.task.to_dict() - error_message = ( - "Cannot create repository version. " - "More than one file.file content with " - "the duplicate values for relative_path." +@pytest.mark.parallel +def test_create_file_content_from_chunked_upload( + tmp_path, gen_object_with_cleanup, uploads_api_client, file_content_api_client +): + hasher = hashlib.sha256() + file_1 = tmp_path / "file.part1" + file_1.write_bytes(os.urandom(128)) + hasher.update(file_1.read_bytes()) + file_2 = tmp_path / "file.part2" + file_2.write_bytes(os.urandom(128)) + hasher.update(file_2.read_bytes()) + expected_digest = hasher.hexdigest() + + # Perform the same test twice, because in the second run, the existing artifact should be + # reused. 
+ for _ in (0, 1): + # Upload the file and generate content + upload = gen_object_with_cleanup(uploads_api_client, {"size": 256}) + uploads_api_client.update( + upload_href=upload.pulp_href, file=file_1, content_range="bytes 0-127/256" + ) + uploads_api_client.update( + upload_href=upload.pulp_href, file=file_2, content_range="bytes 128-255/256" + ) + most_recent_path = str(uuid.uuid4()) + response = file_content_api_client.create( + upload=upload.pulp_href, relative_path=most_recent_path ) - self.assertEqual(task["error"]["description"], error_message) + task = monitor_task(response.task) + content = file_content_api_client.read(task.created_resources[0]) + assert content.sha256 == expected_digest + # Upload gets deleted if the content gets created + with pytest.raises(coreApiException): + uploads_api_client.read(upload.pulp_href) + + # Attempt to create a duplicate content by re-using the most recent relative path + upload = gen_object_with_cleanup(uploads_api_client, {"size": 256}) + uploads_api_client.update( + upload_href=upload.pulp_href, file=file_1, content_range="bytes 0-127/256" + ) + uploads_api_client.update( + upload_href=upload.pulp_href, file=file_2, content_range="bytes 128-255/256" + ) + response = file_content_api_client.create( + upload=upload.pulp_href, relative_path=most_recent_path + ) + task = monitor_task(response.task) + content = file_content_api_client.read(task.created_resources[0]) + assert content.sha256 == expected_digest + # Upload gets deleted even though no new content got created + with pytest.raises(coreApiException): + uploads_api_client.read(upload.pulp_href) diff --git a/pulp_file/tests/functional/api/test_crud_remotes.py b/pulp_file/tests/functional/api/test_crud_remotes.py index 7804e094..847fb18d 100644 --- a/pulp_file/tests/functional/api/test_crud_remotes.py +++ b/pulp_file/tests/functional/api/test_crud_remotes.py @@ -1,205 +1,98 @@ -# coding=utf-8 """Tests that CRUD file remotes.""" import json -from random import 
choice -import unittest +import uuid -from pulp_smash import utils +import pytest from pulp_smash.pulp3.bindings import monitor_task -from pulp_smash.pulp3.constants import ON_DEMAND_DOWNLOAD_POLICIES - -from pulp_file.tests.functional.constants import ( - FILE_FIXTURE_MANIFEST_URL, - FILE2_FIXTURE_MANIFEST_URL, -) -from pulp_file.tests.functional.utils import ( - gen_file_client, - gen_file_remote, -) - -from pulpcore.client.pulp_file import RemotesFileApi + from pulpcore.client.pulp_file.exceptions import ApiException -class CRUDRemotesTestCase(unittest.TestCase): - """CRUD remotes.""" - - @classmethod - def setUpClass(cls): - cls.remote_api = RemotesFileApi(gen_file_client()) - - def test_workflow(self): - self._create_remote() - self._create_same_name() - self._read_remote() - self._read_remotes() - self._partially_update() - self._fully_update() - self._delete() - - def _create_remote(self): - """Create a remote.""" - body = _gen_verbose_remote() - self.remote = self.remote_api.create(body) - for key in ("username", "password"): - del body[key] - for key, val in body.items(): - with self.subTest(key=key): - self.assertEqual(self.remote.to_dict()[key], val, key) - - def _create_same_name(self): - """Try to create a second remote with an identical name. - - See: `Pulp Smash #1055 - `_. 
- """ - body = gen_file_remote() - body["name"] = self.remote.name - with self.assertRaises(ApiException): - self.remote_api.create(body) - - def _read_remote(self): - """Read a remote by its href.""" - remote = self.remote_api.read(self.remote.pulp_href) - for key, val in self.remote.to_dict().items(): - with self.subTest(key=key): - self.assertEqual(remote.to_dict()[key], val, key) - - def _read_remotes(self): - """Read a remote by its name.""" - page = self.remote_api.list(name=self.remote.name) - self.assertEqual(len(page.results), 1) - for key, val in self.remote.to_dict().items(): - with self.subTest(key=key): - self.assertEqual(page.results[0].to_dict()[key], val, key) - - def _partially_update(self): - """Update a remote using HTTP PATCH.""" - body = _gen_verbose_remote() - response = self.remote_api.partial_update(self.remote.pulp_href, body) - monitor_task(response.task) - for key in ("username", "password"): - del body[key] - self.remote = self.remote_api.read(self.remote.pulp_href) - for key, val in body.items(): - with self.subTest(key=key): - self.assertEqual(self.remote.to_dict()[key], val, key) - - def _fully_update(self): - """Update a remote using HTTP PUT.""" - body = _gen_verbose_remote() - response = self.remote_api.update(self.remote.pulp_href, body) - monitor_task(response.task) - for key in ("username", "password"): - del body[key] - self.remote = self.remote_api.read(self.remote.pulp_href) - for key, val in body.items(): - with self.subTest(key=key): - self.assertEqual(self.remote.to_dict()[key], val, key) - - def _delete(self): - """Delete a remote.""" - response = self.remote_api.delete(self.remote.pulp_href) - monitor_task(response.task) - with self.assertRaises(ApiException): - self.remote_api.read(self.remote.pulp_href) - - def test_negative_create_file_remote_with_invalid_parameter(self): - """Attempt to create file remote passing invalid parameter.""" - with self.assertRaises(ApiException) as exc: - 
RemotesFileApi(gen_file_client()).create(gen_file_remote(foo="bar")) - - assert exc.exception.status == 400 - assert json.loads(exc.exception.body)["foo"] == ["Unexpected field"] - - -class CreateRemoteNoURLTestCase(unittest.TestCase): - """Verify whether is possible to create a remote without a URL.""" - - def test_all(self): - """Verify whether is possible to create a remote without a URL. - - This test targets the following issues: - - * `Pulp #3395 `_ - * `Pulp Smash #984 `_ - """ - body = gen_file_remote() - del body["url"] - with self.assertRaises(ApiException): - RemotesFileApi(gen_file_client()).create(body) - - -class RemoteDownloadPolicyTestCase(unittest.TestCase): - """Verify download policy behavior for valid and invalid values.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.remote_api = RemotesFileApi(gen_file_client()) - cls.policies = ON_DEMAND_DOWNLOAD_POLICIES - - def setUp(self): - self.remote = {} - self.body = _gen_verbose_remote() - - def test_workflow(self): - self._no_defined_policy() - self._change_policy() - self._invalid_policy() - - def _no_defined_policy(self): - """Verify the default policy `immediate`.""" - del self.body["policy"] - self.remote = self.remote_api.create(self.body).to_dict() - self.addCleanup(self.remote_api.delete, self.remote["pulp_href"]) - assert self.remote["policy"] == "immediate" - - def _change_policy(self): - """Verify ability to change policy to value other than the default. - - Update the remote policy to a valid value other than `immedaite` - and verify the new set value. 
- """ - changed_policy = choice([item for item in self.policies if item != "immediate"]) - response = self.remote_api.partial_update( - self.remote["pulp_href"], {"policy": changed_policy} - ) - monitor_task(response.task) - self.remote.update(self.remote_api.read(self.remote["pulp_href"]).to_dict()) - self.assertEqual(self.remote["policy"], changed_policy, self.remote) - - def _invalid_policy(self): - """Verify an invalid policy does not update the remote policy. - - Get the current remote policy. - Attempt to update the remote policy to an invalid value. - Verify the policy remains the same. - """ - remote = self.remote_api.read(self.remote["pulp_href"]).to_dict() - with self.assertRaises(ApiException): - self.remote_api.partial_update(self.remote["pulp_href"], {"policy": utils.uuid4()}) - self.remote.update(self.remote_api.read(self.remote["pulp_href"]).to_dict()) - self.assertEqual(remote["policy"], self.remote["policy"], self.remote) - - -def _gen_verbose_remote(): - """Return a semi-random dict for use in defining a remote. - - For most tests, it"s desirable to create remotes with as few attributes - as possible, so that the tests can specifically target and attempt to break - specific features. This module specifically targets remotes, so it makes - sense to provide as many attributes as possible. - - Note that 'username' and 'password' are write-only attributes. 
- """ - attrs = gen_file_remote(url=choice((FILE_FIXTURE_MANIFEST_URL, FILE2_FIXTURE_MANIFEST_URL))) - attrs.update( - { - "password": utils.uuid4(), - "username": utils.uuid4(), - "policy": choice(ON_DEMAND_DOWNLOAD_POLICIES), - } +@pytest.mark.parallel +def test_remote_crud_workflow(file_remote_api_client, gen_object_with_cleanup): + remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com"} + remote = gen_object_with_cleanup(file_remote_api_client, remote_data) + assert remote.url == remote_data["url"] + assert remote.name == remote_data["name"] + + with pytest.raises(ApiException) as exc: + gen_object_with_cleanup(file_remote_api_client, remote_data) + assert exc.value.status == 400 + assert json.loads(exc.value.body) == {"name": ["This field must be unique."]} + + update_response = file_remote_api_client.partial_update( + remote.pulp_href, {"url": "https://example.com"} + ) + task = monitor_task(update_response.task) + assert task.created_resources == [] + + remote = file_remote_api_client.read(remote.pulp_href) + assert remote.url == "https://example.com" + + all_new_remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com"} + update_response = file_remote_api_client.update(remote.pulp_href, all_new_remote_data) + task = monitor_task(update_response.task) + assert task.created_resources == [] + + remote = file_remote_api_client.read(remote.pulp_href) + assert remote.name == all_new_remote_data["name"] + assert remote.url == all_new_remote_data["url"] + + +@pytest.mark.parallel +def test_create_file_remote_with_invalid_parameter(file_remote_api_client, gen_object_with_cleanup): + unexpected_field_remote_data = { + "name": str(uuid.uuid4()), + "url": "http://example.com", + "foo": "bar", + } + + with pytest.raises(ApiException) as exc: + gen_object_with_cleanup(file_remote_api_client, unexpected_field_remote_data) + assert exc.value.status == 400 + assert json.loads(exc.value.body) == {"foo": ["Unexpected field"]} + + 
+@pytest.mark.parallel +def test_create_file_remote_without_url(file_remote_api_client, gen_object_with_cleanup): + with pytest.raises(ApiException) as exc: + gen_object_with_cleanup(file_remote_api_client, {"name": str(uuid.uuid4())}) + assert exc.value.status == 400 + assert json.loads(exc.value.body) == {"url": ["This field is required."]} + + +@pytest.mark.parallel +def test_default_remote_policy_immediate(file_remote_api_client, gen_object_with_cleanup): + remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com"} + remote = gen_object_with_cleanup(file_remote_api_client, remote_data) + assert remote.policy == "immediate" + + +@pytest.mark.parallel +def test_specify_remote_policy_streamed(file_remote_api_client, gen_object_with_cleanup): + remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com", "policy": "streamed"} + remote = gen_object_with_cleanup(file_remote_api_client, remote_data) + assert remote.policy == "streamed" + + +@pytest.mark.parallel +def test_specify_remote_policy_on_demand(file_remote_api_client, gen_object_with_cleanup): + remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com", "policy": "on_demand"} + remote = gen_object_with_cleanup(file_remote_api_client, remote_data) + assert remote.policy == "on_demand" + + +@pytest.mark.parallel +def test_can_update_remote_policy(file_remote_api_client, gen_object_with_cleanup): + initial_remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com"} + remote = gen_object_with_cleanup(file_remote_api_client, initial_remote_data) + assert remote.policy == "immediate" + + update_response = file_remote_api_client.partial_update( + remote.pulp_href, {"policy": "on_demand"} ) - return attrs + monitor_task(update_response.task) + + remote = file_remote_api_client.read(remote.pulp_href) + assert remote.policy == "on_demand" diff --git a/pulp_file/tests/functional/api/test_download_content.py b/pulp_file/tests/functional/api/test_download_content.py deleted 
file mode 100644 index ee6e7bd3..00000000 --- a/pulp_file/tests/functional/api/test_download_content.py +++ /dev/null @@ -1,124 +0,0 @@ -# coding=utf-8 -"""Tests that verify download of content served by Pulp.""" -import hashlib -import unittest -from random import choice -from urllib.parse import urljoin - -from pulp_smash import config, utils -from pulp_smash.pulp3.bindings import monitor_task -from pulp_smash.pulp3.constants import ON_DEMAND_DOWNLOAD_POLICIES -from pulp_smash.pulp3.utils import download_content_unit, gen_distribution, gen_repo - -from pulp_file.tests.functional.constants import FILE_FIXTURE_URL -from pulp_file.tests.functional.utils import ( - gen_file_client, - get_file_content_paths, - gen_file_remote, -) - -from pulpcore.client.pulp_file import ( - DistributionsFileApi, - PublicationsFileApi, - RepositoriesFileApi, - RepositorySyncURL, - RemotesFileApi, - FileFilePublication, -) - - -class DownloadContentTestCase(unittest.TestCase): - """Verify whether content served by pulp can be downloaded.""" - - def test_immediate(self): - """Download content from Pulp. Content is synced with immediate. - - See :meth:`do_test`. - """ - self.do_test("immediate") - - def test_on_demand_download_policies(self): - """Download content from Pulp. Content is synced with an On-Demand policy. - - See :meth:`do_test`. - - This test targets the following issue: - - `Pulp #4496 `_ - """ - for policy in ON_DEMAND_DOWNLOAD_POLICIES: - with self.subTest(policy): - self.do_test(policy) - - def do_test(self, policy): - """Verify whether content served by pulp can be downloaded. - - The process of publishing content is more involved in Pulp 3 than it - was under Pulp 2. Given a repository, the process is as follows: - - 1. Create a publication from the repository. (The latest repository - version is selected if no version is specified.) A publication is a - repository version plus metadata. - 2. Create a distribution from the publication. 
The distribution defines - at which URLs a publication is available, e.g. - ``http://example.com/content/foo/`` and - ``http://example.com/content/bar/``. - - Do the following: - - 1. Create, populate, publish, and distribute a repository. - 2. Select a random content unit in the distribution. Download that - content unit from Pulp, and verify that the content unit has the - same checksum when fetched directly from Pulp-Fixtures. - - This test targets the following issues: - - * `Pulp #2895 `_ - * `Pulp Smash #872 `_ - """ - cfg = config.get_config() - client = gen_file_client() - repo_api = RepositoriesFileApi(client) - remote_api = RemotesFileApi(client) - publications = PublicationsFileApi(client) - distributions = DistributionsFileApi(client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(policy=policy) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - # Sync a Repository - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - # Create a publication. - publish_data = FileFilePublication(repository=repo.pulp_href) - publish_response = publications.create(publish_data) - created_resources = monitor_task(publish_response.task).created_resources - publication_href = created_resources[0] - self.addCleanup(publications.delete, publication_href) - - # Create a distribution. 
- body = gen_distribution() - body["publication"] = publication_href - distribution_response = distributions.create(body) - created_resources = monitor_task(distribution_response.task).created_resources - distribution = distributions.read(created_resources[0]) - self.addCleanup(distributions.delete, distribution.pulp_href) - - # Pick a file, and download it from both Pulp Fixtures… - unit_path = choice(get_file_content_paths(repo.to_dict())) - fixtures_hash = hashlib.sha256( - utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path)) - ).hexdigest() - - # …and Pulp. - content = download_content_unit(cfg, distribution.to_dict(), unit_path) - pulp_hash = hashlib.sha256(content).hexdigest() - - self.assertEqual(fixtures_hash, pulp_hash) diff --git a/pulp_file/tests/functional/api/test_download_policies.py b/pulp_file/tests/functional/api/test_download_policies.py index a7183a71..04121cff 100644 --- a/pulp_file/tests/functional/api/test_download_policies.py +++ b/pulp_file/tests/functional/api/test_download_policies.py @@ -1,246 +1,191 @@ -# coding=utf-8 """Tests for Pulp`s download policies.""" -from random import choice -import unittest +from aiohttp.client_exceptions import ClientResponseError +import hashlib +import pytest +import uuid +from urllib.parse import urljoin -from pulp_smash.pulp3.bindings import delete_orphans, monitor_task -from pulp_smash.pulp3.constants import ON_DEMAND_DOWNLOAD_POLICIES +from pulp_smash.pulp3.bindings import monitor_task from pulp_smash.pulp3.utils import ( - gen_repo, get_added_content_summary, get_content_summary, ) -from pulp_file.tests.functional.constants import ( - FILE_FIXTURE_COUNT, - FILE_FIXTURE_SUMMARY, -) -from pulp_file.tests.functional.utils import ( - gen_file_client, - gen_file_remote, - gen_pulpcore_client, - skip_if, -) - -from pulpcore.client.pulpcore import ArtifactsApi -from pulpcore.client.pulp_file import ( - ContentFilesApi, - FileFilePublication, - PublicationsFileApi, - RepositoriesFileApi, - RepositorySyncURL, 
- RemotesFileApi, -) - - -class SyncPublishDownloadPolicyTestCase(unittest.TestCase): - """Sync/Publish a repository with different download policies. - - This test targets the following issues: - - `Pulp #4126 `_ - `Pulp #4418 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.client = gen_file_client() - cls.DP_ON_DEMAND = "on_demand" in ON_DEMAND_DOWNLOAD_POLICIES - cls.DP_STREAMED = "streamed" in ON_DEMAND_DOWNLOAD_POLICIES - - @skip_if(bool, "DP_ON_DEMAND", False) - def test_on_demand(self): - """Sync with ``on_demand`` download policy. See :meth:`do_sync`.""" - self.do_sync("on_demand") - self.do_publish("on_demand") - - @skip_if(bool, "DP_STREAMED", False) - def test_streamed(self): - """Sync with ``streamend`` download policy. See :meth:`do_sync`.""" - self.do_sync("streamed") - self.do_publish("streamed") - - def do_sync(self, download_policy): - """Sync repositories with the different ``download_policy``. - - Do the following: - - 1. Create a repository, and a remote. - 2. Assert that repository version is None. - 3. Sync the remote. - 4. Assert that repository version is not None. - 5. Assert that the correct number of possible units to be downloaded - were shown. - 6. Sync the remote one more time in order to create another repository - version. - 7. Assert that repository version is the same as the previous one. - 8. Assert that the same number of units are shown, and after the - second sync no extra units should be shown, since the same remote - was synced again. 
- """ - # delete orphans to assure that no content units are present on the - # file system - delete_orphans() - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(**{"policy": download_policy}) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - # Sync the repository. - self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/") - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - self.assertIsNotNone(repo.latest_version_href) - self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - - # Sync the repository again. 
- latest_version_href = repo.latest_version_href - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - self.assertEqual(latest_version_href, repo.latest_version_href) - self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - - def do_publish(self, download_policy): - """Publish repository synced with lazy download policy.""" - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - publications = PublicationsFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(policy=download_policy) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - publish_data = FileFilePublication(repository=repo.pulp_href) - publish_response = publications.create(publish_data) - created_resources = monitor_task(publish_response.task).created_resources - publication_href = created_resources[0] - self.addCleanup(publications.delete, publication_href) - publication = publications.read(publication_href) - self.assertIsNotNone(publication.repository_version, publication) - - -class LazySyncedContentAccessTestCase(unittest.TestCase): - """Verify that lazy synced content can be acessed using content endpoint. - - Assert that one acessing lazy synced content using the content endpoint, - e.g. ``http://localhost/pulp/api/v3/content/file/files`` will not raise an - HTTP exception. - - This test targets the following issue: - - `Pulp #4463 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.client = gen_file_client() - - def test_on_demand(self): - """Test ``on_demand``. 
See :meth:`do_test`.""" - self.do_test("on_demand") - - def test_streamed(self): - """Test ``streamed``. See :meth:`do_test`.""" - self.do_test("streamed") - - def do_test(self, policy): - """Access lazy synced content on using content endpoint.""" - # delete orphans to assure that no content units are present on the - # file system - delete_orphans() - content_api = ContentFilesApi(self.client) - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(**{"policy": policy}) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - # Sync the repository. - self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/") - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - # Assert that no HTTP error was raised. - # Assert that the number of units present is according to the synced - # feed. - content = content_api.list().to_dict()["results"] - self.assertEqual(len(content), FILE_FIXTURE_COUNT, content) - - -class SwitchDownloadPolicyTestCase(unittest.TestCase): - """Perform a lazy sync, and change to immediate to force download. - - Perform an immediate sync to download artifacts for content units that - are already created. 
- - This test case targets the following issue: - - * `Pulp #4467 `_ - """ - - def test_all(self): - """Perform a lazy sync and change to immeditae to force download.""" - # delete orphans to assure that no content units are present on the - # file system - delete_orphans() - file_client = gen_file_client() - core_client = gen_pulpcore_client() - artifacts_api = ArtifactsApi(core_client) - repo_api = RepositoriesFileApi(file_client) - remote_api = RemotesFileApi(file_client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES)) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - # Sync the repository using a lazy download policy - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - artifacts = artifacts_api.list().to_dict()["results"] - self.assertEqual(len(artifacts), 0, artifacts) - - # Update the policy to immediate - response = remote_api.partial_update(remote.pulp_href, {"policy": "immediate"}) +from pulp_file.tests.functional.utils import get_files_in_manifest, download_file + +from pulpcore.app import settings +from pulpcore.client.pulp_file import FileFilePublication, RepositorySyncURL + + +def _do_range_request_download_and_assert(url, range_header, expected_bytes): + file1 = download_file(url, headers=range_header) + file2 = download_file(url, headers=range_header) + assert expected_bytes == len(file1.body) + assert expected_bytes == len(file2.body) + assert file1.body == file2.body + + assert file1.response_obj.status == 206 + assert file1.response_obj.status == file2.response_obj.status + + assert str(expected_bytes) == file1.response_obj.headers["Content-Length"] + assert str(expected_bytes) == file2.response_obj.headers["Content-Length"] + + assert ( + 
file1.response_obj.headers["Content-Range"] == file2.response_obj.headers["Content-Range"] + ) + + +@pytest.mark.parallel +@pytest.mark.parametrize("download_policy", ["immediate", "on_demand", "streamed"]) +def test_download_policy( + artifacts_api_client, + file_repo, + file_fixture_gen_remote_ssl, + file_remote_api_client, + file_repo_api_client, + file_pub_api_client, + file_distro_api_client, + range_header_manifest_path, + gen_object_with_cleanup, + file_content_api_client, + download_policy, +): + """Test that "on_demand" and "streamed" download policies work as expected.""" + remote = file_fixture_gen_remote_ssl( + manifest_path=range_header_manifest_path, policy=download_policy + ) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + assert file_repo.latest_version_href.endswith("/versions/0/") + + # Check what content and artifacts are in the fixture repository + expected_files = get_files_in_manifest(remote.url) + + # Sync from the remote and assert that a new repository version is created + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + assert file_repo.latest_version_href.endswith("/versions/1/") + assert get_content_summary(file_repo.to_dict()) == {"file.file": len(expected_files)} + assert get_added_content_summary(file_repo.to_dict()) == {"file.file": len(expected_files)} + + # Sync again and assert that nothing changes + latest_version_href = file_repo.latest_version_href + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + assert latest_version_href == file_repo.latest_version_href + assert get_content_summary(file_repo.to_dict()) == {"file.file": len(expected_files)} + + # Assert that no HTTP error was raised when list on_demand content + content = file_content_api_client.list( + 
repository_version=file_repo.latest_version_href + ).to_dict()["results"] + assert len(content) == len(expected_files) + + # Create a Distribution + distribution = gen_object_with_cleanup( + file_distro_api_client, + { + "name": str(uuid.uuid4()), + "base_path": str(uuid.uuid4()), + "repository": file_repo.pulp_href, + }, + ) + + # Assert that un-published content is not available + for expected_file in expected_files: + with pytest.raises(ClientResponseError) as exc: + content_unit_url = urljoin(distribution.base_url, expected_file[1]) + download_file(content_unit_url) + assert exc.value.code == 404 + + # Create a File Publication and assert that the repository_version is set on the Publication. + publish_data = FileFilePublication(repository=file_repo.pulp_href) + publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + assert publication.repository_version is not None + + # Download one of the files and assert that it has the right checksum + expected_files_list = list(expected_files) + content_unit = expected_files_list[0] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + downloaded_file = download_file(content_unit_url) + actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest() + expected_checksum = content_unit[1] + assert expected_checksum == actual_checksum + + # Assert proper download with range requests smaller than one chunk of downloader + range_header = {"Range": "bytes=1048586-1049586"} + num_bytes = 1001 + content_unit = expected_files_list[1] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) + + # Assert proper download with range requests spanning multiple chunks of downloader + range_header = {"Range": "bytes=1048176-2248576"} + num_bytes = 1200401 + content_unit = expected_files_list[2] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + 
_do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) + + # Assert that multiple requests with different Range header values work as expected + range_header = {"Range": "bytes=1048176-2248576"} + num_bytes = 1200401 + content_unit = expected_files_list[3] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) + + range_header = {"Range": "bytes=2042176-3248576"} + num_bytes = 1206401 + content_unit = expected_files_list[3] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) + + # Assert that range requests with a negative start value errors as expected + content_unit = expected_files_list[4] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + # The S3 test API project doesn't handle invalid Range values correctly + if settings.DEFAULT_FILE_STORAGE == "pulpcore.app.models.storage.FileSystem": + with pytest.raises(ClientResponseError) as exc: + range_header = {"Range": "bytes=-1-11"} + download_file(content_unit_url, headers=range_header) + assert exc.value.code == 416 + + # Assert that a range request with a start value larger than the content errors + content_unit = expected_files_list[5] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + with pytest.raises(ClientResponseError) as exc: + range_header = {"Range": "bytes=10485860-10485870"} + download_file(content_unit_url, headers=range_header) + assert exc.value.code == 416 + + # Assert that a range request with an end value that is larger than the data works + range_header = {"Range": "bytes=4193804-4294304"} + num_bytes = 500 + content_unit = expected_files_list[6] + content_unit_url = urljoin(distribution.base_url, content_unit[0]) + _do_range_request_download_and_assert(content_unit_url, range_header, num_bytes) + + # Assert that artifacts were not 
downloaded if policy is not immediate + if download_policy != "immediate": + # Assert that artifacts were not downloaded + content_unit = expected_files_list[7] + assert artifacts_api_client.list(sha256=content_unit[1]).results == [] + + # Assert that an artifact was saved for the "on_demand" policy and not saved for the + # "streamed" policy. Only check the first content unit because Range requests don't + # cause the artifact to be saved. https://github.com/pulp/pulpcore/issues/3060 + content_unit = expected_files_list[0] + if download_policy == "on_demand": + assert len(artifacts_api_client.list(sha256=content_unit[1]).results) == 1 + else: + assert len(artifacts_api_client.list(sha256=content_unit[1]).results) == 0 + + # Change download policy to immediate + response = file_remote_api_client.partial_update(remote.pulp_href, {"policy": "immediate"}) monitor_task(response.task) - remote = remote_api.read(remote.pulp_href) - self.assertEqual(remote.policy, "immediate") - - # Sync using immediate download policy - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) + remote = file_remote_api_client.read(remote.pulp_href) + assert remote.policy == "immediate" - # Assert that missing artifacts are downloaded - artifacts = artifacts_api.list().to_dict()["results"] - self.assertEqual(len(artifacts), FILE_FIXTURE_COUNT, artifacts) + # Sync from the remote and assert that artifacts are downloaded + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + for f in expected_files: + assert len(artifacts_api_client.list(sha256=f[1]).results) == 1 diff --git a/pulp_file/tests/functional/api/test_generic_list.py b/pulp_file/tests/functional/api/test_generic_list.py index 22943b22..d6e7a356 100644 --- a/pulp_file/tests/functional/api/test_generic_list.py +++ b/pulp_file/tests/functional/api/test_generic_list.py @@ -5,18 +5,18 @@ 
@pytest.mark.parallel -def test_read_all_repos_generic(file_repo_api_client, file_repo): +def test_read_all_repos_generic(repositories_api_client, file_repo): """Ensure name is displayed when listing repositories generic.""" - response = file_repo_api_client.list() + response = repositories_api_client.list() assert response.count != 0 for repo in response.results: assert repo.name is not None @pytest.mark.parallel -def test_read_all_content_generic(file_content_api_client, file_random_content_unit): +def test_read_all_content_generic(content_api_client, file_random_content_unit): """Ensure href is displayed when listing content generic.""" - response = file_content_api_client.list() + response = content_api_client.list() assert response.count != 0 for content in response.results: assert content.pulp_href is not None @@ -24,14 +24,37 @@ def test_read_all_content_generic(file_content_api_client, file_random_content_u @pytest.mark.parallel def test_read_all_content_guards_generic( - content_guards_api_client, tls_certificate_authority_cert, x509_content_guards_api_client + gen_object_with_cleanup, + content_guards_api_client, + tls_certificate_authority_cert, + x509_content_guards_api_client, ): """Ensure name is displayed when listing content guards generic.""" - x509_content_guards_api_client.create( - {"name": str(uuid.uuid4()), "ca_certificate": tls_certificate_authority_cert} + gen_object_with_cleanup( + x509_content_guards_api_client, + {"name": str(uuid.uuid4()), "ca_certificate": tls_certificate_authority_cert}, ) response = content_guards_api_client.list() assert response.count != 0 for content_guard in response.results: assert content_guard.name is not None + + +@pytest.mark.parallel +def test_read_all_master_model_remotes_generic( + remotes_api_client, gen_object_with_cleanup, file_remote_api_client +): + remote_data = {"name": str(uuid.uuid4()), "url": "http://example.com"} + remote1 = gen_object_with_cleanup(file_remote_api_client, remote_data) + 
remote_data = {"name": str(uuid.uuid4()), "url": "http://example.org"} + remote2 = gen_object_with_cleanup(file_remote_api_client, remote_data) + + response = remotes_api_client.list() + assert response.count != 0 + + hrefs = [] + for remote in response.results: + hrefs.append(remote.pulp_href) + assert remote1.pulp_href in hrefs + assert remote2.pulp_href in hrefs diff --git a/pulp_file/tests/functional/api/test_labels.py b/pulp_file/tests/functional/api/test_labels.py index fbff773f..b6b0e399 100644 --- a/pulp_file/tests/functional/api/test_labels.py +++ b/pulp_file/tests/functional/api/test_labels.py @@ -1,17 +1,9 @@ -import json -import unittest - from uuid import uuid4 import pytest -from pulp_smash import config from pulp_smash.pulp3.bindings import monitor_task -from pulpcore.client.pulp_file import ( - ApiClient as FileApiClient, - RepositoriesFileApi, -) from pulpcore.client.pulp_file.exceptions import ApiException diff --git a/pulp_file/tests/functional/api/test_mime_types.py b/pulp_file/tests/functional/api/test_mime_types.py new file mode 100644 index 00000000..6386fc8d --- /dev/null +++ b/pulp_file/tests/functional/api/test_mime_types.py @@ -0,0 +1,79 @@ +import aiohttp +import asyncio +import pytest +import uuid + +from urllib.parse import urljoin + +from pulp_smash.pulp3.bindings import monitor_task + +from pulpcore.client.pulp_file import FileFileDistribution, RepositoryAddRemoveContent + + +@pytest.mark.parallel +def test_content_types( + file_distro_api_client, + file_repo_api_client, + file_repo_with_auto_publish, + file_content_unit_with_name_factory, + gen_object_with_cleanup, +): + """Test if content-app correctly returns mime-types based on filenames.""" + files = { + "tar.gz": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.tar.gz"), + "xml.gz": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.xml.gz"), + "xml.bz2": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.xml.bz2"), + "xml.zstd": 
file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.xml.zstd"), + "xml.xz": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.xml.xz"), + "json.zstd": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.json.zstd"), + "json": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.json"), + "txt": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.txt"), + "xml": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.xml"), + "jpg": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.jpg"), + "JPG": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.JPG"), + "halabala": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.halabala"), + "noextension1": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.asd/.asd/a"), + "noextension2": file_content_unit_with_name_factory(f"{str(uuid.uuid4())}.....f"), + } + + units_to_add = list(map(lambda f: f.pulp_href, files.values())) + data = RepositoryAddRemoveContent(add_content_units=units_to_add) + monitor_task(file_repo_api_client.modify(file_repo_with_auto_publish.pulp_href, data).task) + + data = FileFileDistribution( + name=str(uuid.uuid4()), + base_path=str(uuid.uuid4()), + repository=file_repo_with_auto_publish.pulp_href, + ) + distribution = gen_object_with_cleanup(file_distro_api_client, data) + + received_mimetypes = {} + for extension, content_unit in files.items(): + + async def get_content_type(): + async with aiohttp.ClientSession() as session: + url = urljoin(distribution.base_url, content_unit.relative_path) + async with session.get(url) as response: + return response.headers.get("Content-Type") + + content_type = asyncio.run(get_content_type()) + received_mimetypes[extension] = content_type + + expected_mimetypes = { + "tar.gz": "application/gzip", + "xml.gz": "application/gzip", + "xml.bz2": "application/x-bzip2", + "xml.zstd": "application/zstd", + "xml.xz": "application/x-xz", + "json.zstd": "application/zstd", + "json": "application/json", + "txt": 
"text/plain", + "xml": "text/xml", + "jpg": "image/jpeg", + "JPG": "image/jpeg", + # The application/octet-stream MIME type is used for unknown binary files + "halabala": "application/octet-stream", + "noextension1": "application/octet-stream", + "noextension2": "application/octet-stream", + } + assert received_mimetypes == expected_mimetypes diff --git a/pulp_file/tests/functional/api/test_publish.py b/pulp_file/tests/functional/api/test_publish.py index 3ac7adad..039481b5 100644 --- a/pulp_file/tests/functional/api/test_publish.py +++ b/pulp_file/tests/functional/api/test_publish.py @@ -1,114 +1,128 @@ -# coding=utf-8 """Tests that publish file plugin repositories.""" -import unittest -from random import choice +from aiohttp import BasicAuth +import json +import pytest +from urllib.parse import urljoin -from pulp_smash import config from pulp_smash.pulp3.bindings import monitor_task -from pulp_smash.pulp3.utils import gen_repo, get_content, get_versions, modify_repo - -from pulp_file.tests.functional.constants import FILE_CONTENT_NAME -from pulp_file.tests.functional.utils import ( - gen_file_client, - gen_file_remote, -) from pulpcore.client.pulp_file import ( - PublicationsFileApi, - RepositoriesFileApi, RepositorySyncURL, - RemotesFileApi, FileFilePublication, ) from pulpcore.client.pulp_file.exceptions import ApiException - - -class PublishAnyRepoVersionTestCase(unittest.TestCase): - """Test whether a particular repository version can be published. 
- - This test targets the following issues: - - * `Pulp #3324 `_ - * `Pulp Smash #897 `_ - """ - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - - client = gen_file_client() - cls.repo_api = RepositoriesFileApi(client) - cls.remote_api = RemotesFileApi(client) - cls.publications = PublicationsFileApi(client) - - def setUp(self): - """Create a new repository before each test.""" - body = gen_file_remote() - remote = self.remote_api.create(body) - self.addCleanup(self.remote_api.delete, remote.pulp_href) - - repo = self.repo_api.create(gen_repo()) - self.addCleanup(self.repo_api.delete, repo.pulp_href) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - - self.repo = self.repo_api.read(repo.pulp_href) - - def test_all(self): - """Test whether a particular repository version can be published. - - 1. Create a repository with at least 2 repository versions. - 2. Create a publication by supplying the latest ``repository_version``. - 3. Assert that the publication ``repository_version`` attribute points - to the latest repository version. - 4. Create a publication by supplying the non-latest ``repository_version``. - 5. Assert that the publication ``repository_version`` attribute points - to the supplied repository version. - 6. Assert that an exception is raised when providing two different - repository versions to be published at same time. 
- """ - # Step 1 - for file_content in get_content(self.repo.to_dict())[FILE_CONTENT_NAME]: - modify_repo(self.cfg, self.repo.to_dict(), remove_units=[file_content]) - version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(self.repo.to_dict())) - non_latest = choice(version_hrefs[:-1]) - - # Step 2 - publish_data = FileFilePublication(repository=self.repo.pulp_href) - publication = self.create_publication(publish_data) - - # Step 3 - self.assertEqual(publication.repository_version, version_hrefs[-1]) - - # Step 4 - publish_data = FileFilePublication(repository_version=non_latest) - publication = self.create_publication(publish_data) - - # Step 5 - self.assertEqual(publication.repository_version, non_latest) - - # Step 6 - with self.assertRaises(ApiException): - body = {"repository": self.repo.pulp_href, "repository_version": non_latest} - self.publications.create(body) - - def test_custom_manifest(self): - """Test whether a repository version can be published with a specified manifest.""" - publish_data = FileFilePublication(repository=self.repo.pulp_href) - publication = self.create_publication(publish_data) - self.assertEqual(publication.manifest, "PULP_MANIFEST") - - publish_data = FileFilePublication(repository=self.repo.pulp_href, manifest="listing") - publication = self.create_publication(publish_data) - self.assertEqual(publication.manifest, "listing") - - def create_publication(self, publish_data): - """Create a new publication from the passed data.""" - publish_response = self.publications.create(publish_data) - created_resources = monitor_task(publish_response.task).created_resources - publication_href = created_resources[0] - self.addCleanup(self.publications.delete, publication_href) - return self.publications.read(publication_href) +from pulp_file.tests.functional.utils import download_file + + +@pytest.mark.parallel +def test_crd_publications( + file_repo, + file_fixture_gen_remote_ssl, + file_repo_api_client, + file_pub_api_client, + 
basic_manifest_path, + gen_object_with_cleanup, + file_random_content_unit, +): + # Tests that a publication can be created from a specific repository version + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + + # Sync from the remote + initial_repo_version = file_repo.latest_version_href + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + first_repo_version_href = file_repo_api_client.read(file_repo.pulp_href).latest_version_href + assert first_repo_version_href.endswith("/versions/1/") + + # Add a new content unit to the repository and assert that a new repository version is created + monitor_task( + file_repo_api_client.modify( + file_repo.pulp_href, {"add_content_units": [file_random_content_unit.pulp_href]} + ).task + ) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + assert file_repo.latest_version_href.endswith("/versions/2/") + + # Create a Publication using a repository and assert that its repository_version is the latest + publish_data = FileFilePublication(repository=file_repo.pulp_href) + publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + assert publication.repository_version == file_repo.latest_version_href + assert publication.manifest == "PULP_MANIFEST" + + # Create a Publication using a non-latest repository version + publish_data = FileFilePublication(repository_version=first_repo_version_href) + publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + assert publication.repository_version == first_repo_version_href + + # Assert that a publication can't be created by specifying a repository and a repo version + publish_data = FileFilePublication( + repository=file_repo.pulp_href, repository_version=first_repo_version_href + ) + with pytest.raises(ApiException) as exc: + gen_object_with_cleanup(file_pub_api_client, publish_data) + assert exc.value.status == 400 + + # Assert 
that a Publication can be created using a custom manifest + publish_data = FileFilePublication(repository=file_repo.pulp_href, manifest="listing") + publication = gen_object_with_cleanup(file_pub_api_client, publish_data) + assert publication.manifest == "listing" + + # Assert that a Publication can be accessed using pulp_href + publication = file_pub_api_client.read(publication.pulp_href) + + # Read a publication by its href providing specific field list. + config = file_repo_api_client.api_client.configuration + auth = BasicAuth(login=config.username, password=config.password) + full_href = urljoin(config.host, publication.pulp_href) + for fields in [ + ("pulp_href", "pulp_created"), + ("pulp_href", "distributions"), + ("pulp_created", "repository", "distributions"), + ]: + response = download_file(f"{full_href}?fields={','.join(fields)}", auth=auth) + assert sorted(fields) == sorted(json.loads(response.body).keys()) + + # Read a publication by its href excluding specific fields. + response = download_file(f"{full_href}?exclude_fields=created,repository", auth=auth) + response_fields = json.loads(response.body).keys() + assert "created" not in response_fields + assert "repository" not in response_fields + + # Read a publication by its repository version (2 of the 3 publications should be returned) + page = file_pub_api_client.list(repository_version=file_repo.latest_version_href) + assert len(page.results) == 2 + for key, val in publication.to_dict().items(): + assert getattr(page.results[0], key) == val + + # Filter by repo version for which no publication exists + page = file_pub_api_client.list(repository_version=initial_repo_version) + assert len(page.results) == 0 + + # Filter by a repo version that does not exist + with pytest.raises(ApiException) as exc: + invalid_version = initial_repo_version.replace("versions/0", "versions/10") + file_pub_api_client.list(repository_version=invalid_version) + assert exc.value.status == 400 + + # Read a publication by its 
created time + page = file_pub_api_client.list(pulp_created=publication.pulp_created) + assert len(page.results) == 1 + for key, val in publication.to_dict().items(): + assert getattr(page.results[0], key) == val + + # Filter for created time for which no publication exists + page = file_pub_api_client.list(pulp_created=file_repo.pulp_created) + assert len(page.results) == 0 + + # Assert that publications are ordered by created time + page = file_pub_api_client.list() + for i, pub in enumerate(page.results[:-1]): + current = pub.pulp_created + previous = page.results[i + 1].pulp_created + assert current > previous + + # Delete a publication and assert that it can't be read again + file_pub_api_client.delete(publication.pulp_href) + with pytest.raises(ApiException) as exc: + file_pub_api_client.read(publication.pulp_href) + assert exc.value.status == 404 diff --git a/pulp_file/tests/functional/api/test_pulp_manifest.py b/pulp_file/tests/functional/api/test_pulp_manifest.py deleted file mode 100644 index 32f4c70f..00000000 --- a/pulp_file/tests/functional/api/test_pulp_manifest.py +++ /dev/null @@ -1,83 +0,0 @@ -# coding=utf-8 -"""Tests whether Pulp handles PULP_MANIFEST information.""" -import csv -import requests -import unittest -from urllib.parse import urljoin - -from pulp_smash.pulp3.bindings import monitor_task -from pulp_smash.pulp3.utils import gen_distribution, gen_repo - -from pulp_file.tests.functional.constants import FILE_FIXTURE_COUNT -from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote - -from pulpcore.client.pulp_file import ( - DistributionsFileApi, - PublicationsFileApi, - RepositoriesFileApi, - RepositorySyncURL, - RemotesFileApi, - FileFilePublication, -) - - -class AccessingPublishedDataTestCase(unittest.TestCase): - """Assert that an HTTP error is not raised when accessing published data. 
- - This test targets the following issue: - - * `Pulp #4519 https://pulp.plan.io/issues/4519`_ - """ - - @classmethod - def setUpClass(cls): - """Define class-wide variable.""" - cls.client = gen_file_client() - - def test_access_error(self): - """HTTP error is not raised when accessing published data.""" - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - publications = PublicationsFileApi(self.client) - distributions = DistributionsFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - remote = remote_api.create(gen_file_remote()) - self.addCleanup(remote_api.delete, remote.pulp_href) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - publish_data = FileFilePublication(repository=repo.pulp_href) - publish_response = publications.create(publish_data) - created_resources = monitor_task(publish_response.task).created_resources - publication_href = created_resources[0] - self.addCleanup(publications.delete, publication_href) - - body = gen_distribution() - body["publication"] = publication_href - - distribution_response = distributions.create(body) - created_resources = monitor_task(distribution_response.task).created_resources - distribution = distributions.read(created_resources[0]) - self.addCleanup(distributions.delete, distribution.pulp_href) - - pulp_manifest = parse_pulp_manifest( - self.download_pulp_manifest(distribution.to_dict(), "PULP_MANIFEST") - ) - - self.assertEqual(len(pulp_manifest), FILE_FIXTURE_COUNT, pulp_manifest) - - def download_pulp_manifest(self, distribution, unit_path): - """Download pulp manifest.""" - unit_url = urljoin(distribution["base_url"] + "/", unit_path) - return requests.get(unit_url) - - -def parse_pulp_manifest(pulp_manifest): - """Parse pulp manifest.""" - return 
list(csv.DictReader(pulp_manifest.text.splitlines(), ("name", "checksum", "size"))) diff --git a/pulp_file/tests/functional/api/test_rbac.py b/pulp_file/tests/functional/api/test_rbac.py index c9b3813e..336bb0f0 100644 --- a/pulp_file/tests/functional/api/test_rbac.py +++ b/pulp_file/tests/functional/api/test_rbac.py @@ -9,7 +9,7 @@ ) from pulpcore.client.pulp_file import AsyncOperationResponse -from pulp_file.tests.functional.utils import gen_repo, gen_file_remote +from pulp_file.tests.functional.utils import gen_repo, gen_file_remote, gen_artifact @pytest.fixture() @@ -46,7 +46,7 @@ def _try_action(user, client, action, outcome, *args, **kwargs): def test_basic_actions(gen_users, file_repo_api_client, gen_object_with_cleanup): """Test list, read, create, update and delete apis.""" alice, bob, charlie = gen_users("filerepository") - admin_repo = gen_object_with_cleanup(file_repo_api_client, gen_repo()) + gen_object_with_cleanup(file_repo_api_client, gen_repo()) a_list = _try_action(alice, file_repo_api_client, "list", 200) assert a_list.count >= 1 @@ -108,17 +108,54 @@ def test_role_management(gen_users, file_repo_api_client, gen_object_with_cleanu _try_action(charlie, file_repo_api_client, "my_permissions", 404, href) -def test_content_apis(gen_users, file_content_api_client): - """Check that each user can list content.""" +def test_content_apis( + gen_users, + file_content_api_client, + file_repo_api_client, + file_remote_api_client, + file_fixture_server, + basic_manifest_path, + gen_object_with_cleanup, +): + """Check content listing, scoping and upload APIs.""" alice, bob, charlie = gen_users() - with alice: - aresponse = file_content_api_client.list() - with bob: - bresponse = file_content_api_client.list() - with charlie: - cresponse = file_content_api_client.list() + aresponse = _try_action(alice, file_content_api_client, "list", 200) + bresponse = _try_action(bob, file_content_api_client, "list", 200) + cresponse = _try_action(charlie, 
file_content_api_client, "list", 200) + + assert aresponse.count == bresponse.count == cresponse.count == 0 + + alice, bob, charlie = gen_users(["filerepository"]) + repo = gen_object_with_cleanup(file_repo_api_client, gen_repo()) + remote = gen_object_with_cleanup(file_remote_api_client, gen_file_remote()) + monitor_task(file_repo_api_client.sync(repo.pulp_href, {"remote": remote.pulp_href}).task) + + aresponse = _try_action(alice, file_content_api_client, "list", 200) + bresponse = _try_action(bob, file_content_api_client, "list", 200) + cresponse = _try_action(charlie, file_content_api_client, "list", 200) + + assert aresponse.count > bresponse.count + assert bresponse.count == cresponse.count == 0 + + nested_role = {"users": [charlie.username], "role": "file.filerepository_viewer"} + file_repo_api_client.add_role(repo.pulp_href, nested_role) + + cresponse = _try_action(charlie, file_content_api_client, "list", 200) + assert cresponse.count > bresponse.count + + file_url = file_fixture_server.make_url("/basic") + # This might need to change if we change Artifact's default upload policy + artifact1 = gen_artifact(url=file_url + "/1.iso")["pulp_href"] + + body = {"artifact": artifact1} + _try_action(alice, file_content_api_client, "create", 400, "1.iso", **body) + body["repository"] = repo.pulp_href + _try_action(bob, file_content_api_client, "create", 403, "1.iso", **body) + _try_action(charlie, file_content_api_client, "create", 403, "1.iso", **body) - assert aresponse.count == bresponse.count == cresponse.count + nested_role = {"users": [charlie.username], "role": "file.filerepository_owner"} + file_repo_api_client.add_role(repo.pulp_href, nested_role) + _try_action(charlie, file_content_api_client, "create", 202, "1.iso", **body) @pytest.mark.parallel diff --git a/pulp_file/tests/functional/api/test_sync.py b/pulp_file/tests/functional/api/test_sync.py index a003ae15..6ec9d1a8 100644 --- a/pulp_file/tests/functional/api/test_sync.py +++ 
b/pulp_file/tests/functional/api/test_sync.py @@ -1,231 +1,126 @@ """Tests that sync file plugin repositories.""" -import unittest +import uuid + +import pytest -from pulp_smash import config from pulp_smash.pulp3.bindings import monitor_task, PulpTaskError from pulp_smash.pulp3.utils import ( - gen_repo, get_added_content_summary, get_content_summary, wget_download_on_host, ) from pulp_file.tests.functional.constants import ( - FILE_FIXTURE_MANIFEST_URL, - FILE_FIXTURE_SUMMARY, FILE_FIXTURE_URL, - FILE_INVALID_MANIFEST_URL, - FILE2_FIXTURE_MANIFEST_URL, ) -from pulp_file.tests.functional.utils import gen_file_client, gen_file_remote from pulpcore.client.pulp_file import ( - RepositoriesFileApi, RepositorySyncURL, - RemotesFileApi, - ContentFilesApi, - PublicationsFileApi, ) -class BasicSyncTestCase(unittest.TestCase): - """Sync a repository with the file plugin.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = gen_file_client() - - def test_sync_local(self): - """Test syncing from the local filesystem.""" - wget_download_on_host(FILE_FIXTURE_URL, "/tmp") - - self.do_test("file:///tmp/file/PULP_MANIFEST") - - def test_sync_http(self): - """Test syncing from the network.""" - self.do_test(FILE_FIXTURE_MANIFEST_URL) - - def do_test(self, url): - """Sync repositories with the file plugin. - - In order to sync a repository a remote has to be associated within - this repository. When a repository is created this version field is set - as None. After a sync the repository version is updated. - - Do the following: - - 1. Create a repository, and a remote. - 2. Assert that repository version is None. - 3. Sync the remote. - 4. Assert that repository version is not None. - 5. Assert that the correct number of units were added and are present - in the repo. - 6. Sync the remote one more time. - 7. Assert that repository version is different from the previous one. - 8. 
Assert that the same number of are present and that no units were - added. - """ - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(url) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - # Sync the repository. - self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/") - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - self.assertIsNotNone(repo.latest_version_href) - self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - - # Sync the repository again. - latest_version_href = repo.latest_version_href - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - - self.assertEqual(latest_version_href, repo.latest_version_href) - self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - - -class MirrorSyncTestCase(unittest.TestCase): - """Do a mirrored sync a repository with the file plugin.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.cfg = config.get_config() - cls.client = gen_file_client() - - cls.content_api = ContentFilesApi(cls.client) - cls.repo_api = RepositoriesFileApi(cls.client) - cls.remote_api = RemotesFileApi(cls.client) - cls.publications_api = PublicationsFileApi(cls.client) - - def setUp(self): - """Create remote, repo, and distribution.""" - self.remote = self.remote_api.create(gen_file_remote()) - self.repo = self.repo_api.create(gen_repo()) - - def 
tearDown(self): - """Clean up.""" - monitor_task(self.repo_api.delete(self.repo.pulp_href).task) - monitor_task(self.remote_api.delete(self.remote.pulp_href).task) - - def test_01_sync(self): - """Assert that syncing the repository w/ mirror=True creates a publication.""" - # Sync the repository. - repository_sync_data = RepositorySyncURL(remote=self.remote.pulp_href, mirror=True) - sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data) - task = monitor_task(sync_response.task) - - # Check that all the appropriate resources were created - self.assertEqual(len(task.created_resources), 2) - self.assertTrue(any(["publication" in resource for resource in task.created_resources])) - self.assertTrue(any(["version" in resource for resource in task.created_resources])) - - -class SyncInvalidTestCase(unittest.TestCase): - """Sync a repository with a given url on the remote.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.client = gen_file_client() - - def test_invalid_url(self): - """Sync a repository using a remote url that does not exist. - - Test that we get a task failure. See :meth:`do_test`. - """ - with self.assertRaises(PulpTaskError) as cm: - task = self.do_test("http://i-am-an-invalid-url.com/invalid/") - task = cm.exception.task.to_dict() - self.assertIsNotNone(task["error"]["description"]) - - def test_invalid_file(self): - """Sync a repository using an invalid file repository. - - Assert that an exception is raised, and that error message has - keywords related to the reason of the failure. See :meth:`do_test`. 
- """ - with self.assertRaises(PulpTaskError) as cm: - task = self.do_test(FILE_INVALID_MANIFEST_URL) - task = cm.exception.task.to_dict() - for key in ("checksum", "failed"): - self.assertIn(key, task["error"]["description"]) - - def do_test(self, url): - """Sync a repository given ``url`` on the remote.""" - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - body = gen_file_remote(url=url) - remote = remote_api.create(body) - self.addCleanup(remote_api.delete, remote.pulp_href) - - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - return monitor_task(sync_response.task) - - -class SyncDuplicateFileRepoTestCase(unittest.TestCase): - """Sync multiple remotes containing duplicate files.""" - - @classmethod - def setUpClass(cls): - """Create class-wide variables.""" - cls.client = gen_file_client() - - def test_duplicate_file_sync(self): - """Sync a repository with remotes containing same file names. - - This test does the following. - - 1. Create a repository in pulp. - 2. Create two remotes containing the same file. - 3. Check whether the created repo has only one copy of the file. 
- - This test targets the following issue: - - `Pulp #4738 `_ - """ - repo_api = RepositoriesFileApi(self.client) - remote_api = RemotesFileApi(self.client) - - # Step 1 - repo = repo_api.create(gen_repo()) - self.addCleanup(repo_api.delete, repo.pulp_href) - - # Step 2 - remote = remote_api.create(gen_file_remote()) - self.addCleanup(remote_api.delete, remote.pulp_href) - remote2 = remote_api.create(gen_file_remote(url=FILE2_FIXTURE_MANIFEST_URL)) - - self.addCleanup(remote_api.delete, remote2.pulp_href) - repository_sync_data = RepositorySyncURL(remote=remote.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) - - repository_sync_data = RepositorySyncURL(remote=remote2.pulp_href) - sync_response = repo_api.sync(repo.pulp_href, repository_sync_data) - monitor_task(sync_response.task) - repo = repo_api.read(repo.pulp_href) - self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY) +def test_sync_file_protocol_handler( + file_repo, file_repo_api_client, file_remote_api_client, gen_object_with_cleanup +): + """Test syncing from a file repository with the file:// protocol handler""" + wget_download_on_host(FILE_FIXTURE_URL, "/tmp") + + remote_kwargs = { + "url": "file:///tmp/file/PULP_MANIFEST", + "policy": "immediate", + "name": str(uuid.uuid4()), + } + remote = gen_object_with_cleanup(file_remote_api_client, remote_kwargs) + + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + + file_repo = file_repo_api_client.read(file_repo.pulp_href) + assert file_repo.latest_version_href.endswith("/versions/1/") + assert get_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert 
get_added_content_summary(file_repo.to_dict()) == {"file.file": 3} + + +@pytest.mark.parallel +def test_mirrored_sync( + file_repo, file_fixture_gen_remote_ssl, file_repo_api_client, basic_manifest_path +): + """Assert that syncing the repository w/ mirror=True creates a publication.""" + remote = file_fixture_gen_remote_ssl(manifest_path=basic_manifest_path, policy="on_demand") + + repository_sync_data = RepositorySyncURL(remote=remote.pulp_href, mirror=True) + sync_response = file_repo_api_client.sync(file_repo.pulp_href, repository_sync_data) + task = monitor_task(sync_response.task) + + # Check that all the appropriate resources were created + assert len(task.created_resources) == 2 + assert any(["publication" in resource for resource in task.created_resources]) + assert any(["version" in resource for resource in task.created_resources]) + + +@pytest.mark.parallel +def test_invalid_url( + file_repo, gen_object_with_cleanup, file_remote_api_client, file_repo_api_client +): + """Sync a repository using a remote url that does not exist.""" + remote_kwargs = { + "url": "http://i-am-an-invalid-url.com/invalid/", + "policy": "immediate", + "name": str(uuid.uuid4()), + } + remote = gen_object_with_cleanup(file_remote_api_client, remote_kwargs) + + body = RepositorySyncURL(remote=remote.pulp_href) + with pytest.raises(PulpTaskError): + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + + +@pytest.mark.parallel +def test_invalid_file( + file_repo, file_repo_api_client, invalid_manifest_path, file_fixture_gen_remote +): + """Sync a repository using an invalid file repository.""" + remote = file_fixture_gen_remote(manifest_path=invalid_manifest_path, policy="immediate") + body = RepositorySyncURL(remote=remote.pulp_href) + with pytest.raises(PulpTaskError): + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + + +@pytest.mark.parallel +def test_duplicate_file_sync( + file_repo, file_fixture_gen_remote, duplicate_filename_paths, 
file_repo_api_client +): + remote = file_fixture_gen_remote(manifest_path=duplicate_filename_paths[0], policy="on_demand") + remote2 = file_fixture_gen_remote(manifest_path=duplicate_filename_paths[1], policy="on_demand") + + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + + assert get_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert get_added_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert file_repo.latest_version_href.endswith("/1/") + + body = RepositorySyncURL(remote=remote2.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + + assert get_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert get_added_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert file_repo.latest_version_href.endswith("/2/") + + +@pytest.mark.parallel +def test_filepath_includes_commas( + file_repo, file_fixture_gen_remote, manifest_path_with_commas, file_repo_api_client +): + """Sync a repository using a manifest file with a file whose relative_path includes commas""" + remote = file_fixture_gen_remote(manifest_path=manifest_path_with_commas, policy="on_demand") + + body = RepositorySyncURL(remote=remote.pulp_href) + monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task) + file_repo = file_repo_api_client.read(file_repo.pulp_href) + + assert get_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert get_added_content_summary(file_repo.to_dict()) == {"file.file": 3} + assert file_repo.latest_version_href.endswith("/1/") diff --git a/pulp_file/tests/functional/conftest.py b/pulp_file/tests/functional/conftest.py index ee0be59b..aca586b3 100644 --- a/pulp_file/tests/functional/conftest.py +++ b/pulp_file/tests/functional/conftest.py @@ -1,6 +1,6 @@ import aiofiles import 
logging -import tempfile +import os import uuid from aiohttp import web @@ -21,7 +21,6 @@ PublicationsFileApi, ) -from pulp_smash.pulp3.bindings import monitor_task from pulp_smash.pulp3.utils import gen_repo from pulp_file.tests.functional.utils import gen_file_client, generate_iso, generate_manifest @@ -33,10 +32,7 @@ def pytest_check_for_leftover_pulp_objects(config): file_client = gen_file_client() - types_to_check = [ - FileFileAlternateContentSource(file_client), - RemotesFileApi(file_client), - ] + types_to_check = [FileFileAlternateContentSource(file_client)] for type_to_check in types_to_check: if type_to_check.list().count > 0: raise Exception(f"This test left over a {type_to_check}.") @@ -60,13 +56,22 @@ def file_content_api_client(file_client): @pytest.fixture -def file_random_content_unit(file_content_api_client, tmp_path): - with tempfile.NamedTemporaryFile(dir=tmp_path) as tmp_file: - tmp_file.write(b"not empty") - tmp_file.flush() - return monitor_task( - file_content_api_client.create(relative_path=str(uuid.uuid4()), file=tmp_file.name).task - ) +def file_random_content_unit( + file_content_api_client, tmp_path, random_artifact, gen_object_with_cleanup +): + artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": str(uuid.uuid4())} + return gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + + +@pytest.fixture +def file_content_unit_with_name_factory( + file_content_api_client, tmp_path, random_artifact, gen_object_with_cleanup +): + def _file_content_unit_with_name_factory(name): + artifact_attrs = {"artifact": random_artifact.pulp_href, "relative_path": name} + return gen_object_with_cleanup(file_content_api_client, **artifact_attrs) + + return _file_content_unit_with_name_factory @pytest.fixture @@ -94,6 +99,11 @@ def file_repo(file_repo_api_client, gen_object_with_cleanup): return gen_object_with_cleanup(file_repo_api_client, gen_repo()) +@pytest.fixture +def file_repo_with_auto_publish(file_repo_api_client, 
gen_object_with_cleanup): + return gen_object_with_cleanup(file_repo_api_client, gen_repo(autopublish=True)) + + @pytest.fixture def file_remote_api_client(file_client): return RemotesFileApi(file_client) @@ -105,13 +115,40 @@ def file_fixtures_root(tmpdir): @pytest.fixture -def basic_manifest_path(file_fixtures_root): - file_fixtures_root.joinpath("basic").mkdir() - file1 = generate_iso(file_fixtures_root.joinpath("basic/1.iso")) - file2 = generate_iso(file_fixtures_root.joinpath("basic/2.iso")) - file3 = generate_iso(file_fixtures_root.joinpath("basic/3.iso")) - generate_manifest(file_fixtures_root.joinpath("basic/PULP_MANIFEST"), [file1, file2, file3]) - return "/basic/PULP_MANIFEST" +def write_3_iso_file_fixture_data_factory(file_fixtures_root): + def _write_3_iso_file_fixture_data_factory(name): + file_fixtures_root.joinpath(name).mkdir() + file1 = generate_iso(file_fixtures_root.joinpath(f"{name}/1.iso")) + file2 = generate_iso(file_fixtures_root.joinpath(f"{name}/2.iso")) + file3 = generate_iso(file_fixtures_root.joinpath(f"{name}/3.iso")) + generate_manifest( + file_fixtures_root.joinpath(f"{name}/PULP_MANIFEST"), [file1, file2, file3] + ) + return f"/{name}/PULP_MANIFEST" + + return _write_3_iso_file_fixture_data_factory + + +@pytest.fixture +def basic_manifest_path(write_3_iso_file_fixture_data_factory): + return write_3_iso_file_fixture_data_factory("basic") + + +@pytest.fixture +def copy_manifest_only_factory(file_fixtures_root): + def _copy_manifest_only(name): + file_fixtures_root.joinpath(f"{name}-manifest").mkdir() + src = file_fixtures_root.joinpath(f"{name}/PULP_MANIFEST") + dst = file_fixtures_root.joinpath(f"{name}-manifest/PULP_MANIFEST") + os.symlink(src, dst) + return f"/{name}-manifest/PULP_MANIFEST" + + return _copy_manifest_only + + +@pytest.fixture +def basic_manifest_only_path(copy_manifest_only_factory): + return copy_manifest_only_factory("basic") @pytest.fixture @@ -123,6 +160,57 @@ def large_manifest_path(file_fixtures_root): return 
"/large/PULP_MANIFEST" +@pytest.fixture +def range_header_manifest_path(file_fixtures_root): + """A path to a File repository manifest that contains 8 unique files each 4mb in size.""" + one_megabyte = 1048576 + file_fixtures_root.joinpath("range").mkdir() + file1 = generate_iso(file_fixtures_root.joinpath("range/1.iso"), 4 * one_megabyte) + file2 = generate_iso(file_fixtures_root.joinpath("range/2.iso"), 4 * one_megabyte) + file3 = generate_iso(file_fixtures_root.joinpath("range/3.iso"), 4 * one_megabyte) + file4 = generate_iso(file_fixtures_root.joinpath("range/4.iso"), 4 * one_megabyte) + file5 = generate_iso(file_fixtures_root.joinpath("range/5.iso"), 4 * one_megabyte) + file6 = generate_iso(file_fixtures_root.joinpath("range/6.iso"), 4 * one_megabyte) + file7 = generate_iso(file_fixtures_root.joinpath("range/7.iso"), 4 * one_megabyte) + file8 = generate_iso(file_fixtures_root.joinpath("range/8.iso"), 4 * one_megabyte) + + generate_manifest( + file_fixtures_root.joinpath("range/PULP_MANIFEST"), + [file1, file2, file3, file4, file5, file6, file7, file8], + ) + return "/range/PULP_MANIFEST" + + +@pytest.fixture +def manifest_path_with_commas(file_fixtures_root): + file_fixtures_root.joinpath("comma_test").mkdir() + file_fixtures_root.joinpath("comma_test/comma,folder").mkdir() + file_fixtures_root.joinpath("comma_test/basic_folder").mkdir() + file1 = generate_iso(file_fixtures_root.joinpath("comma_test/comma,folder/,comma,,file,.iso")) + file2 = generate_iso(file_fixtures_root.joinpath("comma_test/comma,folder/basic_file.iso")) + file3 = generate_iso(file_fixtures_root.joinpath("comma_test/basic_folder/comma,file.iso")) + generate_manifest( + file_fixtures_root.joinpath("comma_test/PULP_MANIFEST"), [file1, file2, file3] + ) + return "/comma_test/PULP_MANIFEST" + + +@pytest.fixture +def invalid_manifest_path(file_fixtures_root, basic_manifest_path): + file_path_to_corrupt = file_fixtures_root / Path("basic/1.iso") + with open(file_path_to_corrupt, "w") as f: + 
f.write("this is not the right data") + return basic_manifest_path + + +@pytest.fixture +def duplicate_filename_paths(write_3_iso_file_fixture_data_factory): + return ( + write_3_iso_file_fixture_data_factory("file"), + write_3_iso_file_fixture_data_factory("file2"), + ) + + @pytest.fixture def file_fixture_server_ssl_client_cert_req( ssl_ctx_req_client_auth, file_fixtures_root, gen_fixture_server diff --git a/pulp_file/tests/functional/constants.py b/pulp_file/tests/functional/constants.py index 10957196..69eaefe8 100644 --- a/pulp_file/tests/functional/constants.py +++ b/pulp_file/tests/functional/constants.py @@ -1,4 +1,3 @@ -# coding=utf-8 """Constants for Pulp File plugin tests.""" from urllib.parse import urljoin diff --git a/pulp_file/tests/functional/utils.py b/pulp_file/tests/functional/utils.py index 14205b39..65ca76b3 100644 --- a/pulp_file/tests/functional/utils.py +++ b/pulp_file/tests/functional/utils.py @@ -1,5 +1,7 @@ -# coding=utf-8 """Utilities for tests for the file plugin.""" +import aiohttp +import asyncio +from dataclasses import dataclass from datetime import datetime from functools import partial import hashlib @@ -74,15 +76,6 @@ def gen_file_content_attrs(artifact): return {"artifact": artifact["pulp_href"], "relative_path": utils.uuid4()} -def gen_file_content_upload_attrs(): - """Generate a dict with content unit attributes without artifact for upload. - - :param artifact: A dict of info about the artifact. - :returns: A semi-random dict for use in creating a content unit. - """ - return {"relative_path": utils.uuid4()} - - def populate_pulp(cfg, url=FILE_FIXTURE_MANIFEST_URL): """Add file contents to Pulp. 
@@ -121,6 +114,7 @@ def gen_artifact(url=FILE_URL, file=None): response = requests.get(url) with NamedTemporaryFile() as temp_file: temp_file.write(response.content) + temp_file.flush() return ArtifactsApi(core_client).create(file=temp_file.name).to_dict() return ArtifactsApi(core_client).create(file=file).to_dict() @@ -177,7 +171,7 @@ def create_repo_and_versions(syncd_repo, repo_api, versions_api, content_api): modify_response = repo_api.modify(a_repo.pulp_href, {"add_content_units": [href]}) monitor_task(modify_response.task) # get all versions of that repo - versions = versions_api.list(a_repo.pulp_href, ordering="number") + versions = versions_api.list(a_repo.pulp_href, ordering=["number"]) return a_repo, versions @@ -292,3 +286,70 @@ def generate_manifest(name, file_list): fout.write("{},{},{}\n".format(file["name"], file["digest"], file["size"])) fout.flush() return name + + +@dataclass +class Download: + """Class for representing a downloaded file.""" + + body: bytes + response_obj: aiohttp.ClientResponse + + def __init__(self, body, response_obj): + self.body = body + self.response_obj = response_obj + + +def get_files_in_manifest(url): + """ + Download a File Repository manifest and return content as a list of tuples. + [(name,sha256,size),] + """ + files = set() + r = asyncio.run(_download_file(url)) + for line in r.body.splitlines(): + files.add(tuple(line.decode().split(","))) + return files + + +def download_file(url, auth=None, headers=None): + """Download a file. 
+ + :param url: str URL to the file to download + :param auth: `aiohttp.BasicAuth` containing basic auth credentials + :param headers: dict of headers to send with the GET request + :return: Download + """ + return asyncio.run(_download_file(url, auth=auth, headers=headers)) + + +async def _download_file(url, auth=None, headers=None): + async with aiohttp.ClientSession(auth=auth, raise_for_status=True) as session: + async with session.get(url, verify_ssl=False, headers=headers) as response: + return Download(body=await response.read(), response_obj=response) + + +def get_url(url, auth=None, headers=None): + """ + Performs a GET request on a URL and returns an aiohttp.Response object. + """ + return asyncio.run(_get_url(url, auth=auth, headers=headers)) + + +async def _get_url(url, auth=None, headers=None): + async with aiohttp.ClientSession(auth=auth) as session: + async with session.get(url, verify_ssl=False, headers=headers) as response: + return response + + +def post_url(url, data=None, auth=None, return_body=False): + """Performs a POST request on a URL and returns an aiohttp.Response object.""" + return asyncio.run(_post_url(url, data, return_body, auth=auth)) + + +async def _post_url(url, data, return_body, auth=None): + async with aiohttp.ClientSession(auth=auth) as session: + async with session.post(url, data=data, verify_ssl=False) as response: + if return_body: + return await response.read() + return response diff --git a/pulp_file/tests/performance/pulpperf/utils.py b/pulp_file/tests/performance/pulpperf/utils.py index 3900f744..0dd92fc3 100644 --- a/pulp_file/tests/performance/pulpperf/utils.py +++ b/pulp_file/tests/performance/pulpperf/utils.py @@ -18,9 +18,9 @@ def urljoin(*args): def measureit(func, *args, **kwargs): """Measure execution time of passed function.""" logging.debug("Measuring duration of %s %s %s" % (func.__name__, args, kwargs)) - before = time.clock() + before = time.perf_counter() out = func(*args, **kwargs) - after = time.clock() + 
after = time.perf_counter() return after - before, out diff --git a/pulp_file/tests/unit/test_serializers.py b/pulp_file/tests/unit/test_serializers.py index 4675fb86..d5b0dba2 100644 --- a/pulp_file/tests/unit/test_serializers.py +++ b/pulp_file/tests/unit/test_serializers.py @@ -45,11 +45,11 @@ def test_absolute_path_data(self): self.assertFalse(serializer.is_valid()) def test_duplicate_data(self): - """Test that the FileContentSerializer does not accept data.""" + """Test that the FileContentSerializer accepts duplicate valid data.""" FileContent.objects.create(relative_path="foo", digest=self.artifact.sha256) data = { "artifact": f"{V3_API_ROOT}artifacts/{self.artifact.pk}/", "relative_path": "foo", } serializer = FileContentSerializer(data=data) - self.assertFalse(serializer.is_valid()) + self.assertTrue(serializer.is_valid()) diff --git a/pyproject.toml b/pyproject.toml index 44256401..612d35b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,10 +38,11 @@ ignore = [ "dev_requirements.txt", "doc_requirements.txt", "docs/**", - "flake8.cfg", "template_config.yml", ".pep8speaks.yml", ".ci/**", ".github/**", + "lint_requirements.txt", + ".flake8", ] diff --git a/requirements.txt b/requirements.txt index 4c05a8d7..e7934b1a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1 @@ -pulpcore>=3.17.0.dev +pulpcore>=3.22.0.dev,<3.25 diff --git a/setup.py b/setup.py index a68377a8..a823e5c7 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ setup( name="pulp-file", - version="1.11.0.dev", + version="1.12.0.dev", description="File plugin for the Pulp Project", long_description=long_description, license="GPLv2+", diff --git a/template_config.yml b/template_config.yml index 6f605f10..25dea039 100644 --- a/template_config.yml +++ b/template_config.yml @@ -1,12 +1,11 @@ # This config represents the latest values used when running the plugin-template. 
Any settings that # were not present before running plugin-template have been added with their default values. -# generated with plugin_template@2021.08.26-129-gf780fda +# generated with plugin_template@2021.08.26-174-g125ecdc additional_repos: - branch: main name: pulp-certguard -aiohttp_fixtures_origin: 172.18.0.1 api_root: /pulp/ black: true check_commit_message: true @@ -17,6 +16,7 @@ check_stray_pulpcore_imports: true cherry_pick_automation: false ci_env: {} ci_trigger: '{pull_request: {branches: [''*'']}}' +ci_update_branches: [] core_import_allowed: [] coverage: false deploy_client_to_pypi: true @@ -29,10 +29,11 @@ disabled_redis_runners: docker_fixtures: true docs_test: true flake8: true +flake8_ignore: [] github_org: pulp issue_tracker: github -keep_ci_update_for_latest_branches: 5 -keep_ci_update_since_branch: null +kanban: true +lint_requirements: true noissue_marker: '[noissue]' parallel_test_workers: 8 plugin_app_label: file @@ -62,6 +63,10 @@ pulp_settings: allowed_import_paths: - /tmp orphan_protection_time: 0 +pulp_settings_azure: null +pulp_settings_s3: + hide_guarded_distributions: true +pulp_settings_stream: null pulpcore_branch: main pulpcore_pip_version_specifier: null pulpcore_revision: null @@ -74,6 +79,10 @@ release_user: pulpbot run_pulpcore_tests_for_plugins: true single_commit_check: true stable_branch: null +stalebot: true +stalebot_days_until_close: 30 +stalebot_days_until_stale: 90 +stalebot_limit_to_pulls: true sync_ci: true tasking_allow_async_unsafe: true test_azure: true