diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index ddb5270963..0000000000 --- a/docs/Makefile +++ /dev/null @@ -1,199 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --docs pulpcore' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template -# Makefile for Sphinx documentation -# - -SHELL := /bin/bash -# You can set these variables from the command line. -SPHINXOPTS = -W # turn warnings into errors -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build -DIAGRAM_BUILD_DIR = _diagrams -PULP_URL ?= http://localhost:24817 -PULP_API_ROOT ?= /pulp/ - -# Internal variables. -PULP_V3_API_JSON_URL := ${PULP_URL}${PULP_API_ROOT}api/v3/docs/api.json -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext diagrams - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " diagrams to make diagram images" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -rm -rf $(DIAGRAM_BUILD_DIR)/* - -install: - python3 -m venv pulpdocs - source pulpdocs/bin/activate && pip install -r ../doc_requirements.txt - -diagrams: -ifneq ($(wildcard diagrams_src), ) - mkdir -p $(DIAGRAM_BUILD_DIR) -ifneq ("$(wildcard pulpdocs/bin/activate)","") - source pulpdocs/bin/activate && python3 -m plantuml diagrams_src/*.dot -else - python3 -m plantuml diagrams_src/*.dot -endif - mv diagrams_src/*.png $(DIAGRAM_BUILD_DIR)/ -else - @echo "Did not find $(DIAGRAM_SOURCE_DIR)." -endif - -$(BUILDDIR)/html/api.json: - mkdir -p $(BUILDDIR)/html - if pulp debug has-plugin --name core --specifier ">=3.44.0.dev"; \ - then \ - curl --fail -o $(BUILDDIR)/html/api.json "$(PULP_V3_API_JSON_URL)?component=core,file,certguard&include_html=1"; \ - else \ - curl --fail -o $(BUILDDIR)/html/api.json "$(PULP_V3_API_JSON_URL)?plugin=pulpcore&include_html=1"; \ - fi - -html: $(BUILDDIR)/html/api.json - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
- -dirhtml: -ifneq ("$(wildcard pulpdocs/bin/activate)","") - source pulpdocs/bin/activate && PULP_CONTENT_ORIGIN=localhost $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml -else - PULP_CONTENT_ORIGIN=localhost $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml -endif - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PulpDocs.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PulpDocs.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/PulpDocs" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PulpDocs" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." 
- -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -run: - cd $(BUILDDIR) && python -m http.server 8010 diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html deleted file mode 100644 index 6acd1b0633..0000000000 --- a/docs/_templates/layout.html +++ /dev/null @@ -1,41 +0,0 @@ -{% extends "!layout.html" %} - -{% block footer %} -{{ super() }} - - -{# check current version against supported-releases.json #} - -{% endblock %} \ No newline at end of file diff --git a/docs/_templates/restapi.html b/docs/_templates/restapi.html deleted file mode 100644 index 633793ed45..0000000000 --- a/docs/_templates/restapi.html +++ /dev/null @@ -1,24 +0,0 @@ - - - - Pulp 3 REST API documentation - - - - - - - - - - - - - diff --git a/staging_docs/admin/guides/_SUMMARY.md b/docs/admin/guides/_SUMMARY.md similarity index 100% rename from staging_docs/admin/guides/_SUMMARY.md rename to docs/admin/guides/_SUMMARY.md diff --git a/staging_docs/admin/guides/auth/basic.md b/docs/admin/guides/auth/basic.md similarity index 100% rename from staging_docs/admin/guides/auth/basic.md rename to docs/admin/guides/auth/basic.md diff --git a/staging_docs/admin/guides/auth/external.md b/docs/admin/guides/auth/external.md similarity index 100% rename from staging_docs/admin/guides/auth/external.md rename to docs/admin/guides/auth/external.md diff --git a/staging_docs/admin/guides/auth/index.md b/docs/admin/guides/auth/index.md similarity index 100% rename from staging_docs/admin/guides/auth/index.md rename to docs/admin/guides/auth/index.md diff --git a/staging_docs/admin/guides/auth/json_header.md b/docs/admin/guides/auth/json_header.md similarity index 100% rename from staging_docs/admin/guides/auth/json_header.md rename to docs/admin/guides/auth/json_header.md diff --git a/staging_docs/admin/guides/auth/keycloak.md b/docs/admin/guides/auth/keycloak.md similarity index 100% rename from staging_docs/admin/guides/auth/keycloak.md rename to docs/admin/guides/auth/keycloak.md diff --git a/staging_docs/admin/guides/configure-pulp.md b/docs/admin/guides/configure-pulp.md similarity index 100% rename from staging_docs/admin/guides/configure-pulp.md rename to docs/admin/guides/configure-pulp.md diff --git a/staging_docs/admin/guides/import-export-repos.md b/docs/admin/guides/import-export-repos.md similarity index 100% rename from staging_docs/admin/guides/import-export-repos.md rename to docs/admin/guides/import-export-repos.md diff --git a/staging_docs/admin/guides/integrate-kafka.md b/docs/admin/guides/integrate-kafka.md similarity index 100% rename from staging_docs/admin/guides/integrate-kafka.md rename to docs/admin/guides/integrate-kafka.md diff --git a/staging_docs/admin/guides/integrate-sentry-glitchtip.md b/docs/admin/guides/integrate-sentry-glitchtip.md similarity index 100% rename from staging_docs/admin/guides/integrate-sentry-glitchtip.md rename to docs/admin/guides/integrate-sentry-glitchtip.md diff --git a/staging_docs/admin/guides/remove-plugins.md b/docs/admin/guides/remove-plugins.md similarity index 100% rename from staging_docs/admin/guides/remove-plugins.md rename to docs/admin/guides/remove-plugins.md diff --git 
a/staging_docs/admin/guides/sign-metadata.md b/docs/admin/guides/sign-metadata.md similarity index 100% rename from staging_docs/admin/guides/sign-metadata.md rename to docs/admin/guides/sign-metadata.md diff --git a/staging_docs/admin/guides/troubleshooting.md b/docs/admin/guides/troubleshooting.md similarity index 100% rename from staging_docs/admin/guides/troubleshooting.md rename to docs/admin/guides/troubleshooting.md diff --git a/staging_docs/admin/learn/architecture.md b/docs/admin/learn/architecture.md similarity index 100% rename from staging_docs/admin/learn/architecture.md rename to docs/admin/learn/architecture.md diff --git a/staging_docs/admin/reference/settings.md b/docs/admin/reference/settings.md similarity index 100% rename from staging_docs/admin/reference/settings.md rename to docs/admin/reference/settings.md diff --git a/staging_docs/admin/reference/tech-preview.md b/docs/admin/reference/tech-preview.md similarity index 100% rename from staging_docs/admin/reference/tech-preview.md rename to docs/admin/reference/tech-preview.md diff --git a/staging_docs/assets/diagrams_src/concept-add-repo.dot b/docs/assets/diagrams_src/concept-add-repo.dot similarity index 100% rename from staging_docs/assets/diagrams_src/concept-add-repo.dot rename to docs/assets/diagrams_src/concept-add-repo.dot diff --git a/docs/diagrams_src/concept-content.dot b/docs/assets/diagrams_src/concept-content.dot similarity index 100% rename from docs/diagrams_src/concept-content.dot rename to docs/assets/diagrams_src/concept-content.dot diff --git a/docs/diagrams_src/concept-publish.dot b/docs/assets/diagrams_src/concept-publish.dot similarity index 100% rename from docs/diagrams_src/concept-publish.dot rename to docs/assets/diagrams_src/concept-publish.dot diff --git a/docs/diagrams_src/concept-remote.dot b/docs/assets/diagrams_src/concept-remote.dot similarity index 100% rename from docs/diagrams_src/concept-remote.dot rename to docs/assets/diagrams_src/concept-remote.dot diff --git a/docs/diagrams_src/concept-repository.dot b/docs/assets/diagrams_src/concept-repository.dot similarity index 100% rename from docs/diagrams_src/concept-repository.dot rename to docs/assets/diagrams_src/concept-repository.dot diff --git a/docs/static/.gitkeep b/docs/assets/images/.gitkeep similarity index 100% rename from docs/static/.gitkeep rename to docs/assets/images/.gitkeep diff --git a/docs/static/architecture.png b/docs/assets/images/architecture.png similarity index 100% rename from docs/static/architecture.png rename to docs/assets/images/architecture.png diff --git a/staging_docs/assets/images/concept-add-repo.png b/docs/assets/images/concept-add-repo.png similarity index 100% rename from staging_docs/assets/images/concept-add-repo.png rename to docs/assets/images/concept-add-repo.png diff --git a/staging_docs/assets/images/concept-content.png b/docs/assets/images/concept-content.png similarity index 100% rename from staging_docs/assets/images/concept-content.png rename to docs/assets/images/concept-content.png diff --git a/staging_docs/assets/images/concept-publish.png b/docs/assets/images/concept-publish.png similarity index 100% rename from staging_docs/assets/images/concept-publish.png rename to docs/assets/images/concept-publish.png diff --git a/staging_docs/assets/images/concept-remote.png b/docs/assets/images/concept-remote.png similarity index 100% rename from staging_docs/assets/images/concept-remote.png rename to docs/assets/images/concept-remote.png diff --git 
a/staging_docs/assets/images/concept-repository.png b/docs/assets/images/concept-repository.png similarity index 100% rename from staging_docs/assets/images/concept-repository.png rename to docs/assets/images/concept-repository.png diff --git a/docs/static/deferred_download_sequence.png b/docs/assets/images/deferred_download_sequence.png similarity index 100% rename from docs/static/deferred_download_sequence.png rename to docs/assets/images/deferred_download_sequence.png diff --git a/docs/static/lazy_component.png b/docs/assets/images/lazy_component.png similarity index 100% rename from docs/static/lazy_component.png rename to docs/assets/images/lazy_component.png diff --git a/docs/static/node-anatomy.png b/docs/assets/images/node-anatomy.png similarity index 100% rename from docs/static/node-anatomy.png rename to docs/assets/images/node-anatomy.png diff --git a/docs/static/node-topology.png b/docs/assets/images/node-topology.png similarity index 100% rename from docs/static/node-topology.png rename to docs/assets/images/node-topology.png diff --git a/docs/static/pulp-exp1.png b/docs/assets/images/pulp-exp1.png similarity index 100% rename from docs/static/pulp-exp1.png rename to docs/assets/images/pulp-exp1.png diff --git a/docs/static/rbac_architecture.png b/docs/assets/images/rbac_architecture.png similarity index 100% rename from docs/static/rbac_architecture.png rename to docs/assets/images/rbac_architecture.png diff --git a/staging_docs/assets/pulp_logo_big.png b/docs/assets/pulp_logo_big.png similarity index 100% rename from staging_docs/assets/pulp_logo_big.png rename to docs/assets/pulp_logo_big.png diff --git a/docs/authentication/basic.rst b/docs/authentication/basic.rst deleted file mode 100644 index e9e96ffc95..0000000000 --- a/docs/authentication/basic.rst +++ /dev/null @@ -1,42 +0,0 @@ -.. _basic-authentication: - -Basic ------ - -Pulp by default uses `Basic Authentication `_ which checks the -user submitted header against an internal database of users. If the username and password match, the -request is considered authenticated as that username. Basic auth transmits credentials as -user-id and password joined with a colon and then encoded using Base64. This is passed along as the -``Authorization`` header. - -Below is an example of a Basic Authentication header for a username ``admin`` and password -``password``.:: - - Authorization: Basic YWRtaW46cGFzc3dvcmQ= - -You can set this header on a `httpie `_ command using the ``--auth`` option:: - - http --auth admin:password ... - -You could also specify the header manually on a `httpie `_ command using its -header syntax:: - - http Authorization:"Basic YWRtaW46cGFzc3dvcmQ=" ... - -.. warning:: - - For the 3.y releases, Pulp expects the user table to have exactly 1 user in it named 'admin', - which is created automatically when the initial migration is applied. The password for this user - can be set with the ``pulpcore-manager reset-admin-password`` command. - To articulate what you'd like to see future versions of Pulp file a feature request - `here `_ or reach out via - `pulp-list@redhat.com `_. - - -Disabling Basic Authentication -****************************** - -Basic Authentication is defined by receiving the username and password encoded in the -``Authorization`` header. To disable receiving the username and password using Basic Authentication, -remove the ``rest_framework.authentication.BasicAuthentication`` from the -``REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES']`` list. 
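For illustration, a minimal settings sketch with Basic Authentication disabled (assuming only session authentication should remain; adjust the list to whichever other schemes you keep enabled) could look like this::

    REST_FRAMEWORK = {
        'DEFAULT_AUTHENTICATION_CLASSES': [
            # 'rest_framework.authentication.BasicAuthentication' removed to disable Basic Auth
            'rest_framework.authentication.SessionAuthentication',
        ]
    }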
diff --git a/docs/authentication/index.rst b/docs/authentication/index.rst deleted file mode 100644 index fcc69295d0..0000000000 --- a/docs/authentication/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _authentication: - -Authentication -============== - -.. toctree:: - :maxdepth: 2 - - overview - basic - webserver - keycloak - json_header - other diff --git a/docs/authentication/json_header.rst b/docs/authentication/json_header.rst deleted file mode 100644 index f4b5359006..0000000000 --- a/docs/authentication/json_header.rst +++ /dev/null @@ -1,68 +0,0 @@ -.. _json-header-authentication: - -JSON Header Authentication --------------------------- - -In a situation where it is not possible to use ``Basic Authentication`` Pulp can rely on a third-party -service to authenticate a user. -Using ``JSONHeaderRemoteAuthentication`` it's possible to receive a payload and even use ``JQ`` to filter -it and obtain the relevant data. The user is created in the database if one is not found. - -You can set ``AUTHENTICATION_JSON_HEADER`` and ``AUTHENTICATION_JSON_HEADER_JQ_FILTER`` to obtain a user -given a header name and its value respectively:: - - AUTHENTICATION_JSON_HEADER = "HTTP_X_AUTHENTICATION_SERVICE" - AUTHENTICATION_JSON_HEADER_JQ_FILTER = ".identity.user.username" - -will look for a ``x-authentication-service`` header and its content. With the given filter, it will -extract the information from a payload like this:: - - { - identity: { - user: { - username: "user" - } - } - } - -Enabling JSONHeaderRemoteAuthentication -*************************************** - -The ``JSONHeaderRemoteAuthentication`` can be enabled by: - -1. Add the ``django.contrib.auth.backends.RemoteUserBackend`` to -``AUTHENTICATION_BACKENDS``, or some authentication backend that subclasses it. - -2. You need to add the ``pulpcore.app.authentication.JSONHeaderRemoteAuthentication`` to -``REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES']`` setting. - -3. Change the ``AUTHENTICATION_JSON_HEADER`` to your value of choice. Remember that it -must start with `HTTP_`, so, if your header is ``x-authentication-service``, you need to set it to -``HTTP_X_AUTHENTICATION_SERVICE``. - -4. Set a JQ filter on ``AUTHENTICATION_JSON_HEADER_JQ_FILTER``. You can find the JQ query syntax and reference on its -official site `here `_. - -Remember that the content of the header must be Base64 encoded. - - -Enabling the ThirdParty Authentication Schema -********************************************* - -In a case where Pulp is deployed behind an API Gateway, it may be necessary to indicate to the clients where and which authorization process to use. -For this scenario, you may be able to provide an OpenAPI security schema to be used by clients or Pulp-CLI itself. - -To enable that, you have to configure the `AUTHENTICATION_JSON_HEADER_OPENAPI_SECURITY_SCHEME` with a payload following the -`Security Scheme Object definition `_. Here is an example describing -an OAuth2 authentication system:: - - AUTHENTICATION_JSON_HEADER_OPENAPI_SECURITY_SCHEME = { - "type": "oauth2", - "description": "External OAuth integration", - "flows": { - "clientCredentials": { - "tokenUrl": "https://your-identity-provider/token/issuer", - "scopes": {"api.console":"grant_access_to_pulp"} - } - } - } diff --git a/docs/authentication/keycloak.rst b/docs/authentication/keycloak.rst deleted file mode 100644 index e0b37ddb30..0000000000 --- a/docs/authentication/keycloak.rst +++ /dev/null @@ -1,137 +0,0 @@ -.. 
_keycloak-authentication: -Keycloak -------- -Pulp can be configured to use authentication provided by a Keycloak server outside of Pulp. -`Keycloak `_ can provide Identity Brokering, User Federation, Single -sign-on, and act as an OpenID Connect-based (OIDC) Identity Provider. - - -.. _keycloak-authentication-required-python-modules: - -Required Python Modules -*********************** - -The library that will be utilized for the integration between Keycloak and Pulp is -`python-social-auth `_. The -following Python modules must be installed in order to make use of python-social-auth within Pulp:: - - social-auth-core - social-auth-app-django - - -.. _keycloak-authentication-python-social-auth-and-django: - -Python Social Auth and Django -***************************** - -The `python-social-auth documentation `_ -describes the Django updates necessary to configure social-auth. - -Enable general Python Social Auth integration with the following steps: - -1. Add the application to the INSTALLED_APPS setting:: - - INSTALLED_APPS = ( - ... - 'social_django', - ... - ) - -2. Accept Keycloak auth instead of checking the internal users database by enabling:: - - AUTHENTICATION_BACKENDS = [ - ... - 'social_core.backends.keycloak.KeycloakOAuth2', - ... - ] - -3. Update the context processors that will add backends and associations data to the template context:: - - TEMPLATES = [ - { - ... - 'OPTIONS': { - ... - 'context_processors': [ - ... - 'social_django.context_processors.backends', - 'social_django.context_processors.login_redirect', - ... - ] - } - } - ] - -4. Define the authentication pipeline for data that will be associated with users:: - - SOCIAL_AUTH_PIPELINE = ( - 'social_core.pipeline.social_auth.social_details', - 'social_core.pipeline.social_auth.social_uid', - 'social_core.pipeline.social_auth.social_user', - 'social_core.pipeline.user.get_username', - 'social_core.pipeline.social_auth.associate_by_email', - 'social_core.pipeline.user.create_user', - 'social_core.pipeline.social_auth.associate_user', - 'social_core.pipeline.social_auth.load_extra_data', - 'social_core.pipeline.user.user_details', - ) - - -.. _keycloak-authentication-python-social-auth-and-keycloak: - -Python Social Auth and Keycloak -******************************* - -The python-social-auth keycloak backend -`documentation `_ -describes the necessary Keycloak integration variables. - - -Enable Python Social Auth and Keycloak integration with the following steps: - -1. On your Keycloak server, create a Realm (pulp) - -2. Create a Client in the new Realm - -3. Configure the Client ``Access Type`` to be "confidential". Provide ``Valid Redirect URIs`` with - ``http://:/*``. Set the ``User Info Signed Response Algorithm`` and - ``Request Object Signature Algorithm`` to ``RS256`` in the - ``Fine Grain OpenID Connect Configuration`` section. - -4. In the Pulp settings, add the value for the ``Client ID``:: - - SOCIAL_AUTH_KEYCLOAK_KEY = '' - -5. Gather the ``Client Secret`` for the Pulp settings. You can find the ``Client Secret`` in the - Credentials tab:: - - SOCIAL_AUTH_KEYCLOAK_SECRET = '' - -6. Collect the ``Public Key`` from the Realm's Keys tab:: - - SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY = '' - -7. 
Add the ``authorization_endpoint`` and ``token_endpoint`` URLs that you find in the Realm's OpenID Endpoint - Configuration to the Pulp settings:: - - SOCIAL_AUTH_KEYCLOAK_AUTHORIZATION_URL = \ - 'https://iam.example.com/auth/realms/pulp/protocol/openid-connect/auth/' - SOCIAL_AUTH_KEYCLOAK_ACCESS_TOKEN_URL = \ - 'https://iam.example.com/auth/realms/pulp/protocol/openid-connect/token/' - - -8. Create an audience mapper for the JWT token. In the Client, select the Mappers tab, then select - the Create button to create a Mapper. Name the mapper, for example, "Audience Mapper". From - the ``Mapper Type`` list, select "Audience". Define the ``Included Client Audience`` to be the - ``Client ID``. Enable this for both the ID token and access token. - -9. Add additional Built-in Mappers to the JWT to populate the token with the data defined in the - Social Auth Pipeline. To do this, in the Client again select the Mappers tab. Next select the - "Add Builtin" button and you will be presented with a table of mappers that can be chosen. - Common choices are ``username``, ``email``, ``groups``, ``given name``, ``family name``, - ``full name``, ``updated at``, and ``email verified``. - -After setup is completed, go to `http://:/login/keycloak` and the login flow -will be presented. diff --git a/docs/authentication/other.rst b/docs/authentication/other.rst deleted file mode 100644 index e092bb4d23..0000000000 --- a/docs/authentication/other.rst +++ /dev/null @@ -1,12 +0,0 @@ -.. _other-authentication: - -Other ----- - -Pulp is a Django app and Django Rest Framework (DRF) application, so additional authentication can -be added as long as it's correctly configured for both Django and Django Rest Framework. - -See the `Django docs on configuring custom authentication `_ and the `Django Rest Framework docs -on configuring custom authentication `_. diff --git a/docs/authentication/overview.rst b/docs/authentication/overview.rst deleted file mode 100644 index cacb0d7435..0000000000 --- a/docs/authentication/overview.rst +++ /dev/null @@ -1,43 +0,0 @@ -.. _authentication-overview: - -Overview -------- - -By default, Pulp supports Basic and Session authentication. The Basic Authentication checks the -username and password against the internal users database. - -.. note:: - This authentication is only for the REST API. Clients fetching binary data have their identity - verified and authorization checked using a :term:`ContentGuard`. - - -Which URLs Require Authentication? -********************************** - -All URLs in the REST API require authentication except the Status API, ``/pulp/api/v3/status/``. - - -Concepts -******** - -Authentication in Pulp is provided by Django Rest Framework and Django together. - -Django provides the `AUTHENTICATION_BACKENDS `_, which defines a set of behaviors to check usernames and -passwords against. By default it is set to:: - - AUTHENTICATION_BACKENDS = [ - 'django.contrib.auth.backends.ModelBackend', # Django's users, groups, and permissions - 'pulpcore.backends.ObjectRolePermissionBackend' # Pulp's RBAC object and model permissions - ] - -Django Rest Framework defines the source that usernames and passwords come from with the -`DEFAULT_AUTHENTICATION_CLASSES `_ setting.
By default it is set to:: - - REST_FRAMEWORK = { - 'DEFAULT_AUTHENTICATION_CLASSES': [ - 'rest_framework.authentication.SessionAuthentication', # Session Auth - 'rest_framework.authentication.BasicAuthentication' # Basic Auth - ] - } diff --git a/docs/authentication/webserver.rst b/docs/authentication/webserver.rst deleted file mode 100644 index eab154195f..0000000000 --- a/docs/authentication/webserver.rst +++ /dev/null @@ -1,96 +0,0 @@ -.. _webserver-authentication: - -Webserver ---------- - -Pulp can be configured to use authentication provided in the webserver outside of Pulp. This allows -for integration with ldap for example, through `mod_ldap `_, or certificate based API access, etc. - -Enable external authentication in two steps: - -1. Accept external auth instead of checking the internal users database by setting the -``AUTHENTICATION_BACKENDS`` to ``['django.contrib.auth.backends.RemoteUserBackend']``. This will -cause Pulp to accept any username for each request and by default create a user in the database -backend for them. To have any name accepted but not create the user in the database backend, use the -``pulpcore.app.authentication.PulpNoCreateRemoteUserBackend`` instead. - -It is preferable to have users created because the authorization and permissions continue to -function normally since there are users in the Django database to assign permissions to and later -check. When using the ``pulpcore.app.authentication.PulpNoCreateRemoteUserBackend`` you also should -set the ``DEFAULT_PERMISSION_CLASSES`` to check permissions differently or not at all. By default -Pulp sets ``DEFAULT_PERMISSION_CLASSES`` to ``pulpcore.plugin.access_policy.AccessPolicyFromDB`` -which provides role based permission checking via a user in the database. For example, to only serve -to authenticated users specify set ``DEFAULT_PERMISSION_CLASSES`` to -``rest_framework.permissions.IsAuthenticated``. Alternatively, to allow any user (even -unauthenticated) use ``rest_framework.permissions.AllowAny``. - -2. Specify how to receive the username from the webserver. Do this by specifying to DRF an - ``DEFAULT_AUTHENTICATION_CLASSES``. For example, consider this example:: - - REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = ( - 'rest_framework.authentication.SessionAuthentication', - 'pulpcore.app.authentication.PulpRemoteUserAuthentication' - ) - -This removes ``rest_framework.authentication.BasicAuthentication``, but retains -``rest_framework.authentication.SessionAuthentication`` and adds -``PulpRemoteUserAuthentication``. This accepts the username as WSGI environment variable -``REMOTE_USER`` by default, but can be configured via the -:ref:`REMOTE_USER_ENVIRON_NAME ` Pulp setting. - - -.. _webserver-authentication-same-webserver: - -Webserver Auth in Same Webserver -******************************** - -If your webserver authentication is occurring in the same webserver that is serving the -``pulpcore.app.wsgi`` application, you can pass the authenticated username to Pulp via the WSGI -environment variable ``REMOTE_USER``. - -Reading the ``REMOTE_USER`` WSGI environment is the default behavior of the -``rest_framework.authentication.RemoteUserAuthentication`` and the Pulp provided -``pulpcore.app.authentication.PulpRemoteUserAuthentication``. The only difference in the Pulp -provided one is that the WSGI environment variable name can be configured. 
- -See the :ref:`REMOTE_USER_ENVIRON_NAME ` for configuring the WSGI provided -name, but if you are using the ``REMOTE_USER`` WSGI environment name with "same webserver" -authentication, you likely want to leave :ref:`REMOTE_USER_ENVIRON_NAME ` -unset and configure the webserver to set the ``REMOTE_USER`` WSGI environment variable. - - -.. _webserver-authentication-with-reverse-proxy: - -Webserver Auth with Reverse Proxy -********************************* - -For example purposes, assume you're using Nginx with LDAP authentication required and after -authenticating it reverse proxies your request to the gunicorn process running the pulpcore.app.wsgi -application. That would look like this:: - - nginx <---http---> gunicorn <----WSGI----> pulpcore.app.wsgi application - - -With nginx providing authentication, all it can do is pass ``REMOTE_USER`` (or similar name) to the -application webserver, i.e. gunicorn. You can pass the header as part of the proxy request in nginx -with a config line like:: - - proxy_set_header REMOTE_USER $remote_user; - -Per the `WSGI standard `_, any incoming -headers will be prepended with ``HTTP_``. The above line would send the header named -``REMOTE_USER`` to gunicorn, and the WSGI application would receive it as ``HTTP_REMOTE_USER``. The -default configuration of Pulp expects ``REMOTE_USER`` in the WSGI environment, not -``HTTP_REMOTE_USER``, so this won't work with -``pulpcore.app.authentication.PulpRemoteUserAuthentication`` or the Django Rest Framework provided -``rest_framework.authentication.RemoteUserAuthentication`` as is. - -Pulp provides a setting named :ref:`REMOTE_USER_ENVIRON_NAME ` which allows -you to specify another WSGI environment variable to read the authenticated username from. - -.. warning:: - - Configuring this has serious security implications. See the `Django warning at the end of this - section in their docs `_ for more details. diff --git a/docs/bugs-features.rst b/docs/bugs-features.rst deleted file mode 100644 index b693704f69..0000000000 --- a/docs/bugs-features.rst +++ /dev/null @@ -1,21 +0,0 @@ -Bugs, Feature and Backport Requests -=================================== - -Bugs, features and backport requests for :term:`pulpcore` are tracked with `GitHub Issues -`_. Please see the `plugin table -`_ for the tracker for each plugin. :github:`Use this link -` to file a bug or feature request. - -.. warning:: - - Security bugs should be emailed to pulp-security@redhat.com with your Pulp version, vulnerability - description, and reproduction steps. - - -.. _triage: - -Triage ------- -Once per week, the Pulp team triages all new bugs and feature requests. See the -`meetings page `_ on the website for more info on -when it occurs and how to participate. diff --git a/docs/changes.rst b/docs/changes.rst deleted file mode 100644 index 96e8eb90cc..0000000000 --- a/docs/changes.rst +++ /dev/null @@ -1,4 +0,0 @@ -Changes -********* - -Removed due to docs migration process. diff --git a/docs/client_bindings.rst b/docs/client_bindings.rst deleted file mode 100644 index 0f37888ff9..0000000000 --- a/docs/client_bindings.rst +++ /dev/null @@ -1,47 +0,0 @@ -Client Bindings -=============== - -Python Client for pulpcore's REST API -------------------------------------- - -The ``pulpcore-client`` Python package is available on `PyPI -`_. It is currently published daily and with every RC. -Each plugin is responsible for publishing its own client to PyPI. The client libraries for plugins -should follow the ``pulp_<plugin>-client`` naming scheme.
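As an illustration of the generated bindings, a small status check against a local Pulp instance might look like the sketch below. The module and class names follow the usual openapi-generator layout and may differ between ``pulpcore-client`` releases, so treat this as an assumption rather than exact API documentation::

    # Illustrative sketch only; names follow openapi-generator conventions.
    from pulpcore.client.pulpcore import ApiClient, Configuration, StatusApi

    configuration = Configuration(host="http://localhost:24817")
    configuration.username = "admin"
    configuration.password = "password"

    with ApiClient(configuration) as api_client:
        # The status endpoint itself needs no authentication; credentials are
        # set above to show where they belong for the other API classes.
        status = StatusApi(api_client).status_read()
        print(status.versions)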
- - -Ruby Client for pulpcore's REST API ------------------------------------ - -The ``pulpcore_client`` Ruby Gem is available on -`rubygems.org `_. It is currently published daily and -with every RC. Each plugin is responsible for publishing its own client to Rubygems.org. The -client libraries for plugins should follow the ``pulp_<plugin>_client`` naming scheme. - - -Other Languages ---------------- - -A client can be generated using Pulp's OpenAPI schema and any of the available `generators -`_. - -Generating a client is a two-step process: - -**1) Download the OpenAPI schema for pulpcore:** - -.. code-block:: bash - - curl -o api.json "http://:24817/pulp/api/v3/docs/api.json?bindings&component=core" - -The query string is quoted so the shell does not treat ``&`` as a control operator. - -The OpenAPI schema for a specific plugin can be downloaded by specifying the plugin's module name -as a GET parameter. For example, for pulp_rpm-only endpoints, use a query like this: - -.. code-block:: bash - - curl -o api.json "http://:24817/pulp/api/v3/docs/api.json?bindings&component=rpm" - -**2) Generate a client using openapi-generator.** - -The schema can then be used as input to the openapi-generator-cli. The documentation on getting -started with openapi-generator-cli is available on -`openapi-generator.tech `_. diff --git a/docs/components.rst b/docs/components.rst deleted file mode 100644 index 7b5f7e0214..0000000000 --- a/docs/components.rst +++ /dev/null @@ -1,206 +0,0 @@ -.. _deployment: - -Architecture -============ - -Pulp's architecture has three components: a REST API, a content serving application, and the -tasking system. Each component can be horizontally scaled for high availability and/or -additional capacity for that part of the architecture. - -.. image:: /static/architecture.png - :align: center - -REST API --------- - -Pulp's REST API is a Django application that runs standalone using the ``gunicorn``-based -``pulpcore-api`` entrypoint. It serves the following things: - -* The REST API hosted at ``/pulp/api/v3/`` -* The browse-able documentation at ``/pulp/api/v3/docs/`` -* Any viewsets or views provided by plugins -* Static content used by Django, e.g. images used by the browse-able API. This is not Pulp content. - -.. note:: - - A simple way to run the REST API as a standalone service is using the provided ``pulpcore-api`` - entrypoint. It is ``gunicorn`` based and provides many of its options. - -The REST API should only be deployed via the ``pulpcore-api`` entrypoint. - - -Content Serving Application ---------------------------- - -A currently ``aiohttp.server`` based application that serves content to clients. The content could -be :term:`Artifacts` already downloaded and saved in Pulp, or -:term:`on-demand content units`. When serving -:term:`on-demand content units` the downloading also happens from within this -component. - -.. note:: - - Pulp installs a script that lets you run the content serving app as a standalone service as - follows. This script accepts many ``gunicorn`` options:: - - $ pulpcore-content - -The content serving application should be deployed with ``pulpcore-content``. See ``--help`` to see -available options.
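As a quick way to tie the two services together before the availability notes that follow, a small probe script might look like this. The ports are assumptions based on a default install (REST API on 24817, content app on 24816); the paths match the defaults described below::

    # Assumed defaults: REST API on port 24817, content app on port 24816.
    import requests

    api_ok = requests.get("http://localhost:24817/pulp/api/v3/status/").ok
    content_ok = requests.head("http://localhost:24816/pulp/content/").ok
    print(f"REST API healthy: {api_ok}, content app healthy: {content_ok}")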
- - -Availability ------------ -To verify that the REST API and the content server are healthy and alive: - -* REST API: GET request to ``${API_ROOT}api/v3/status/`` (see :ref:`API_ROOT `) -* Content Server: HEAD request to ``/pulp/content/`` or :ref:`CONTENT_PATH_PREFIX ` - - -Distributed Tasking System --------------------------- - -Pulp's tasking system consists of a single ``pulpcore-worker`` component and can consequently be -scaled by increasing the number of worker processes to provide more concurrency. Each worker can -handle one task at a time, and idle workers will look up waiting and ready tasks in a distributed -manner. If no ready tasks are found, a worker enters a sleep state and is notified once new tasks -are available or resources are released. Workers auto-name and are auto-discovered, so they can be -started and stopped without notifying Pulp. - -.. note:: - - Pulp serializes tasks that are unsafe to run in parallel, e.g. a sync and publish operation on - the same repo should not run in parallel. Generally tasks are serialized at the "resource" level, so - if you start *N* workers you can process *N* repo sync/modify/publish operations concurrently. - -All necessary information about tasks is stored in Pulp's Postgres database as a single source of -truth. In case your tasking system gets jammed, there is a guide to help (see :ref:`debugging tasks `). - - -Static Content --------------- - -When browsing the REST API or the browsable documentation with a web browser, for a good experience, -you'll need static content to be served. - -In Development -^^^^^^^^^^^^^^ - -If using the built-in Django webserver and your settings.yaml has ``DEBUG: True`` then static -content is automatically served for you. - -In Production -^^^^^^^^^^^^^ - -Collect all of the static content into place using the ``collectstatic`` command. The -``pulpcore-manager`` command is ``manage.py`` configured with the -``DJANGO_SETTINGS_MODULE="pulpcore.app.settings"``. Run ``collectstatic`` as follows:: - - $ pulpcore-manager collectstatic - - - -.. _analytics: - -Analytics Collection --------------------- - -By default, Pulp installations post anonymous analytics data every 24 hours, which is summarized on -``_ and aids in project decision making. This is enabled by -default but can be disabled by setting ``ANALYTICS=False`` in your settings. - -Here is the list of exactly what is collected, along with an example below: - -* The version of Pulp components installed as well as the version of the PostgreSQL server used -* The number of worker processes and number of hosts (not hostnames) those workers run on -* The number of content app processes and number of hosts (not hostnames) those content apps run on -* The number of certain RBAC-related entities in the system (users, groups, domains, custom roles, - custom access policies) - -.. note:: - - We may add more analytics data points in the future. To keep our high standards for - privacy protection, we have a rigorous approval process in place. You can see open proposals on - ``_. If in doubt, - `reach out to us `_. - -An example payload: - -.. code-block:: json - - { - "systemId": "a6d91458-32e8-4528-b608-b2222ede994e", - "onlineContentApps": { - "processes": 2, - "hosts": 1 - }, - "onlineWorkers": { - "processes": 2, - "hosts": 1 - }, - "components": [{ - "name": "core", - "version": "3.21.0" - }, { - "name": "file", - "version": "1.12.0" - }], - "postgresqlVersion": 90200 - } - - -.. 
_telemetry: - -Telemetry Support ------------------ - -Pulp can produce OpenTelemetry data, like the number of requests, active connections and latency response for -`pulp-api` and `pulp-content` using OpenTelemetry. You can read more about -`OpenTelemetry here `_. - -.. warning:: This feature is provided as a tech preview and could change in backwards incompatible - ways in the future. - -If you are using `Pulp in One Container `_ or `Pulp Operator -`_ and want to enable it, you will need to set the following -environment variables: - -* ``PULP_OTEL_ENABLED`` set to ``True``. -* ``OTEL_EXPORTER_OTLP_ENDPOINT`` set to the address of your OpenTelemetry Collector instance - ex. ``http://otel-collector:4318``. -* ``OTEL_EXPORTER_OTLP_PROTOCOL`` set to ``http/protobuf``. - -If you are using other type of installation maybe you will need to manually initialize Pulp using the -`OpenTelemetry automatic instrumentation -`_ -and set the following environment variables: - -* ``OTEL_EXPORTER_OTLP_ENDPOINT`` set to the address of your OpenTelemetry Collector instance - ex. ``http://otel-collector:4318``. -* ``OTEL_EXPORTER_OTLP_PROTOCOL`` set to ``http/protobuf``. - -.. note:: - A quick example on how it would run using this method:: - - $ /usr/local/bin/opentelemetry-instrument --service_name pulp-api /usr/local/bin/pulpcore-api \ - --bind "127.0.0.1:24817" --name pulp-api --workers 4 --access-logfile - - -You will need to run an instance of OpenTelemetry Collector. You can read more about the `OpenTelemetry -Collector here `_. - -**At the moment, the following data is recorded by Pulp:** - -* Access to every API endpoint (an HTTP method, target URL, status code, and user agent). -* Access to every requested package (an HTTP method, target URL, status code, and user agent). -* Disk usage within a specific domain (total used disk space and the reference to the domain). - -The information above is sent to the collector in the form of spans and metrics. Thus, the data is -emitted either based on the user interaction with the system or on a regular basis. Consult -`OpenTelemetry Traces `_ and -`OpenTelemetry Metrics `_ to learn more. - -.. note:: - It is highly recommended to set the `OTEL_METRIC_EXPORT_INTERVAL `_ - environment variable to ``300000`` (5 minutes) to reduce the frequency of queries executed on - the Pulp's backend. This value represents the interval between emitted metrics and should be - set before runtime. diff --git a/docs/concepts.rst b/docs/concepts.rst deleted file mode 100644 index 28c33733cd..0000000000 --- a/docs/concepts.rst +++ /dev/null @@ -1,70 +0,0 @@ -Concepts and Terminology -======================== - -This introduction is designed for anyone who is familiar with software management even without prior -knowledge of Pulp. - -From a user’s perspective, Pulp is a tool to manage content. In this context, _Pulp_ refers to -pulpcore and one or more content plugins. - -What is pulpcore? ------------------ - -Throughout Pulp documentation, when you see references to _pulpcore_, this term refers to the main -python program that provides a platform to which you add content plugins for the types of content -that you want to manage. In a very general sense, Pulpcore refers to Pulp and its functionality -without any plugins. Pulpcore provides a REST API and a Plugin API. - -Content Management with plugins -------------------------------- - -To manage content, you need at least one content plugin. Each type of content unit, for example RPM -or Debian, is defined by a plugin. 
For example, if you want to manage RPM content in Pulp, you must -install the RPM plugin. Files that belong to a content unit are called -:term:`Artifacts`. Each content unit can have zero or many artifacts. Artifacts can be -shared by multiple content units. - -.. image:: ./_diagrams/concept-content.png - :align: center - -Content repositories and versioning ------------------------------------ - -Content units in Pulp are organized by their membership in repositories. Repositories can only hold -the content type that is defined by the plugin you install. You can add, remove and modify content -in a repository. Each time the content of a repository is changed, a new Repository Version is -created. Any operation such as sync that does not change the content set, does not produce a new -repository version. - - -.. image:: ./_diagrams/concept-repository.png - :align: center -.. image:: ./_diagrams/concept-add-repo.png - :align: center - -Pulling content into Pulp with Remotes --------------------------------------- - -In Pulp, you can define external sources of content units, called **Remotes**. -Through your plugin of choice, you can define actions to interact with those external sources. -For example, most or all plugins define sync to fetch content units from a remote and add them to a -Pulp repository. - -.. image:: ./_diagrams/concept-remote.png - :align: center - -Serving content with Pulp -------------------------- - -Pulp provides a content app, which is an -`aiohttp.server `_ that serves content through what in -Pulp is referred to as a :term:`Distribution`. Using this content app, you can serve all content -that is managed with Pulp. - -To serve content from Pulp, you need to create a publication and a distribution. A -:term:`Publication` consists of the metadata of the content set and the artifacts of each content -unit in the content set. To host a publication, it must be assigned to a :term:`Distribution`, which -determines how and where a publication is served. - -.. image:: ./_diagrams/concept-publish.png - :align: center diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100755 index 05dbc19e84..0000000000 --- a/docs/conf.py +++ /dev/null @@ -1,317 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --docs pulpcore' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import os -import sys -from datetime import date - -try: - import sphinx_rtd_theme -except ImportError: - sphinx_rtd_theme = False - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('./extensions')) # noqa - -sys.path.insert(0, os.path.abspath('..')) # noqa - - -# Set environment variable so Sphinx can bootstrap the Django app -os.environ["DJANGO_SETTINGS_MODULE"] = "pulpcore.app.settings" - -import django -django.setup() - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. 
-#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [ - 'sphinx.ext.extlinks', - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.napoleon', - 'sphinxcontrib.jquery', - 'napoleon_django', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The top level toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Pulp Project' - -# Set copyright to current year -copyright = u'2012-{0}, Pulp Team'.format(date.today().year) - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = "3.57.0.dev" -# The full version, including alpha/beta/rc tags. -release = "3.57.0.dev" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build', 'pulpdocs'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# Set autodoc default options -# Document all module/class/etc members, even if they have no docstring. -# Show class inheritance, and group class members together by type (attr, method, etc) -autodoc_default_flags = ['members', 'undoc-members'] -autodoc_member_order = 'groupwise' -autoclass_content = 'both' - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'sphinx_rtd_theme' if sphinx_rtd_theme else 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] if sphinx_rtd_theme else [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -html_additional_pages = {'restapi': 'restapi.html'} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'PulpDocs' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'PulpDocs.tex', u'Pulp Documentation', - u'Pulp Team', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [ - ('user-guide/admin-client/index', 'pulp-admin', u'Pulp Documentation', [u'Pulp Team'], 1), - ('user-guide/consumer-client/index', 'pulp-consumer', u'Pulp Documentation', [u'Pulp Team'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'PulpDocs', u'Pulp Documentation', - u'Pulp Team', 'PulpDocs', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -extlinks = { - 'github': ('https://github.com/pulp/pulpcore/issues/%s', '#%s'), - 'redmine': ('https://pulp.plan.io/issues/%s', '#%s'), -} - -# napoleon uses .. attribute by default, but :ivar: is more succinct and looks better, -# particularly on classes with a lot of attributes, like django models and related objects -napoleon_use_ivar = True - -# set primary domain to python so we don't have to include :py: in xref links -default_domain = 'py' - -from sphinx.domains.python import PythonDomain - -# Adapted from: -# https://github.com/sphinx-doc/sphinx/issues/3866#issuecomment-366014346 -# Required because pulpcore.app and pulpcore.plugin have the same class names -# and Sphinx can't figure out which it should be using. This code defaults to -# pulpcore.app -class MyPythonDomain(PythonDomain): - def find_obj(self, env, modname, classname, name, type, searchmode=0): - """Ensures an object always resolves to the desired module if defined there.""" - orig_matches = PythonDomain.find_obj(self, env, modname, classname, name, type, searchmode) - matches = [] - for match in orig_matches: - match_name = match[0] - desired_name = "pulpcore.app.models." + name.strip('.') - if match_name == desired_name: - matches.append(match) - break - if matches: - return matches - else: - return orig_matches - - -def setup(sphinx): - """Use MyPythonDomain in place of PythonDomain""" - sphinx.add_domain(MyPythonDomain, override=True) - -rst_prolog = """.. attention:: - This documentation will be deactivated in the near future. `Learn More `_ - or go to the `New Pulp Docs `_ (beta). -""" diff --git a/docs/configuration/applying.rst b/docs/configuration/applying.rst deleted file mode 100644 index 641ab7580e..0000000000 --- a/docs/configuration/applying.rst +++ /dev/null @@ -1,49 +0,0 @@ -.. _applying-settings: - -Applying Settings -================= - -Pulp uses `dynaconf `_ for its settings which allows you -to configure Pulp settings using various ways: - - -* :ref:`Environment Variables ` - Enabled by default. - -* :ref:`Configuration File ` - Disabled by default, but easy to enable. - -.. _env-var-settings: - -Environment Variables ---------------------- - -Configuration by specifying environment variables is enabled by default. Any -:ref:`Setting ` can be configured using Dynaconf by prepending ``PULP_`` to the setting -name. For example :ref:`SECRET_KEY ` can be specified as the ``PULP_SECRET_KEY`` -environment variable. For example, in a shell you can use ``export`` to set this:: - - export PULP_SECRET_KEY="This should be a 50 chars or longer unique secret!" - - -.. 
_config-file-settings: - -Configuration File ------------------- - -By default, Pulp does not read settings from a configuration file. Enable this by specifying the -``PULP_SETTINGS`` environment variable with the path to your configuration file. For example:: - - export PULP_SETTINGS=/etc/pulp/settings.py - -Then you can specify settings with Python variable assignment in the ``/etc/pulp/settings.py``. For -example, you can specify :ref:`SECRET_KEY ` with:: - - $ cat /etc/pulp/settings.py - SECRET_KEY="This should be a 50 chars or longer unique secret!" - -In this example the settings file ends in ".py" so it needs to be valid Python, but it could use any -`dynaconf supported format `_. - -.. note:: - - The configuration file and directories containing the configuration file must be readable by the - user Pulp runs as. If using SELinux, assign the ``system_u:object_r:pulpcore_etc_t:s0`` label. diff --git a/docs/configuration/index.rst b/docs/configuration/index.rst deleted file mode 100644 index b9acaf24d1..0000000000 --- a/docs/configuration/index.rst +++ /dev/null @@ -1,11 +0,0 @@ -.. _configuration: - -Configuration -============= - -.. toctree:: - :maxdepth: 2 - - applying - viewing - settings diff --git a/docs/configuration/settings.rst b/docs/configuration/settings.rst deleted file mode 100644 index 3946b59b3f..0000000000 --- a/docs/configuration/settings.rst +++ /dev/null @@ -1,476 +0,0 @@ -.. _settings: - -Settings -======== - -There are only two required settings, although specific plugins may have additional required -settings. - -* :ref:`SECRET_KEY ` -* :ref:`CONTENT_ORIGIN ` - -.. note:: - - For more information on how to specify settings see the - :ref:`Applying Settings docs `. - - -Pulp uses three types of settings: - -* :ref:`Django settings ` Pulp is configuring -* :ref:`Redis settings ` Pulp is using -* :ref:`Pulp defined settings ` - - -.. _django-settings: - -Django Settings ---------------- - -Below is a list of the most common Django settings Pulp users typically use. Pulp is a Django -project, so any `Django setting `_ can be set. - - -.. _secret-key-setting: - -SECRET_KEY -^^^^^^^^^^ - - In order to get a pulp server up and running a `Django SECRET_KEY - `_ *must* be - provided. - - The following code snippet can be used to generate a random SECRET_KEY. - -.. code-block:: python - :linenos: - - import random - - chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)' - print(''.join(random.choice(chars) for i in range(50))) - - -DB_ENCRYPTION_KEY -^^^^^^^^^^^^^^^^^ - - The file location of a symmetric fernet key that Pulp uses to encrypt sensitive fields in the - database. Default location is ``/etc/pulp/certs/database_fields.symmetric.key``. - - The key is automatically generated by default with the pulp-oci-images. The key can be generated - independently but it must be a url-safe base64-encoded string of 32 random bytes. - - To generate a key with openssl:: - - openssl rand -base64 32 > /etc/pulp/certs/database_fields.symmetric.key - - This file can contain multiple such keys (one per line). The key in the first line will be used - for encryption but all others will still be attempted to decrypt old tokens. This can help you to - rotate this key in the following way: - - 1. Shut down all Pulp services (api, content and worker processes). - 2. Add a new key at the top of the key file. - 3. Call `pulpcore-manager rotate-db-key`. - 4. Remove the old key (on the second line) from the key file. - 5. Start the Pulp services again. 
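-
-  If you would rather generate this key from Python than with the ``openssl`` command shown above,
-  the ``cryptography`` library (commonly available in Pulp environments, since pulpcore itself uses
-  it) produces a key in the same url-safe base64 format. This is only a sketch; the path and file
-  permissions still need to match your installation:
-
-  .. code-block:: python
-
-      # Sketch: generate a Fernet key equivalent to the openssl command above.
-      from cryptography.fernet import Fernet
-
-      key = Fernet.generate_key()  # url-safe base64-encoded 32 random bytes
-
-      # The default location Pulp reads the key from; adjust for your install
-      # and make sure the file is only readable by the user Pulp runs as.
-      with open("/etc/pulp/certs/database_fields.symmetric.key", "wb") as fp:
-          fp.write(key + b"\n")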
- - For a zero downtime key rotation you can follow the slightly more complex recipe: - 1. Add a new key at the bottom of the key file. - 2. Restart the Pulp services in the usual phased manner. - 3. Swap the keys in the key file. - 4. Restart the Pulp services again. - 5. Call `pulpcore-manager rotate-db-key`. - 6. Remove the old key (on the second line) from the key file. - 7. Restart the Pulp services for the last time. - - -DATABASES -^^^^^^^^^ - - By default Pulp uses PostgreSQL on localhost. PostgreSQL is the only supported database. For - instructions on how to configure the database, refer to :ref:`database installation `. - - -DEFAULT_FILE_STORAGE -^^^^^^^^^^^^^^^^^^^^ - - By default, Pulp uses the local filesystem to store files. The default option which - uses the local filesystem is ``pulpcore.app.models.storage.FileSystem``. - - For more information about different Pulp storage options, see the - :ref:`storage documentation `. - - -REDIRECT_TO_OBJECT_STORAGE -^^^^^^^^^^^^^^^^^^^^^^^^^^ - - When set to ``True`` access to artifacts is redirected to the corresponding Cloud storage - configured in ``DEFAULT_FILE_STORAGE`` using pre-authenticated URLs. When set to ``False`` - artifacts are always served by the content app instead. - - Defaults to ``True``; ignored for local file storage. - - -MEDIA_ROOT -^^^^^^^^^^ - - The location where Pulp will store files. By default this is ``/var/lib/pulp/media``. - - This only affects storage location when ``DEFAULT_FILE_STORAGE`` is set to - ``pulpcore.app.models.storage.FileSystem``. See the :ref:`storage documentation ` for - more info. - - It should have permissions of: - - * mode: 750 - * owner: pulp (the account that pulp runs under) - * group: pulp (the group of the account that pulp runs under) - * SELinux context: system_u:object_r:pulpcore_var_lib_t:s0 - - -LOGGING -^^^^^^^ - - By default Pulp logs at an INFO level to syslog. For all possible configurations please - refer to `Django documenation on logging `_ - - Enabling DEBUG logging is a common troubleshooting step. See the :ref:`enabling-debug-logging` - documentation for details on how to do that. - - -AUTHENTICATION_BACKENDS -^^^^^^^^^^^^^^^^^^^^^^^ - - By default, Pulp has two types of authentication enabled, and they fall back for each other: - - 1. Basic Auth which is checked against an internal users database - 2. Webserver authentication that relies on the webserver to perform the authentication. - - To change the authentication types Pulp will use, modify the ``AUTHENTICATION_BACKENDS`` - settings. See the `Django authentication documentation `_ for more information. - - -.. _redis-settings: - -Redis Settings --------------- - -.. warning:: - - To enable usage of Redis the `CACHE_ENABLED`_ option must be set to `True`. - -The following Redis settings can be set in your Pulp config: - - * REDIS_URL - * REDIS_HOST - * REDIS_PORT - * REDIS_DB - * REDIS_PASSWORD - -Below are some common settings used for Redis configuration. - - -REDIS_HOST -^^^^^^^^^^ - - The hostname for Redis. - - -REDIS_PORT -^^^^^^^^^^ - - The port for Redis. - - -REDIS_PASSWORD -^^^^^^^^^^^^^^ - - The password for Redis. - - -.. _pulp-settings: - -Pulp Settings -------------- - -Pulp defines the following settings itself: - - -.. _api-root: - -API_ROOT -^^^^^^^^ - - A string containing the path prefix for the Pulp API. This is used by the REST API when forming - URLs to refer clients to other parts of the REST API and by the ``pulpcore-api`` application to - match incoming URLs. 
Pulp appends the string ``api/v3/`` to this setting.
-
-    Defaults to ``/pulp/``. After the application appends ``api/v3/``, the V3 API is served from
-    ``/pulp/api/v3/`` by default.
-
-
-WORKING_DIRECTORY
-^^^^^^^^^^^^^^^^^
-
-    The directory used by workers to stage files temporarily. This defaults to
-    ``/var/lib/pulp/tmp/``.
-
-    It should have permissions of:
-
-    * mode: 750
-    * owner: pulp (the account that pulp runs under)
-    * group: pulp (the group of the account that pulp runs under)
-    * SELinux context: system_u:object_r:pulpcore_var_lib_t:s0
-
-.. note::
-
-    It is recommended that ``WORKING_DIRECTORY`` and ``MEDIA_ROOT`` exist on the same storage
-    volume for performance reasons. Files are commonly staged in the ``WORKING_DIRECTORY`` and
-    validated before being moved to their permanent home in ``MEDIA_ROOT``.
-
-
-CHUNKED_UPLOAD_DIR
-^^^^^^^^^^^^^^^^^^
-
-    A relative path inside the DEPLOY_ROOT directory used exclusively for uploaded chunks. The
-    uploaded chunks are stored in the default storage specified by ``DEFAULT_FILE_STORAGE``. This
-    option allows users to customize the actual place where chunked uploads should be stored within
-    the declared storage. The default, ``upload``, is sufficient for most use cases. A change to
-    this setting only applies to uploads created after the change.
-
-
-.. _content-origin-setting:
-
-CONTENT_ORIGIN
-^^^^^^^^^^^^^^
-
-    A required string containing the protocol, fqdn, and port where the content app is reachable by
-    users. This is used by ``pulpcore`` and various plugins when referring users to the content app.
-    For example, if the API should refer users to content served from pulp.example.com over https on
-    port 24816 (the content app's default port), you would set: ``https://pulp.example.com:24816``.
-
-HIDE_GUARDED_DISTRIBUTIONS
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-    If activated, the distributions that are protected by a content guard will not be shown on the
-    directory listing in the content app. Defaults to ``False``.
-
-.. _content-path-prefix:
-
-CONTENT_PATH_PREFIX
-^^^^^^^^^^^^^^^^^^^
-
-    A string containing the path prefix for the content app. This is used by the REST API when
-    forming URLs to refer clients to the content serving app, and by the content serving application
-    to match incoming URLs.
-
-    Defaults to ``/pulp/content/``.
-
-
-.. _content-app-ttl:
-
-CONTENT_APP_TTL
-^^^^^^^^^^^^^^^
-
-    The number of seconds before a content app should be considered lost.
-
-    Defaults to ``30`` seconds.
-
-
-.. _pulp-cache:
-
-CACHE_ENABLED
-^^^^^^^^^^^^^
-
-    Store cached responses from the content app into Redis. This setting improves the performance
-    of the content app under heavy load for similar requests. Defaults to ``False``.
-
-    .. note::
-        The entire response is not stored in the cache. Only the location of the file needed to
-        recreate the response is stored. This reduces database queries and allows for many
-        responses to be stored inside the cache.
-
-
-CACHE_SETTINGS
-^^^^^^^^^^^^^^
-
-    Dictionary with tunable settings for the cache:
-
-    * ``EXPIRES_TTL`` - Number of seconds entries should stay in the cache before expiring.
-
-      Defaults to ``600`` seconds.
-
-    .. note::
-        Set to ``None`` to have entries not expire.
-        Content app responses are always invalidated when the backing distribution is updated.
-
-
-DOMAIN_ENABLED
-^^^^^^^^^^^^^^
-
-    .. note:: This feature is provided as a tech-preview
-
-    Enable the :ref:`Domains feature to enable multi-tenancy capabilities `.
All installed plugins must be Domain compatible for Pulp to start. Defaults to ``False``.
-
-
-.. _worker-ttl:
-
-WORKER_TTL
-^^^^^^^^^^
-
-    The number of seconds before a worker should be considered lost.
-
-    Defaults to ``30`` seconds.
-
-
-.. _remote-user-environ-name:
-
-REMOTE_USER_ENVIRON_NAME
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-    The name of the WSGI environment variable to read for :ref:`webserver authentication `.
-
-    .. warning::
-
-        Configuring this has serious security implications. See the `Django warning at the end of this
-        section in their docs `_ for more details.
-
-    Defaults to ``'REMOTE_USER'``.
-
-
-.. _allowed-import-paths:
-
-ALLOWED_IMPORT_PATHS
-^^^^^^^^^^^^^^^^^^^^
-
-    One or more real filesystem paths that Remotes with filesystem paths can import from. For example,
-    to allow a remote url of ``file:///mnt/foo/bar/another/folder/`` you could specify::
-
-        ALLOWED_IMPORT_PATHS = ['/mnt/foo/bar']  # only a subpath is needed
-
-    Defaults to ``[]``, meaning ``file:///`` urls are not allowed in any Remote.
-
-
-.. _allowed-export-paths:
-
-ALLOWED_EXPORT_PATHS
-^^^^^^^^^^^^^^^^^^^^
-
-    One or more real filesystem paths that Exporters can export to. For example, to allow a path of
-    ``/mnt/foo/bar/another/folder/`` you could specify::
-
-        ALLOWED_EXPORT_PATHS = ['/mnt/foo/bar']  # only a subpath is needed
-
-    Defaults to ``[]``, which means no path is allowed.
-
-
-.. _allowed-content-checksums:
-
-ALLOWED_CONTENT_CHECKSUMS
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-    .. warning::
-        Enforcement of this setting in ``pulpcore`` and various plugins is not fully in place. It is
-        possible that checksums not in this list may still be used in various places. This banner will
-        be removed when it is believed all ``pulpcore`` and plugin code fully enforces this setting.
-
-    The list of content-checksums this pulp-instance is **allowed to use**. By default the following
-    are used::
-
-        ALLOWED_CONTENT_CHECKSUMS = ["sha224", "sha256", "sha384", "sha512"]
-
-    The entire set of supported checksums is: ``md5``, ``sha1``, ``sha224``, ``sha256``,
-    ``sha384``, and ``sha512``.
-
-    .. warning::
-        Due to its use as the primary content-identifier, "sha256" **IS REQUIRED**. Pulp will
-        fail to start if ``"sha256"`` is not found in this set.
-
-    Pulp can prohibit or allow checksums by setting the ALLOWED_CONTENT_CHECKSUMS setting.
-    Changing this setting requires a few steps.
-
-    First, before you change the setting, see how your Pulp instance will be impacted by this change by running:
-
-    ``pulpcore-manager handle-artifact-checksums --report --checksums sha256,sha512``
-
-    Adjust ``--checksums`` to a comma-separated list of checksum types to match your needs.
-
-    .. note::
-        If you already changed ``ALLOWED_CONTENT_CHECKSUMS`` in pulp settings you can leave out ``--checksums``,
-        and the checksums will be parsed from Pulp settings.
-
-    Before switching, any on-demand repos containing forbidden checksum digests need to be synced with
-    ``policy=immediate`` to populate missing allowed checksums. This can heavily impact your disk space.
-    Alternatively, users can remove these offending repo versions followed by orphan cleanup.
-
-    If you have artifacts that do not conform to your ALLOWED_CONTENT_CHECKSUMS setting, they need to be re-hashed.
-    You can update them using:
-
-    ``pulpcore-manager handle-artifact-checksums``
-
-    .. warning::
-        If Pulp fails to start because forbidden checksums have been identified or required ones are
-        missing, run the ``pulpcore-manager handle-artifact-checksums`` command.
-
-
-.. 
_django-guid: - -DJANGO_GUID -^^^^^^^^^^^ - - Pulp uses ``django-guid`` to append correlation IDs to logging messages. Correlation IDs are - autogenerated by default but can also be sent as a header with each request. They are also - returned as a header in the response and are recorded in the ``logging_cid`` field of tasks. - - For more information on how to configure the ``DJANGO_GUID`` setting, see the `django-guid - settings documentation `_. - - -.. _orphan-protection-time: - -ORPHAN_PROTECTION_TIME -^^^^^^^^^^^^^^^^^^^^^^ - - The time specified in minutes for how long Pulp will hold orphan Content and Artifacts before - they become candidates for deletion by an orphan cleanup task. This should ideally be longer - than your longest running task otherwise any content created during that task could be cleaned - up before the task finishes. Default is 1440 minutes (24 hours). - - -.. _upload_protection_time: -.. _tmpfile_protection_time: - -UPLOAD_PROTECTION_TIME and TMPFILE_PROTECTION_TIME -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - Pulp uses ``uploads`` and ``pulp temporary files`` to pass data from the api to worker tasks. - These options allow to specify a timeinterval in minutes used for cleaning up stale entries. If - set to 0, automatic cleanup is disabled, which is the default. - - -.. _task_diagnostics: - -TASK_DIAGNOSTICS -^^^^^^^^^^^^^^^^ - - If ``True``, each task will record various diagnostics (listed below) to files in the dir - ``/var/tmp/pulp//``. This is ``False`` by default. - - * memory - the task's max resident set size in MB. - - -.. _analytics-setting: - -ANALYTICS -^^^^^^^^^ - - If ``True``, Pulp will anonymously post analytics information to - ``_ and aids in project decision making. See the - :ref:`analytics docs ` for more info on exactly what is posted along with an example. - - Defaults to ``True``. diff --git a/docs/configuration/viewing.rst b/docs/configuration/viewing.rst deleted file mode 100644 index 7f6d029ce8..0000000000 --- a/docs/configuration/viewing.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. _viewing-settings: - -Viewing Settings -================ - -To list the effective settings on a Pulp installation, while on the system where Pulp is installed -run the command ``dynaconf list``. This will show the effective settings Pulp will use. - -.. note:: - - Settings can come from both settings file and environment variables. When running the - ``dynaconf list`` command, be sure you have the same environment variables set as your Pulp - installation. - -.. note:: - - For the ``dynaconf list`` command to succeed it needs to environment variable set identifying - where the django settings file is. ``export DJANGO_SETTINGS_MODULE=pulpcore.app.settings``. diff --git a/docs/contributing/architecture/app-layout.rst b/docs/contributing/architecture/app-layout.rst deleted file mode 100644 index 60e6fbe634..0000000000 --- a/docs/contributing/architecture/app-layout.rst +++ /dev/null @@ -1,210 +0,0 @@ -Pulp Platform Application Layout -================================ - -The Pulp Platform is built using two key frameworks, the Django Web Framework -and the Django REST Framework. Where possible, conforming to the conventions -of these frameworks is encouraged. The Pulp Platform strives to leverage these -frameworks as much as possible, ideally making Pulp Platform development a -work of implementation before innovation. 
- -In the event that one of these components offers functionality that augments -or supersedes functionality in another component, the order of precedence of -these frameworks is: - -* Pulp Platform -* Django REST Framework (DRF) -* Django Web Framework (Django) - -So, features provided by the Pulp Platform should preferred over similar -features provided by DRF, and features in DRF should be preferred over similar -features provided by Django. - - -Module Layout -------------- - -This is the basic layout of the ``pulpcore.app`` package, on the filesystem:: - - pulpcore - ├── app - │   ├── apps.py - │   ├── fields.py - │   ├── __init__.py - │   ├── logs.py - │   ├── management - │   │   ├── commands - │   │   │   ├── __init__.py - │   │   │   └── ... - │   │   └── __init__.py - │   ├── manage.py - │   ├── migrations - │   ├── models - │   │   ├── __init__.py - │   │   └── ... - │   ├── pagination.py - │   ├── response.py - │   ├── serializers - │   │   ├── __init__.py - │   │   └── ... - │   ├── settings.py - │   ├── tasks - │   │   ├── __init__.py - │   │   └── ... - │   ├── templates - │   │   └── ... - │   ├── tests - │   │   └── ... - │   ├── urls.py - │   ├── viewsets - │   │   ├── __init__.py - │   │   └── ... - │   └── wsgi.py - └── __init__.py - - -The contents of this package are documented in detail in the :doc:`../platform-api/index` -documentation. Details how this package is organized can be found -below, along with information about some of the modules found in this namespace. - -.. tip:: - - This tree is incomplete, and maybe be out of date. Only the most notable and - durable modules have been listed; the most complete listing of modules in this - namespace will always be the :doc:`../platform-api/index` documentation. - - -Module Imports --------------- - -For modules in the ``pulpcore.app`` namespace that are large and capture behaviors -across multiple concerns of pulp, such as our models, we have separated these -packages into subpackages. All public identifiers and objects defined -in submodules are then collected into that module's ``__init__.py``, from which -they will be imported by other Pulp Platform modules. - -Using :mod:``pulpcore.app.models`` as an example, this means that when breaking up the -``models`` package in ``pulpcore.app``, the following things are true: - -* No models are defined in the ``__init__.py`` of ``pulpcore.app.models``. -* All models are defined in submodules located in the ``pulpcore.app.models`` module - directory (where its ``__init__.py`` can be found). -* The `__init__.py`` in ``pulpcore.app.models`` should consist only of import statements, - ordered to prevent any circular import issues that may result based on the imports - that are done in any included submodules. -* Any models defined in submodules in ``pulpcore.app.models`` namespace must be imported - from the ``pulpcore.app.models`` namespace, not the submodule in which they are defined. - Yes: ``from pulpcore.app.models import PulpModel``, - No: ``from pulpcore.app.models.pulp import PulpModel``. -* When adding new models, they must be imported into the ``pulpcore.app.models`` - ``__init__.py``, so that they are available to be imported by any other Pulp Platform - components that use them from the ``pulpcore.app.models`` namespace. -* Imports done inside any submodules should be relative, e.g. - ``from .submodule import identifier`` or ``from . import submodule``, avoiding the - creation of circular imports. 
-* Imports done inside the module's ``__init__.py`` should be relative and explict, e.g. - - * Yes: ``from .submodule import identifier1, identifier2`` - * No: ``from submodule import identifier1, identifier2`` - * No: ``from .submodule import *`` - -Any module in ``pulpcore.app`` broken up in this way, such as -:mod:`pulpcore.app.serializers` or :mod:`pulpcore.app.viewsets`, should do so in such a way -that renders the implementation invisible to anyone importing from that module. - -pulpcore.app ------------- - -pulpcore.app is the package containing the core Pulp Platform Django application. -This package contains all of the Pulp Platform models, serializers, and -viewsets required to assemble Pulp's REST API and underlying database. - -pulpcore.app.apps -^^^^^^^^^^^^^^^^^ - -This module defines the :class:`~pulpcore.app.apps.PulpPluginAppConfig` base class -used by all Pulp plugins to identify themselves to the Pulp Platform as plugins. - -This module also includes the :class:`~pulpcore.app.apps.PulpAppConfig` class which -is the Pulp Platform application config. - -pulpcore.app.settings -^^^^^^^^^^^^^^^^^^^^^ - -This is the main settings module for the platform Django project, which puts together -all of the various Django applications that the Pulp Platform depends on to function, -as well as the Pulp Platform application itself and its plugins. - -Many things are defined in here, including the database settings, logging configuration, -REST API settings, etc. This file also finds and registers Pulp plugins with the Pulp -Platform Django Project, using the ``pulpcore.plugin`` entry point. - -In order to use django-related tools with the Pulp Platform, the platform must be installed, -and the ``DJANGO_SETTINGS_MODULE`` environment var must be set to -:mod:`pulpcore.app.settings`. - -pulpcore.app.urls -^^^^^^^^^^^^^^^^^ - -This module contains the API :data:`~pulpcore.app.urls.root_router`, and is where all non-API -views (should we ever write any) are mapped to URLs. - -pulpcore.app.models -^^^^^^^^^^^^^^^^^^^ - -All models are contained in :mod:`pulpcore.app.models`. - -The Platform models are all importable directly from the ``pulpcore.app.models`` -namespace. All Pulp models should subclass :mod:`pulpcore.app.models.Model`, or -one of its subclasses. - -.. note:: - - All models must exist in the pulpcore.app.models namespace in order to be - recognized by Django and included in the Django ORM. - -Master/Detail Models -******************** - -A few Pulp Platform models, including the Content model as well as -Remotes and Publishers, implement a strategy we refer to as "Master/Detail". -The Master/Detail strategy, as implemented in Pulp, allows us to define -necessary relationships on a single master Model, while still allowing -plugin developers to extend these Master classes with details pertinent -to the plugin's requirements. Using the :class:`~pulpcore.app.models.Content` -model as an example, :class:`~pulpcore.app.models.Repository` relates to the -Content model. This causes all content to relate to the repositories that -contain them the same way while still allowing plugin writers to add any -additional fields or behaviors to the model as-needed for their use cases. - -In the Pulp Platform, models requiring this sort of behavior should subclass -:class:`pulpcore.app.models.MasterModel`. 
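-
-To make the pattern concrete, the sketch below shows roughly what a plugin-defined detail model
-looks like. It is illustrative only: the class name, ``TYPE`` value, and field are invented here,
-not taken from pulpcore or any real plugin; only the ``Content`` base class (a MasterModel
-descendant re-exported for plugins) and the general shape follow the conventions described above.
-
-.. code-block:: python
-
-    from django.db import models
-
-    from pulpcore.plugin.models import Content  # plugin-facing alias of the Content master model
-
-
-    class ExampleContent(Content):
-        """A hypothetical detail Content type contributed by a plugin."""
-
-        # Identifies the detail type; conventionally also used as the ViewSet's endpoint_name.
-        TYPE = "example"
-
-        relative_path = models.TextField()
-
-        class Meta:
-            default_related_name = "%(app_label)s_%(model_name)s"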
- - -Serializers, ViewSets, and other Model-Related Classes ------------------------------------------------------- - -The modules containing Serializers and ViewSets, located in ``pulpcore.app.serializers`` and -``pulpcore.app.viewsets``, respectively, should be organized similarly to the models that -they represent where possible. For example, if ``pulpcore.app.models.Repository`` is defined -in the ``pulpcore.app.models.repository`` module, its corresponding serializer should be -defined in ``pulpcore.app.serializers.repository``, and its corresponding viewset should be -defined in ``pulpcore.app.viewsets.repository``, making it easy to find. - -These, and other model-related classes, should be named in such a way as to make their -relationship to their Model unambiguous. To that end, model-related classes should include -the name of the model class they're related to in their name. So, the serializer for the -``pulpcore.app.models.Repository`` model should be named ``RepositorySerializer``, and the viewset -related to that model should be named ``RepositoryViewSet``. - -Classes not directly related to a model, or related to multiple models, should still of -course be named in such a way as to make their purpose obvious an unambiguous. - -ViewSet Registration -^^^^^^^^^^^^^^^^^^^^ - -In order for ViewSets to be automatically registered with the Pulp Platform API router, -they *must* subclass :class:`pulpcore.app.viewsets.base.NamedModelViewSet` and be imported into the -``pulpcore.app.viewsets`` namespace. - -ViewSets not meeting this criteria must be manually registered with the API router in -:mod:`pulpcore.app.urls` by using the router's ``register`` method during application setup. diff --git a/docs/contributing/architecture/error-handling.rst b/docs/contributing/architecture/error-handling.rst deleted file mode 100644 index f59db5f00d..0000000000 --- a/docs/contributing/architecture/error-handling.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. _error-handling: - -Error Handling --------------- - -Errors in Tasks -*************** - -All uncaught exceptions in a task are treated as fatal exceptions. The task is then marked as -failed. The error traceback, description, and code are returned to the user under the -:attr:`~pulpcore.app.models.Task.error` attribute of the :class:`~pulpcore.app.models.Task` -object. - -When raising exceptions `built-in Python Exceptions `_ -should be used if possible. :doc:`Coded Exceptions ` should be used for known error situations. diff --git a/docs/contributing/architecture/index.rst b/docs/contributing/architecture/index.rst deleted file mode 100644 index 48713295a3..0000000000 --- a/docs/contributing/architecture/index.rst +++ /dev/null @@ -1,9 +0,0 @@ -Architecture -============ - -.. toctree:: - :maxdepth: 1 - - app-layout - rest-api - error-handling diff --git a/docs/contributing/architecture/rest-api.rst b/docs/contributing/architecture/rest-api.rst deleted file mode 100644 index 622266973d..0000000000 --- a/docs/contributing/architecture/rest-api.rst +++ /dev/null @@ -1,463 +0,0 @@ -REST API Guidelines -=================== - -Introduction ------------- - -The Pulp 3 API is intended to be decoupled from the data model, which the Django REST Framework -(DRF) makes it pretty easy to do. Where needed, support classes have been added to help the REST -API correctly and consistently represent our specialized models and relationships (most notably -the Master/Detail relationship). - -Our API starts at a :term:`DRF` :term:`Router`. 
Each :term:`ViewSet` is attached to this -router, and the router's routes are exposed in urls.py. Subclasses of -:class:`pulpcore.app.viewsets.base.NamedModelViewSet` are automatically registered with the API router, -and most (possibly all) ViewSets created by plugins should be subclasses of this base class. -NamedModelViewSets are associated with a Django queryset, a :term:`Serializer` that is able to -represent members of the Django queryset in the API, and an endpoint name used when registering -the ViewSet with the API router. - -All models exposed via the API must have a corresponding Serializer. Each NamedModelViewSet must -be related to a serializer and a queryset of model instances to serialize. - -Since Serializers and ViewSets are so closely related to the models they represent, the -serializers and viewsets directories are laid out similarly to the models directory to help keep -things consistent and easy to find. - -The API basic component tree looks like this:: - - router - |-- viewset - | |-- queryset - | |-- serializer - | - |-- viewset - | |-- ... - | - |-- ... - -When creating API components, consider these guidelines: - -* Where possible, API components representing models will be defined in files whose names match - the corresponding file names of the models represented. For example, if you're defining the - serializer for a model found in ``pulpcore.app.models.consumer``, the serializer should be defined in - ``pulpcore.app.serializers.consumer``, and imported by name into ``pulpcore.app.serializers``. - -* All objects represented in the REST API will be referred to by a single complete URL to that - object, using a DRF ``HyperlinkedRelatedField`` or subclass. Non-hyperlinked relations (e.g. - ``PrimaryKeyRelatedField``, ``SlugRelatedField``, etc) should be avoided. See the "Serializer - Relationships" section below for more details. In the database an object is identified by its - Primary Key. In the API an object is identified by its URL. - -* :class:`pulpcore.app.viewsets.base.NamedModelViewSet` subclasses defined in a plugin's "viewsets" module - are automatically registered with the API router. Endpoint names (the ``endpoint_name`` attribute) - should plural, not singular (e.g. /pulp/api/v3/repositories/, not /pulp/api/v3/repository/). - -* DRF supports natural keys on models in ModelViewSets with the "lookup_field" class attribute, but - only if the natural key is derived from a single field (e.g. ``Repository.name``). For natural - keys made up of multiple fields, a custom Viewset and Serializer are required. The custom ViewSet - ensures that the correct URL endpoints are created, and that a model instance can be returned for - a given natural key. The custom Serializer (and any necessary related serializer fields), at a - minimum, ensures that objects generate the correct ``pulp_href`` value when serialized. - - -Serializer Relationships ------------------------- - -Serializer Notes -^^^^^^^^^^^^^^^^ - -* Many of the model writing guidelines can be applied to writing serializers. Familiarity with - them is recommended for writers of serializers. - -* All Serializers representing Pulp Models should subclass - :class:`pulpcore.app.serializers.base.ModelSerializer`, as it provides useful behaviors to handle some - of the conventions used when building Pulp Models. 
-
-* Whether serializer fields are explicitly declared on the serializer class or not, the field names
-  to expose via the API must be declared by specifying 'fields' in the serializer's ``Meta`` class,
-  as described in the DRF :term:`Serializer` docs.  The names of exposed API fields should always
-  be explicit.
-
-* Serialized objects will provide their own URL as the value of the "pulp_href" field on the serializer.
-  You will need to use a ``rest_framework.serializers.HyperlinkedIdentityField`` to generate the
-  ``pulp_href`` field value by specifying its ``view_name``. If this object is referenced in the url by
-  a field other than the pk, you will also need to specify a ``lookup_field``.
-
-* When subclassing serializers, you should also explicitly inherit properties that would normally
-  be overridden in the parent Serializer's Meta class.
-
-
-Normal
-^^^^^^
-
-A "Normal" relationship, for the purposes of this document, is defined as a Model that relates
-to another Model with no specialized models on either side.
-
-"Specialized" models include Generic Relations or a relation to the "Detail" side of a Master/Detail
-Model, and are documented below.
-
-When relating a serializer to serializers representing other models (or lists of other models),
-remember to use DRF's HyperlinkedRelatedField, or a subclass of it, to ensure the relationship
-is represented by complete URLs. Since this is a normal thing to do, the DRF docs explain how
-to do it in detail:
-
-http://www.django-rest-framework.org/api-guide/relations/#hyperlinkedrelatedfield
-
-To determine the 'view_name' to use when declaring a HyperlinkedRelatedField, it should be
-``<endpoint_name>-detail``, e.g. 'repositories-detail' when relating to a "normal" model
-ViewSet whose ``endpoint_name`` is 'repositories'.
-
-Nested
-^^^^^^
-
-Serializers can be nested inside other serializers, so in some cases it might make for a
-better user experience to nest related objects inside their parent rather than only presenting
-a list of links to related objects. When relating to "normal" models, this is also supported by
-DRF out of the box, and the DRF docs explain how to do it in detail:
-
-http://www.django-rest-framework.org/api-guide/relations/#nested-relationships
-
-There are caveats to this when the nested relationship is intended to be writable. Mainly, DRF
-needs to be told *how* it's supposed to validate and update nested objects. This is done by
-implementing the create and update methods on the serializer that contains nested serializers,
-as documented here:
-
-http://www.django-rest-framework.org/api-guide/relations/#writable-nested-serializers
-
-Nesting many read/write serializers may result in very complicated create/update methods, but
-doing so potentially decreases the number of endpoints a user has to use when accessing the API,
-which increases usability. The opposite is also true, in that too much nesting might hinder
-API usability, so the question of whether or not to nest a serializer should be handled
-case-by-case.
-
-An example of where this *might not* be useful is including complete Detail representations
-of Content related to a Repository when viewing a Repository instance, since those instances
-would have to be `cast()`, and there could literally be millions of them.
-
-Master/Detail
-^^^^^^^^^^^^^
-
-The Master/Detail model relationship used in platform models is an internal detail that should be
-invisible to the API user. "Master" models of the Master/Detail relationship should not be exposed
-via the API.
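-
-Before looking at how detail models are handled, the sketch below ties together the serializer
-notes and the "normal" relationship guidance above. It is a hypothetical example: the ``Widget``
-name and the view names are invented for illustration, and a real Pulp serializer would subclass
-``ModelSerializer`` from ``pulpcore.app.serializers`` and declare ``Meta.fields`` as described
-earlier; plain DRF classes are used here only to keep the sketch self-contained.
-
-.. code-block:: python
-
-    from rest_framework import serializers
-
-
-    class WidgetSerializer(serializers.Serializer):
-        # The serialized object's own URL, exposed as "pulp_href".
-        pulp_href = serializers.HyperlinkedIdentityField(view_name="widgets-detail")
-
-        # Relations are expressed as full URLs, never as bare primary keys.
-        repository = serializers.HyperlinkedRelatedField(
-            view_name="repositories-detail", read_only=True
-        )
-
-Detail models need specialized fields instead of the plain ``HyperlinkedRelatedField`` used here,
-as the following sections explain.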
- -"Detail" models, then, provide a bit of a challenge, because the API needs to ensure that it is -rendering the down-cast version of the model instance requested, or referencing the correct view -name of that model when using a related field. - -This is enough of a tricky problem that it has its own section in the docs a little bit below, -called "Master/Detail Relationships Overview". - -Building Explicit Serializers -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -In Pulp 3, the REST API will adhere to semantic versioning. This means that we need to exercise -control over what fields are exposed in the REST API, and that those fields are always exposed -the same way so that we don't break backward compatibility. To convert a ModelSerializer to its -explicit Serializer class, DRF provides an excellent bit of functionality:: - - >>> from serializers import RepositorySerializer - >>> RepositorySerializer() - RepositorySerializer(): - pulp_href = HyperlinkedIdentityField(view_name='repositories-detail') - name = CharField(style={'base_template': 'textarea.html'}, validators=[]) - description = CharField(allow_blank=True, required=False, style={'base_template': 'textarea.html'}) - last_content_added = DateTimeField(allow_null=True, required=False) - last_content_removed = DateTimeField(allow_null=True, required=False) - content = HyperlinkedRelatedField(many=True, read_only=True, view_name='content-detail') - -DRF Serializers fully support __repr__, which means calling repr() on them will return a string -that can be used to create that serializer. So, to see what fields DRF automatically generated -for a ModelSerializer, either instantiate it in an interpreter, or capture the output via repr() -and output it explicitly. - - -Master/Detail Relationships Overview ------------------------------------- - -The Master/Detail pattern that we're using in our Models requires some specific behaviors to -be properly implemented in the API. Care has been taken to expose the inner workings of these -behaviors to be easy to override or customize in plugins (if needed). - -ViewSets -^^^^^^^^ - -As with most things related to the API, the place to start working with Master/Detail models -is in their ViewSet. The default ViewSet base class provided by the Pulp platform, -:class:`pulpcore.app.viewsets.base.NamedModelViewSet` is aware of Master/Detail relationships, and -will do the right thing when registered with our API router. In order to benefit from this -behavior, a ViewSet must be declared that represents the Master model of a Master/Detail -relationship, and that ViewSet must, at a minimum, have its ``endpoint_name`` set to something -reasonable for that master model. For example, the Master ViewSet representing the Content -Model should probably have its ``endpoint_name`` be set to "content". - -All ViewSets representing Detail Models must subclass their respective Master ViewSet, and have -their ``endpoint_name`` set to a string that uniquely identifies them. The autogenerated API -endpoint for a Detail ViewSet will include both the master and detail ``endpoint_name``. -Building on the Content Model example, if we were making a ViewSet to represent the RPM -Detail Model, a reasonable ``endpoint_name`` would be "rpm". When combined with its Master -ViewSet, the generated endpoint would become ``content/rpm``. - -If in doubt, the Master ViewSet's ``endpoint_name`` should be set to the Master Model's -plural verbose name (e.g. 
``Content._meta.verbose_name_plural``, which is "content"), and
-the Detail ViewSet's ``endpoint_name`` should be set to the Detail Model's TYPE value (e.g.
-``RPM.TYPE``, which is probably ``"rpm"``). The generated endpoint for this detail ViewSet
-example would then become ``content/rpm``.
-
-Note that the Detail ViewSet's ``endpoint_name`` only needs to be unique among its Detail
-ViewSet peers sharing the same Master ViewSet. It would be perfectly acceptable, for example,
-to have a Detail Remote ViewSet with ``endpoint_name`` "rpm", since the generated endpoint
-for that ViewSet would be something like ``remote/rpm``, and not conflict with any of the
-endpoints generated for Detail ViewSets that share the Content Model as a Master.
-
-Setting ``endpoint_name`` to a string literal rather than deriving its value is an intentional
-decoupling of the API from the Models represented in it. When writing ViewSets, avoid the
-temptation to do things like this::
-
-    endpoint_name = Master._meta.verbose_name_plural
-    endpoint_name = Detail.TYPE
-    endpoint_name = anything_else_that_is_not_a_string_literal()
-
-Serializers
-^^^^^^^^^^^
-
-Since Master ViewSets are never exposed in the API (they exist only to be subclassed by Detail
-ViewSets), they don't need to have an attached Serializer. However, a Serializer *must* exist
-representing the Master Model in a Master/Detail relationship, and every Serializer representing
-Detail Models must subclass their respective Master Serializer.
-
-Furthermore, every Serializer representing a Master Model should subclass a special Serializer
-created for Master/Detail models, :class:`pulpcore.app.serializers.base.ModelSerializer`. This
-Serializer includes a definition for the `type` field present on all models inheriting from
-:class:`pulpcore.app.models.MasterModel`, and also identifies the `type` field as filterable,
-centralizing common behavior that we're likely to want in all Serializers representing Models
-in a Master/Detail Relationship.
-
-Relating to Detail Serializers
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-When creating serializers for models that relate to Master/Detail models, a customized Serializer
-field must be used that is Master/Detail aware so that URLs identifying the Detail Model instance
-API representations are generated correctly.
-
-In this case, instead of using a normal ``HyperlinkedRelatedField``,
-:class:`pulpcore.app.serializers.base.DetailRelatedField` should be used. This field knows how to
-correctly generate URLs to Detail types in the API by casting them down to their Detail Model
-type, but should be used with care due to the inherent cost in calling ``cast()`` on an arbitrary
-number of instances.
-
-Identifying Detail Serializers
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Similar to using ``DetailRelatedField``, Detail Model Serializers should use
-:class:`pulpcore.app.serializers.base.DetailIdentityField` when declaring their ``pulp_href`` attribute,
-so that the URLs generated by Detail Serializers return the proper URL to the cast Detail
-object.
-
-
-Pagination
-----------
-
-:term:`Pagination` support is provided by DRF, and should be used in the API to mitigate the
-potentially negative effects caused by users attempting to iterate over large datasets.
The -default pagination implementation use's DRF's ``CursorPagination`` method: - -http://www.django-rest-framework.org/api-guide/pagination/#cursorpagination - -Other methods are supported by DRF, and might be more appropriate in specific use-cases, but -cursor-based pagination provides the best support for our largest set of data, which is Content -stored in a Repository (or Repositories). By default, an object's id is used for the purposes -of cursor-based pagination, allowing an API user to reliably consume large datasets with no -duplicated entries. - -Custom paginators can be easily created and attached to ViewSets using the ``paginator_class`` -class attribute in the ViewSet class definition. - - -Filtering ---------- - -Filtering Backend -^^^^^^^^^^^^^^^^^ - -http://www.django-rest-framework.org/api-guide/filtering/#setting-filter-backends - -We will be using ``PulpFilterBackend``, a subclass of the rest framework's ``DjangoFilterBackend``. -This is set as the default in the Django settings.py, but can be overridden in individual ViewSets. - -Allowing Filters -^^^^^^^^^^^^^^^^ - -Filters must be explicitly specified and are not enabled by default. - - -filterset_fields -**************** - -The simplest method of adding filters is simply to define `filterset_fields` on the ViewSet. Fields -specified here will be "filterable", but only using equality. - -To use this request: - -.. code-block:: bash - - http 'http://192.168.121.134:24817/pulp/api/v3/repositories/?name=singing-gerbil' - -This is what the ViewSet should look like: - -.. code-block:: python - - class RepositoryViewSet(viewsets.ModelViewSet): - queryset = models.Repository.objects.all() - serializer_class = serializers.RepositorySerializer - filterset_fields = ('name',) - - -FilterSet -********* - -Defining a `FilterSet` allows more options. To start with, this is a `ViewSet` and `FilterSet` -that allows the same request: - -.. code-block:: bash - - http 'http://192.168.121.134:24817/pulp/api/v3/repositories/?name=singing-gerbil' - - -.. code:: python - - class RepositoryFilter(filters.FilterSet): - pass - - class Meta: - model = models.Repository - fields = ['name'] - - class RepositoryViewSet(viewsets.ModelViewSet): - queryset = models.Repository.objects.all() - serializer_class = serializers.RepositorySerializer - filterset_class = RepositoryFilter - - -.. note:: - - For ``NamedModelViewSet`` the base class ``BaseFilterSet`` should be used. - -Beyond Equality -*************** - -A `FilterSet` also allows filters that are more advanced than equality. We have access to any of -the filters provided out of the box by `django-filter`. - -https://django-filter.readthedocs.io/en/latest/ref/filters.html#filters - -Simply define any filters in the `FilterSet` and then include them in `fields` in the Filter's Meta class. - -`http 'http://192.168.121.134:24817/pulp/api/v3/repositories/?name_contains=singing'` - -.. code-block:: python - - class RepositoryFilter(filters.FilterSet): - name_contains = django_filters.filters.CharFilter(field_name='name', lookup_expr='contains') - - class Meta: - model = models.Repository - fields = ['name_contains'] - - -Custom Filters -************** - -If the filters provided by `django-filter` do not cover a use case, we can create custom filters -from the `django-filter` base classes. - -"In" is a special relationship and is not covered by the base filters, however we can create a -custom filter based on the `BaseInFilter`. - -.. 
code-block:: bash - - http 'http://192.168.121.134:24817/pulp/api/v3/repositories/?name_in_list=singing-gerbil,versatile-pudu' - - -.. code-block:: python - - class CharInFilter(django_filters.filters.BaseInFilter, - django_filters.filters.CharFilter): - pass - - class RepositoryFilter(filters.FilterSet): - name_in_list = CharInFilter(name='name', lookup_expr='in') - - class Meta: - model = models.Repository - fields = ['name_in_list'] - -.. note:: - - We should be careful when naming these filters. Using `repo__in` would be fine because - repo is not defined on this model. However, using `name__in` does *not* work because Django - gets to it first looking for a subfield `in` on the name. - - -Documenting ------------ - -By default, the docstring of a CRUD method on a ViewSet is used to generate that endpoint's -description. Individual parameters and responses are documented largely automatically based -on the Serializer field type, but using the "help_text" kwarg when defining serializer fields -lets us add a user-friendly string that is then included in the API endpoint. - -ViewSets can override the ``get_view_description`` method to customize the source and formatting -of the description field, if desired. Serializer fields should set their ``help_text`` value for -every field defined to help API users know the purpose of each field represented in the API. - -If a site-wide customization of docstring generation is desired, DRF provides a mechanism for -changing the default function used in ``get_view_description``: - -http://www.django-rest-framework.org/api-guide/settings/#view_description_function - -There are several support tools that work with DRF to aggregate endpoint documentation into -a browsable site of API docs, listed here: - -http://www.django-rest-framework.org/topics/documenting-your-api/#endpoint-documentation - -Because "DRF Docs" and "Django REST Swagger" do not generate documentation for responses, -Pulp is generating its REST API with `drf-spectacular `_ -until either DRF supports OpenAPI, or until CoreAPI supports response documentation. - - -Glossary --------- - -.. glossary:: - - DRF - The Django Rest Framework. - - Pagination - The practice of splitting large datasets into multiple pages. - - Router - A :term:`DRF` API router exposes registered views (like a :term:`ViewSet`) at - programatically-made URLs. Among other things, routers save us the trouble of having - to manually write URLs for every API view. - - http://www.django-rest-framework.org/api-guide/routers/ - - Serializer - A :term:`DRF` Serializer is responsible for representing python objects in the API, - and for converting API objects back into native python objects. Every model exposed - via the API must have a related serializer. - - http://www.django-rest-framework.org/api-guide/serializers/ - - ViewSet - A :term:`DRF` ViewSet is a collection of views representing all API actions available - at an API endpoint. ViewSets use a :term:`Serializer` or Serializers to correctly - represent API-related objects, and are exposed in urls.py by being registered with - a :term:`Router`. API actions provided by a ViewSet include "list", "create", "retreive", - "update", "partial_update", and "destroy". Each action is one of the views that make up - a ViewSet, and additional views can be added as-needed. 
- - http://www.django-rest-framework.org/api-guide/viewsets/ diff --git a/docs/contributing/dev-setup.rst b/docs/contributing/dev-setup.rst deleted file mode 100644 index c295b45af2..0000000000 --- a/docs/contributing/dev-setup.rst +++ /dev/null @@ -1,42 +0,0 @@ -.. _DevSetup: - -Developer Setup -=============== - -To ease developer setup, we have the `oci-env `_ which is our -developer environment based off the `Pulp OCI Images `_. -It is a CLI tool that uses ``docker/podman-compose`` to quickly setup a Pulp instance with your -specified configuration. - -.. _getsource: - -Get the Source --------------- - -It is assumed that any Pulp project repositories are cloned into one directory. You must clone the -``oci-env`` into the same directory as all of your other Pulp project repositories.:: - - $ git clone https://github.com/pulp/oci_env.git - -You will need ``pulp/pulpcore`` at a minimum:: - - $ git clone https://github.com/pulp/pulpcore.git - -Additionally, you will need at least one plugin.:: - - $ git clone https://github.com/pulp/pulp_file.git - -The current base branch on this repository is the main branch. - -.. warning:: - - It is important to ensure that your repositories are all checked out to compatible versions. - Check the ``setup.py`` and ``requirements.txt`` files of each repository to see what version - it provides and which versions it requires, respectively. - - -Installation ------------- - -Follow the steps at `Getting Started `_ to setup -your Pulp instance after cloning the Pulp repositories. diff --git a/docs/contributing/documentation.rst b/docs/contributing/documentation.rst deleted file mode 100644 index 131837e029..0000000000 --- a/docs/contributing/documentation.rst +++ /dev/null @@ -1,41 +0,0 @@ -Documentation -============= - -Principles ----------- - -Pulp's documentation is designed with the following principles: - -#. Avoid documenting external projects, providing links wherever reasonable. -#. Documentation layout should be designed for users to intuitively find information. -#. The structure should present introductory material before advanced topics. -#. Documentation should cross reference to limit repitition. -#. Pulp terminology should be be explicitly defined and added to the glossary. -#. Documentation should stay consistent with the language used in the :doc:`/concepts`. -#. Where reasonable, documents should include: - - #. Summary of content. - #. Intended audience. - #. Links to prerequisite material. - #. Links to related material. - -Building the Docs: ------------------- - -If you are using a developer Vagrant box, the docs requirements should already be installed. - -Otherwise, (in your virtualenv), you should install the docs requirements.:: - - (pulp) $ pip install -r doc_requirements.txt - -To build the docs, from the docs directory, use ``make``:: - - (pulp) $ cd docs - (pulp) $ make html - -Use your browser to load the generated html, which lives in ``docs/_build/html/`` - -You do not need to clean the docs before rebuilding, however you can do it by running:: - - (pulp) $ cd docs - (pulp) $ make clean diff --git a/docs/contributing/git.rst b/docs/contributing/git.rst deleted file mode 100644 index 341d890df7..0000000000 --- a/docs/contributing/git.rst +++ /dev/null @@ -1,158 +0,0 @@ -Git -=== - -Pulp source code lives on `GitHub `_. This document is definitive -for :term:`pulpcore` only, but some plugins may choose to follow the same strategies. - -.. 
_git-branch: - -Versions and Branches ---------------------- - -Code is submitted by a Pull Request on Github to merge the changes to ``main`` which represents -the next ``pulpcore`` release. See :ref:`versioning` for more details. - - -Commits -------- - -.. _rebase: - -Rebasing and Squashing -********************** - -We prefer each pull request to contain a single commit. Before you submit a PR, please consider an -`interactive rebase and squash. -`_ - -The ``git commit --amend`` command is very useful, but be sure that you `understand what it does -`_ before you use it! -GitHub will update the PR and keep the comments when you force push an amended commit. - -.. warning:: - Keep in mind that rebasing creates new commits that are unique from your - original commits. Thus, if you have three commits and rebase them, you must - make sure that all copies of those original commits get deleted. Did you push - your branch to origin? Delete it and re-push after the rebase. - -.. _commit-message: - -Commit Message -************** - -Commit messages in Pulp should contain a human readable explanation of what was fixed. They should -also follow the standard git message format of starting with a subject line or title (usually -wrapped at about 50 chars) and optionally, a longer message (usually wrapped at 72 characters) -broken up into paragraphs. For more on what constitutes a good commit message, we recommend `Tim -Pope's blog post on the subject `_. - -Each commit message should link to an issue on the `pulpcore Github Issue tracker `_. See the `Github Linking Docs `_ and include at least one link in your commit message. - -If you must create a commit for which there is no issue, add the ``[noissue]`` syntax in the commit -message. - -Putting this all together, the following is an example of a good commit message:: - - Update install and quickstart - - The install docs and quickstart was leaving out an important step on - the worker configuration. - - closes #1392 - -.. hint:: - - A good candidate for a ``noissue`` tag is a one line fix or a typo, otherwise we encourage - you to open an issue. - - -.. _requiring-other-pull-requests: - -Requiring other Pull Requests -***************************** - -Sometimes a new feature may require changes to both `pulpcore` and one or many other plugins. -However, plugins can only depend on features that are already released with `pulpcore` or any other -dependency. Sometimes though you need to demonstrate, that a new feature about to be added to -`pulpcore` will work with a corresponding plugin change before you can get the needed approvals. In -order to do so, you can depend the plugin's pull request on the head of the pull request or the -main branch of `pulpcore` in the following way: - -Add a line like:: - - git+https://github.com/pulp/pulpcore@refs/pull/1234/head - git+https://github.com/pulp/pulpcore@refs/heads/main - -to `ci_requirements.txt` in the plugin PR. Make sure that file is covered by `MANIFEST.in`. Also -bump the requirement on `pulpcore` in `requirements.txt` to at least the current `dev` version if -you want to be sure the `lower bounds` scenario passes. - -This works accordingly for depending on other plugins. - -This will allow the tests in the CI to run, but it will fail the `ready-to-ship` check. The -depended on PR must be merged **and** released before a PR like this can be merged. - -For very similar reasons it can happen that you need changes to the base image used in the CI to -spin up a new pulp container. 
In those cases you can build your own modified version of the image -and push it to a container registry. Now you can specify the image to use in the last commit -message like:: - - CI Base Image: pulp/pulp-ci:special_feature - -Attention and care must be given not to merge PRs that require custom CI images. - - -.. _changelog-update: - -Changelog update -**************** - -The CHANGES.rst file is managed using the `towncrier tool `_ -and all non trivial changes must be accompanied by a news entry. - -For user facing changes, put those news files into ``CHANGES/``. For Plugin API changes, put those -into the ``CHANGES/plugin_api/`` folder. - -To add an entry to the news file, you first need an issue on github describing the change you -want to make. Once you have an issue, take its number and create a file inside of the ``CHANGES/`` -or ``CHANGES/plugin_api/`` directory named after that issue number with one of the extensions below. - -+--------------+----------------------------------------------------------------------+ -| extension | description | -+==============+======================================================================+ -| .bugfix | A bug fix | -+--------------+----------------------------------------------------------------------+ -| .feature | A new feature | -+--------------+----------------------------------------------------------------------+ -| .removal | A backwards incompatible change (ie a removal or change in behavior) | -+--------------+----------------------------------------------------------------------+ -| .deprecation | Information about an upcoming backwards incompatible change | -+--------------+----------------------------------------------------------------------+ -| .doc | A documentation improvement | -+--------------+----------------------------------------------------------------------+ -| .misc | A change that is not visible to the end user | -+--------------+----------------------------------------------------------------------+ - -So if your user-facing issue is 3543 and it fixes a bug, you would create the file -``CHANGES/3543.bugfix``. Or if your plugin API change is 5432 and it's a breaking change you would -create the file ``CHANGES/plugin_api/5432.removal``. - -PRs can span multiple categories by creating multiple files (for instance, if you added a feature -and deprecated an old feature at the same time, you would create CHANGES/NNNN.feature and -CHANGES/NNNN.removal). Likewise if a PR touches multiple issues/PRs you may create a file for each -of them with the exact same contents and Towncrier will deduplicate them. - -The contents of this file are reStructuredText formatted text that will be used as the content of -the news file entry. You do not need to reference the issue or PR numbers here as towncrier will -automatically add a reference to all of the affected issues when rendering the news file. - -The changelog message should use past simple tense. When possible, the message should describe the -change being made as opposed to the problem or user story. Here are some examples: - -- Added API that allows users to export a repository version to disk. -- Fixed bug where whitespace was being trimmed from uploaded files. -- Added documentation for new pulpcore-manager command. 
diff --git a/docs/contributing/index.rst b/docs/contributing/index.rst deleted file mode 100644 index 6b607a3096..0000000000 --- a/docs/contributing/index.rst +++ /dev/null @@ -1,62 +0,0 @@ -Contributing -============ - -Contribution documentation generally assumes that the reader is familiar with -:doc:`Pulp basics`. If you have problems, you can :ref:`contact us` -or :doc:`file an issue`. - -Workflow --------- - -1. Clone the GitHub repo. -2. Make a change. -3. Make sure all tests pass. -4. Add a file into CHANGES folder for user facing changes and CHANGES/plugin_api for plugin API - changes. -5. Commit changes to own ``pulpcore`` clone. -6. :doc:`Record a demo ` (1-3 minutes). -7. Make pull request from github page for your clone against master branch. - - -Fundamentals ------------- - -.. toctree:: - :maxdepth: 1 - - dev-setup - tests - style-guide - documentation - git - record-a-demo - pull-request-walkthrough - - -Plugin Development ------------------- - -Developers interested in writing plugins should reference the `Pulp Plugin API -<../plugins/index.html>`_ documentation. - - -Reference ---------- - -.. toctree:: - :maxdepth: 1 - - architecture/index - platform-api/index - - -Suggesting Changes to the Pulp Development Process --------------------------------------------------- - -Pulp is a community project, and major changes to the way Pulp is developed, such as the release -cycle, and style guide, can be proposed: - - * at the Open Floor meeting - * through the developer mailing list (``pulp-dev@redhat.com``) - -See how to `get involved `_ for more information. diff --git a/docs/contributing/platform-api/app/apps.rst b/docs/contributing/platform-api/app/apps.rst deleted file mode 100644 index 841f204071..0000000000 --- a/docs/contributing/platform-api/app/apps.rst +++ /dev/null @@ -1,4 +0,0 @@ -pulp.app.apps -============= - -.. automodule:: pulpcore.app.apps diff --git a/docs/contributing/platform-api/app/auth.rst b/docs/contributing/platform-api/app/auth.rst deleted file mode 100644 index 415999e784..0000000000 --- a/docs/contributing/platform-api/app/auth.rst +++ /dev/null @@ -1,4 +0,0 @@ -pulp.app.auth -============= - -.. automodule:: pulpcore.app.authentication diff --git a/docs/contributing/platform-api/app/index.rst b/docs/contributing/platform-api/app/index.rst deleted file mode 100644 index 791433104c..0000000000 --- a/docs/contributing/platform-api/app/index.rst +++ /dev/null @@ -1,13 +0,0 @@ -pulp.app -======== - -.. toctree:: - - apps - auth - models - response - serializers - settings - urls - viewsets diff --git a/docs/contributing/platform-api/app/models.rst b/docs/contributing/platform-api/app/models.rst deleted file mode 100644 index 88cbdc7c3f..0000000000 --- a/docs/contributing/platform-api/app/models.rst +++ /dev/null @@ -1,6 +0,0 @@ -pulp.app.models -=============== - -.. automodule:: pulpcore.app.models - :undoc-members: - :imported-members: diff --git a/docs/contributing/platform-api/app/response.rst b/docs/contributing/platform-api/app/response.rst deleted file mode 100644 index 3b673b77d1..0000000000 --- a/docs/contributing/platform-api/app/response.rst +++ /dev/null @@ -1,6 +0,0 @@ -pulp.app.response -================= - -All response objects documented here should be imported directly from the ``pulpcore.app.response`` namespace. - -.. 
automodule:: pulpcore.app.response diff --git a/docs/contributing/platform-api/app/serializers.rst b/docs/contributing/platform-api/app/serializers.rst deleted file mode 100644 index ef054dd0d6..0000000000 --- a/docs/contributing/platform-api/app/serializers.rst +++ /dev/null @@ -1,26 +0,0 @@ -pulp.app.serializers -==================== - -All serializers documented here should be imported directly from the ``pulp.app.serializers`` namespace. - -.. automodule:: pulpcore.app.serializers - -pulp.app.serializers.base -------------------------- - -.. automodule:: pulpcore.app.serializers.base - -pulp.app.serializers.fields ---------------------------- - -.. automodule:: pulpcore.app.serializers.fields - -pulp.app.serializers.content ----------------------------- - -.. automodule:: pulpcore.app.serializers.content - -pulp.app.serializers.repository -------------------------------- - -.. automodule:: pulpcore.app.serializers.repository diff --git a/docs/contributing/platform-api/app/settings.rst b/docs/contributing/platform-api/app/settings.rst deleted file mode 100644 index 23bc9b8c13..0000000000 --- a/docs/contributing/platform-api/app/settings.rst +++ /dev/null @@ -1,4 +0,0 @@ -pulp.app.settings -================= - -.. automodule:: pulpcore.app.settings diff --git a/docs/contributing/platform-api/app/urls.rst b/docs/contributing/platform-api/app/urls.rst deleted file mode 100644 index 0524fced2d..0000000000 --- a/docs/contributing/platform-api/app/urls.rst +++ /dev/null @@ -1,4 +0,0 @@ -pulp.app.urls -============= - -.. automodule:: pulpcore.app.urls diff --git a/docs/contributing/platform-api/app/viewsets.rst b/docs/contributing/platform-api/app/viewsets.rst deleted file mode 100644 index 56789cf065..0000000000 --- a/docs/contributing/platform-api/app/viewsets.rst +++ /dev/null @@ -1,24 +0,0 @@ -pulp.app.viewsets -================= - -All viewsets documented here should be imported directly from the ``pulpcore.app.viewsets`` namespace. - -Viewsets that dispatch tasks that are descendants of :class:`~pulpcore.tasking.tasks.UserFacingTask` -should return an :class:`~pulpcore.app.response.OperationPostponedResponse`. - -.. automodule:: pulpcore.app.viewsets - -pulp.app.viewsets.base ----------------------- - -.. automodule:: pulpcore.app.viewsets.base - -pulp.app.viewsets.content -------------------------- - -.. automodule:: pulpcore.app.viewsets.content - -pulp.app.viewsets.repository ----------------------------- - -.. automodule:: pulpcore.app.viewsets.repository diff --git a/docs/contributing/platform-api/constants.rst b/docs/contributing/platform-api/constants.rst deleted file mode 100644 index eb276a3eae..0000000000 --- a/docs/contributing/platform-api/constants.rst +++ /dev/null @@ -1,5 +0,0 @@ -pulp.constants -============== - -.. automodule:: pulpcore.constants - :members: diff --git a/docs/contributing/platform-api/exceptions.rst b/docs/contributing/platform-api/exceptions.rst deleted file mode 100644 index 375b1dd84d..0000000000 --- a/docs/contributing/platform-api/exceptions.rst +++ /dev/null @@ -1,11 +0,0 @@ -pulp.exceptions -=============== - -All exceptions documented here should be imported directly from the ``pulp.exceptions`` namespace. - -.. automodule:: pulpcore.exceptions - -pulp.exceptions.base --------------------- - -.. 
automodule:: pulpcore.exceptions.base diff --git a/docs/contributing/platform-api/index.rst b/docs/contributing/platform-api/index.rst deleted file mode 100644 index d415abda00..0000000000 --- a/docs/contributing/platform-api/index.rst +++ /dev/null @@ -1,12 +0,0 @@ -Platform API -============ - -All items documented here are only for use when working with the Pulp Platform. When writing -plugins, consult the Plugin API. - -.. toctree:: - - app/index - constants - exceptions - tasking diff --git a/docs/contributing/platform-api/tasking.rst b/docs/contributing/platform-api/tasking.rst deleted file mode 100644 index ddf5a5c44d..0000000000 --- a/docs/contributing/platform-api/tasking.rst +++ /dev/null @@ -1,25 +0,0 @@ - -pulp.tasking -============ - -.. automodule:: pulpcore.tasking - -pulp.tasking.worker -------------------- - -.. automodule:: pulpcore.tasking.worker - -pulp.tasking.storage --------------------- - -.. automodule:: pulpcore.tasking.storage - -pulp.tasking.tasks ------------------- - -.. automodule:: pulpcore.tasking.tasks - -pulp.tasking._util ------------------- - -.. automodule:: pulpcore.tasking._util diff --git a/docs/contributing/pull-request-walkthrough.rst b/docs/contributing/pull-request-walkthrough.rst deleted file mode 100644 index f900d69bb1..0000000000 --- a/docs/contributing/pull-request-walkthrough.rst +++ /dev/null @@ -1,35 +0,0 @@ -Pull Request Walkthrough -======================== - -Changes to pulpcore are submitted via `GitHub Pull Requests (PR) -`_ to the `pulp git repository -`_ . Plugin git repositories are listed in the `plugin table -`_. - -Checklist ---------- - -#. Add :ref:`functional tests` or :ref:`unit tests` where appropriate and ensure tests - are passing on the CI. -#. Add a :ref:`CHANGES entry `. -#. Update relevent :doc:`documentation`. Please build the docs to test! -#. :ref:`Rebase and squash` to a single commit. -#. Write an excellent :ref:`commit-message`. Make sure you reference and link to the issue. -#. Push your branch to your fork and open a `Pull request across forks - `_. -#. If the change requires a corresponding change in pulp-cli, open a PR against the pulp-cli or - :doc:`file an issue`. - -Review ------- - -Before a pull request can be merged, the :ref:`tests` must pass and it must -be reviewed. We encourage you to :ref:`reach out to the developers` to get speedy review. - - -To Cherry-Pick or Not ---------------------- - -If you are fixing a bug that should also be backported to another branch than ``main``, add the -backport label, .e.g ``backport-3.18``. PR authors can also add or remove this label if they have -write access. diff --git a/docs/contributing/record-a-demo.rst b/docs/contributing/record-a-demo.rst deleted file mode 100644 index 9cd40a0774..0000000000 --- a/docs/contributing/record-a-demo.rst +++ /dev/null @@ -1,19 +0,0 @@ -Record a Demo -=============== - -If you are contributing a new feature or a substantive change, please record a short demo and provide a downloadable link or asciinema URL to the demo as part of your pull request. - -These short demos help the wider Pulp community grasp the latest changes in Pulp. They also provide an extra opportunity for feedback on changes. - -While demos with audio are more than welcome, audio is not required. - -If your demo involves only a terminal recording, feel free to use a tool such as `asciinema `_. - -If you use `asciinema `_, adjust your terminal window to 90x45 characters to maintain the consistency of each of the demos. 
- -You can also use `Saftladen `_ - a bundle of scripts to generate asciinema demos for Pulp topics without live user interaction. - -If you think that your change is not substantive enough to require a demo, mention that in the body of the pull request. -However, those reviewing the pull request might ask you to record a demo if they think it would help the wider community understand the change. - -Please tag `@melcorr `_ in the body of your pull request to alert her to add your new demo to the list of demos. diff --git a/docs/contributing/style-guide.rst b/docs/contributing/style-guide.rst deleted file mode 100644 index bf00f6ec79..0000000000 --- a/docs/contributing/style-guide.rst +++ /dev/null @@ -1,102 +0,0 @@ -Style Guide -=========== - -Python Version --------------- -All Pulp 3+ code will use Python 3.8+. It is not necessary to maintain backwards compatibility with -Python 2.Y. - -PEP-8 ------ -All code should be compliant with PEP-8_ where reasonable. - -It is recommended that contributors check for compliance by running flake8_. We include -``flake8.cfg`` files in our git repositories for convenience. - -.. _PEP-8: https://www.python.org/dev/peps/pep-0008 -.. _flake8: http://flake8.pycqa.org/en/latest/ - -Black ------ -All python code (except for the usually generated files in the migration folder) must be formatted -according to the ruleset defined by black_. As black_ is able to automatically reformat python code, -contributors are supposed to run `black .` in the repositories ``pulpcore`` directory. For various -IDEs / editors, there is also an integration_ for black_. - -.. _black: https://github.com/psf/black -.. _integration: https://github.com/psf/black#editor-integration - -Modifications: -************** -line length: We limit to 100 characters, rather than 79. - - -.. _google-docstrings: - -Documentation -------------- - -Documentation text should be wrapped with linebreaks at 100 characters (like our code). Literal -blocks, urls, etc may exceed the 100 character limit though. There should be no whitespace at the -end of the line. Paragraphs should have an empty line to separate them. Here is an example of some -formatted text:: - - Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore - et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut - aliquip ex ea commodo consequat. - - Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore. Excepteur sint - occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. - - -In-code Documentation -********************* - -In-code documentation should follow the general documentation guidelines listed above. - -Most classes and functions should have a docstring that follows the conventions described in -`Google's Python Style Guide `_. - -Exceptions and Clarifications -============================= - -#. Modules should not include license information. -#. The type of each Args value should be included after the variable name in parentheses. The type - of each Returns value should be the first item on the line. -#. Following the type of Args and Returns values, there will be a colon and a single space followed - by the description. Additional spaces should not be used to align types and descriptions. -#. Fields and Relations sections will be used when documenting fields on Django models. The Fields - section will be used for non-related fields on Model classes. 
The Relations section will be used - for related fields on Model classes. - -Auto-Documentation -================== - -Docstrings will be used for auto-documentation and must be parsable by the -`Napoleon plugin for Sphinx `_. - -Example Docstring -================= - -.. code-block:: python - - def example_function(): - """ - The first section is a summary, which should be restricted to a single line. - - More detailed information follows the summary after a blank line. This section can be as - many lines as necessary. - - Args: - arg1 (str): The argument is visible, and its type is clearly indicated. - much_longer_argument (str): Types and descriptions are not aligned. - - Returns: - bool: The return value and type is very clearly visible. - - """ - -Encoding -======== -Python 3 assumes that files are encoded with UTF-8, so it is not necessary to declare this in the -file. diff --git a/docs/contributing/tests.rst b/docs/contributing/tests.rst deleted file mode 100644 index 6a5dda20a6..0000000000 --- a/docs/contributing/tests.rst +++ /dev/null @@ -1,109 +0,0 @@ -.. _istqb: https://www.istqb.org/downloads/syllabi/foundation-level-syllabus.html - -.. _tests: - -Testing Pulp -============ - -There are two types of tests in *pulp_core* and in the plugins: - -1. **Unittests** are meant to test the interface of a specific unit utilizing a test database. -2. **Functional tests** are meant to test certain workflows utilizing a running instance of pulp. - -A pull request that has failing unit or functional tests cannot be merged. - - -.. _unit-tests: - -Unit Tests ----------- - -New code is encouraged to have basic unit tests that demonstrate that -units (function, method or class instance) are working correctly. - -The unit tests for `pulpcore` are in `pulpcore/tests -`_. - - -.. _functional-tests: - -Functional Tests ----------------- - -Functional tests verify a specific feature. -In general functional tests tend to answer the question "As an user can I do this?" - -It is highly encouraged to accompany new features with functional -tests in `pulpcore/functional -`_. - -Only the tests for features related to `pulpcore` should live in this repository. - -Functional tests for features related to a specific plugin should live in the -plugin repository itself. For example: - - * `File Plugin - `_ - - * `RPM Plugin - `_ - -Prerequisites for running tests -------------------------------- - -If you want to run the functional tests, you need a running Pulp instance that is allowed to be -mixed up by the tests (in other words, running the tests on a production instance is not -recommended). For example, using the development vm (see :ref:`DevSetup`), -this can be accomplished by `workon pulp; pulpcore-manager runserver 24817`. The -``pulpcore-manager`` command is ``manage.py`` configured with the -``DJANGO_SETTINGS_MODULE="pulpcore.app.settings"``. - -Using pulplift -^^^^^^^^^^^^^^ - -When running one of the `pulp3-source-*` boxes in `pulplift`, all the services are running. They -should be restarted with `prestart` if any pulp code (not test code) has been changed. - -When testing S3 support, you can start and configure a local `minio` container with `pminio`. - -Pulp functional tests use a set of upstream fixture repositories hosted on -`fixtures.pulpproject.org `_. In case you want serve those -locally, you can run `pfixtures` which will execute a `nginx` container with a copy of those -fixtures. - -For more info about Pulp development specific helper commands, you can consult `phelp`. 
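With those prerequisites in place, a first functional test in the "as a user, can I do this?" style
could look like the sketch below. It is not taken from the pulpcore test suite; the base URL matches
the ``runserver 24817`` example above, and the fields read from the status endpoint are assumptions
to verify against a real response.

.. code-block:: python

    import requests

    PULP_URL = "http://localhost:24817"  # e.g. started with `pulpcore-manager runserver 24817`

    def test_status_reports_online_workers():
        """As a user, can I reach the REST API and see a connected database and online workers?"""
        response = requests.get(f"{PULP_URL}/pulp/api/v3/status/")
        response.raise_for_status()
        status = response.json()
        assert status["database_connection"]["connected"]
        # If this fails, the task runners are probably not running; see the note further below.
        assert status["online_workers"]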
- -Running tests -------------- - -In case pulp is installed in a virtual environment, activate it first (`workon pulp`). - -All tests of a plugin (or pulpcore itself) are run with `pulpcore-manager test `. -This involves setting up (and tearing down) the test database, however the functional tests are -still performed against the configured pulp instance with its *production* database. - -To only perform the unittests, you can skip the prerequisites and call -`pulpcore-manager test .tests.unit`. - -If you are only interested in functional tests, you can skip the creation of the test database by -using `pytest //tests/functional`. - -.. note:: - - Make sure, the task runners are actually running. In doubt, run `prestart` or - `systemctl restart pulpcore-worker@*`. - -.. note:: - - You can be more specific on which tests to run by calling something like - `pulpcore-manager test pulp_file.tests.unit.test_models` or - `py.test //tests/functional/api/test_sync.py`. - - -Contributing to tests ---------------------- - -A new version of Pulp will only be released when all unit and functional tests are -passing. - -Contributing test is a great way to ensure that your workflows never regress. diff --git a/staging_docs/dev/guides/git.md b/docs/dev/guides/git.md similarity index 100% rename from staging_docs/dev/guides/git.md rename to docs/dev/guides/git.md diff --git a/staging_docs/dev/guides/plugin-walkthrough.md b/docs/dev/guides/plugin-walkthrough.md similarity index 100% rename from staging_docs/dev/guides/plugin-walkthrough.md rename to docs/dev/guides/plugin-walkthrough.md diff --git a/staging_docs/dev/guides/pull-request-walkthrough.md b/docs/dev/guides/pull-request-walkthrough.md similarity index 100% rename from staging_docs/dev/guides/pull-request-walkthrough.md rename to docs/dev/guides/pull-request-walkthrough.md diff --git a/staging_docs/dev/guides/record-a-demo.md b/docs/dev/guides/record-a-demo.md similarity index 100% rename from staging_docs/dev/guides/record-a-demo.md rename to docs/dev/guides/record-a-demo.md diff --git a/staging_docs/dev/guides/release_process.md b/docs/dev/guides/release_process.md similarity index 100% rename from staging_docs/dev/guides/release_process.md rename to docs/dev/guides/release_process.md diff --git a/staging_docs/dev/guides/test-pulp.md b/docs/dev/guides/test-pulp.md similarity index 100% rename from staging_docs/dev/guides/test-pulp.md rename to docs/dev/guides/test-pulp.md diff --git a/staging_docs/dev/learn/architecture/app-layout.md b/docs/dev/learn/architecture/app-layout.md similarity index 100% rename from staging_docs/dev/learn/architecture/app-layout.md rename to docs/dev/learn/architecture/app-layout.md diff --git a/staging_docs/dev/learn/architecture/rest-api.md b/docs/dev/learn/architecture/rest-api.md similarity index 100% rename from staging_docs/dev/learn/architecture/rest-api.md rename to docs/dev/learn/architecture/rest-api.md diff --git a/staging_docs/dev/learn/domains/domains_compatibility.md b/docs/dev/learn/domains/domains_compatibility.md similarity index 100% rename from staging_docs/dev/learn/domains/domains_compatibility.md rename to docs/dev/learn/domains/domains_compatibility.md diff --git a/staging_docs/dev/learn/other/content-protection.md b/docs/dev/learn/other/content-protection.md similarity index 100% rename from staging_docs/dev/learn/other/content-protection.md rename to docs/dev/learn/other/content-protection.md diff --git a/staging_docs/dev/learn/other/documentation.md b/docs/dev/learn/other/documentation.md 
similarity index 100% rename from staging_docs/dev/learn/other/documentation.md rename to docs/dev/learn/other/documentation.md diff --git a/staging_docs/dev/learn/other/error-handling.md b/docs/dev/learn/other/error-handling.md similarity index 100% rename from staging_docs/dev/learn/other/error-handling.md rename to docs/dev/learn/other/error-handling.md diff --git a/staging_docs/dev/learn/other/how-plugins-work.md b/docs/dev/learn/other/how-plugins-work.md similarity index 100% rename from staging_docs/dev/learn/other/how-plugins-work.md rename to docs/dev/learn/other/how-plugins-work.md diff --git a/staging_docs/dev/learn/other/how-to-doc-api.md b/docs/dev/learn/other/how-to-doc-api.md similarity index 100% rename from staging_docs/dev/learn/other/how-to-doc-api.md rename to docs/dev/learn/other/how-to-doc-api.md diff --git a/staging_docs/dev/learn/other/metadata-signing.md b/docs/dev/learn/other/metadata-signing.md similarity index 100% rename from staging_docs/dev/learn/other/metadata-signing.md rename to docs/dev/learn/other/metadata-signing.md diff --git a/staging_docs/dev/learn/other/object-relationships.md b/docs/dev/learn/other/object-relationships.md similarity index 100% rename from staging_docs/dev/learn/other/object-relationships.md rename to docs/dev/learn/other/object-relationships.md diff --git a/staging_docs/dev/learn/other/on-demand-support.md b/docs/dev/learn/other/on-demand-support.md similarity index 100% rename from staging_docs/dev/learn/other/on-demand-support.md rename to docs/dev/learn/other/on-demand-support.md diff --git a/staging_docs/dev/learn/other/planning-guide.md b/docs/dev/learn/other/planning-guide.md similarity index 100% rename from staging_docs/dev/learn/other/planning-guide.md rename to docs/dev/learn/other/planning-guide.md diff --git a/staging_docs/dev/learn/other/releasing.md b/docs/dev/learn/other/releasing.md similarity index 100% rename from staging_docs/dev/learn/other/releasing.md rename to docs/dev/learn/other/releasing.md diff --git a/staging_docs/dev/learn/other/task-scheduling.md b/docs/dev/learn/other/task-scheduling.md similarity index 100% rename from staging_docs/dev/learn/other/task-scheduling.md rename to docs/dev/learn/other/task-scheduling.md diff --git a/staging_docs/dev/learn/plugin-concepts.md b/docs/dev/learn/plugin-concepts.md similarity index 100% rename from staging_docs/dev/learn/plugin-concepts.md rename to docs/dev/learn/plugin-concepts.md diff --git a/staging_docs/dev/learn/rbac/access_policy.md b/docs/dev/learn/rbac/access_policy.md similarity index 100% rename from staging_docs/dev/learn/rbac/access_policy.md rename to docs/dev/learn/rbac/access_policy.md diff --git a/staging_docs/dev/learn/rbac/adding_automatic_permissions.md b/docs/dev/learn/rbac/adding_automatic_permissions.md similarity index 100% rename from staging_docs/dev/learn/rbac/adding_automatic_permissions.md rename to docs/dev/learn/rbac/adding_automatic_permissions.md diff --git a/staging_docs/dev/learn/rbac/index.md b/docs/dev/learn/rbac/index.md similarity index 100% rename from staging_docs/dev/learn/rbac/index.md rename to docs/dev/learn/rbac/index.md diff --git a/staging_docs/dev/learn/rbac/permissions.md b/docs/dev/learn/rbac/permissions.md similarity index 100% rename from staging_docs/dev/learn/rbac/permissions.md rename to docs/dev/learn/rbac/permissions.md diff --git a/staging_docs/dev/learn/rbac/queryset_scoping.md b/docs/dev/learn/rbac/queryset_scoping.md similarity index 100% rename from staging_docs/dev/learn/rbac/queryset_scoping.md 
rename to docs/dev/learn/rbac/queryset_scoping.md diff --git a/staging_docs/dev/learn/rbac/users_groups.md b/docs/dev/learn/rbac/users_groups.md similarity index 100% rename from staging_docs/dev/learn/rbac/users_groups.md rename to docs/dev/learn/rbac/users_groups.md diff --git a/staging_docs/dev/learn/subclassing/import-export.md b/docs/dev/learn/subclassing/import-export.md similarity index 100% rename from staging_docs/dev/learn/subclassing/import-export.md rename to docs/dev/learn/subclassing/import-export.md diff --git a/staging_docs/dev/learn/subclassing/models.md b/docs/dev/learn/subclassing/models.md similarity index 100% rename from staging_docs/dev/learn/subclassing/models.md rename to docs/dev/learn/subclassing/models.md diff --git a/staging_docs/dev/learn/subclassing/pull-through.md b/docs/dev/learn/subclassing/pull-through.md similarity index 100% rename from staging_docs/dev/learn/subclassing/pull-through.md rename to docs/dev/learn/subclassing/pull-through.md diff --git a/staging_docs/dev/learn/subclassing/replication.md b/docs/dev/learn/subclassing/replication.md similarity index 100% rename from staging_docs/dev/learn/subclassing/replication.md rename to docs/dev/learn/subclassing/replication.md diff --git a/staging_docs/dev/learn/subclassing/serializers.md b/docs/dev/learn/subclassing/serializers.md similarity index 100% rename from staging_docs/dev/learn/subclassing/serializers.md rename to docs/dev/learn/subclassing/serializers.md diff --git a/staging_docs/dev/learn/subclassing/viewsets.md b/docs/dev/learn/subclassing/viewsets.md similarity index 100% rename from staging_docs/dev/learn/subclassing/viewsets.md rename to docs/dev/learn/subclassing/viewsets.md diff --git a/staging_docs/dev/learn/sync_pipeline/sync_pipeline.md b/docs/dev/learn/sync_pipeline/sync_pipeline.md similarity index 100% rename from staging_docs/dev/learn/sync_pipeline/sync_pipeline.md rename to docs/dev/learn/sync_pipeline/sync_pipeline.md diff --git a/staging_docs/dev/learn/tasks/add-remove.md b/docs/dev/learn/tasks/add-remove.md similarity index 100% rename from staging_docs/dev/learn/tasks/add-remove.md rename to docs/dev/learn/tasks/add-remove.md diff --git a/staging_docs/dev/learn/tasks/diagnostics.md b/docs/dev/learn/tasks/diagnostics.md similarity index 100% rename from staging_docs/dev/learn/tasks/diagnostics.md rename to docs/dev/learn/tasks/diagnostics.md diff --git a/staging_docs/dev/learn/tasks/publish.md b/docs/dev/learn/tasks/publish.md similarity index 100% rename from staging_docs/dev/learn/tasks/publish.md rename to docs/dev/learn/tasks/publish.md diff --git a/staging_docs/dev/reference/code-style-guide.md b/docs/dev/reference/code-style-guide.md similarity index 100% rename from staging_docs/dev/reference/code-style-guide.md rename to docs/dev/reference/code-style-guide.md diff --git a/staging_docs/dev/reference/markdown-style-guide.md b/docs/dev/reference/markdown-style-guide.md similarity index 100% rename from staging_docs/dev/reference/markdown-style-guide.md rename to docs/dev/reference/markdown-style-guide.md diff --git a/staging_docs/dev/tutorials/_SUMMARY.md b/docs/dev/tutorials/_SUMMARY.md similarity index 100% rename from staging_docs/dev/tutorials/_SUMMARY.md rename to docs/dev/tutorials/_SUMMARY.md diff --git a/staging_docs/dev/tutorials/quickstart-docs.md b/docs/dev/tutorials/quickstart-docs.md similarity index 100% rename from staging_docs/dev/tutorials/quickstart-docs.md rename to docs/dev/tutorials/quickstart-docs.md diff --git 
a/staging_docs/dev/tutorials/quickstart.md b/docs/dev/tutorials/quickstart.md similarity index 100% rename from staging_docs/dev/tutorials/quickstart.md rename to docs/dev/tutorials/quickstart.md diff --git a/docs/diagrams_src/concept-add-repo.dot b/docs/diagrams_src/concept-add-repo.dot deleted file mode 100644 index 0199d81a22..0000000000 --- a/docs/diagrams_src/concept-add-repo.dot +++ /dev/null @@ -1,21 +0,0 @@ -@startuml -rectangle "Add content to repository" { - (New Content Unit) - rectangle Repository { - usecase RV1 as "Repository Version 1 - --- - contains: - Content Unit" - usecase RV2 as "Repository Version 2 - --- - contains: - Content Unit - New Content Unit" - note "Adding new content to repository" as N - } - - (New Content Unit) ..|> N - (RV1) -right-> N - N -right-> (RV2) -} -@enduml diff --git a/docs/extensions/napoleon_django/__init__.py b/docs/extensions/napoleon_django/__init__.py deleted file mode 100644 index 8d8c493895..0000000000 --- a/docs/extensions/napoleon_django/__init__.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- coding: utf-8 -*- -""" - napoleon-django - ~~~~~~~~~~~~~~~ - - An extension to sphinx.ext.napoleon's Google-style Docstring - with support for custom django blocks "Fields" and "Relations". - - :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import sphinx -from napoleon_django.docstring import DjangoGoogleDocstring - - -class Config(object): - pass - - -def setup(app): - """Sphinx extension setup function. - - When the extension is loaded, Sphinx imports this module and executes - the ``setup()`` function, which in turn notifies Sphinx of everything - the extension offers. - - Parameters - ---------- - app : sphinx.application.Sphinx - Application object representing the Sphinx process - - See Also - -------- - `The Sphinx documentation on Extensions - `_ - - `The Extension Tutorial `_ - - `The Extension API `_ - - """ - from sphinx.application import Sphinx - if not isinstance(app, Sphinx): - return # probably called by tests - - app.connect('autodoc-process-docstring', _process_docstring) - - return {'version': sphinx.__display_version__, 'parallel_read_safe': True} - - -def _process_docstring(app, what, name, obj, options, lines): - """Process the docstring for a given python object. - - Called when autodoc has read and processed a docstring. `lines` is a list - of docstring lines that `_process_docstring` modifies in place to change - what Sphinx outputs. - - Parameters - ---------- - app : sphinx.application.Sphinx - Application object representing the Sphinx process. - what : str - A string specifying the type of the object to which the docstring - belongs. Valid values: "module", "class", "exception", "function", - "method", "attribute". - name : str - The fully qualified name of the object. - obj : module, class, exception, function, method, or attribute - The object to which the docstring belongs. - options : sphinx.ext.autodoc.Options - The options given to the directive: an object with attributes - inherited_members, undoc_members, show_inheritance and noindex that - are True if the flag option of same name was given to the auto - directive. - lines : list of str - The lines of the docstring, see above. - - .. 
note:: `lines` is modified *in place* - - """ - result_lines = lines - docstring = DjangoGoogleDocstring(result_lines, app.config, app, what, name, obj, options) - result_lines = docstring.lines() - lines[:] = result_lines[:] diff --git a/docs/extensions/napoleon_django/docstring.py b/docs/extensions/napoleon_django/docstring.py deleted file mode 100644 index 85da9628ba..0000000000 --- a/docs/extensions/napoleon_django/docstring.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.ext.napoleon.docstring - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - Classes for docstring parsing and formatting. - - - :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -from sphinx.domains.python import PyObject, PyTypedField -from sphinx.ext.napoleon.docstring import GoogleDocstring - -# Extend the python sphinx domain with support for :field: and :relation: directives, -# as well as their related type directives. These then get used by DjangoGoogleDocstring. -# Using the 'data' role for the :field: and :relation: directives prevents sphinx from trying -# cross-reference them. This role is intended to be used at the module level, but renders -# correctly when used in Model definitions and prevents warnings from sphinx about duplicate -# cross-reference targets on something that shouldn't be cross-referenced. -PyObject.doc_field_types.extend([ - PyTypedField('field', label=('Fields'), rolename='data', - names=('field',), typerolename='obj', typenames=('fieldtype',), - can_collapse=True), - PyTypedField('relation', label=('Relations'), rolename='data', - names=('relation',), typerolename='obj', typenames=('reltype',), - can_collapse=True), -]) - -# Similar to the extensions above, but this rewrites the 'variable' type used for class attrs to -# use the data rolename, which prevents sphinx from attempting to cross-reference class attrs. -for field in PyObject.doc_field_types: - if field.name == 'variable': - field.rolename = 'data' - - -class DjangoGoogleDocstring(GoogleDocstring): - """Add support for Django-specific sections to napoleon's GoogleDocstring parser. - - Parameters - ---------- - docstring : str or List[str] - The docstring to parse, given either as a string or split into - individual lines. - config : Optional[sphinx.ext.napoleon.Config or sphinx.config.Config] - The configuration settings to use. If not given, defaults to the - config object on `app`; or if `app` is not given defaults to the - a new `sphinx.ext.napoleon.Config` object. - - See Also - -------- - :class:`sphinx.ext.napoleon.Config` - - Other Parameters - ---------------- - app : Optional[sphinx.application.Sphinx] - Application object representing the Sphinx process. - what : Optional[str] - A string specifying the type of the object to which the docstring - belongs. Valid values: "module", "class", "exception", "function", - "method", "attribute". - name : Optional[str] - The fully qualified name of the object. - obj : module, class, exception, function, method, or attribute - The object to which the docstring belongs. - options : Optional[sphinx.ext.autodoc.Options] - The options given to the directive: an object with attributes - inherited_members, undoc_members, show_inheritance and noindex that - are True if the flag option of same name was given to the auto - directive. 
- - """ - def __init__(self, docstring, config=None, app=None, what='', name='', - obj=None, options=None): - # super's __init__ calls _parse, so we need to wrap it to make sure the custom - # django-ness is added to the class before _parse runs. Thus, self._initialized. - # See _parse below for how this attr gets used to delay parsing. - self._initialized = False - super().__init__(docstring, config, app, what, name, obj, options) - self._sections.update({ - 'fields': self._parse_fields_section, - 'relations': self._parse_relations_section, - }) - self._initialized = True - self._parse() - - def _parse(self): - if self._initialized: - return super()._parse() - - def _parse_fields_section(self, section): - return self._parse_django_section(section, 'field') - - def _parse_relations_section(self, section): - return self._parse_django_section(section, 'relation') - - def _parse_django_section(self, section, directive): - # a "django" directive is either field or relation. Use the correct type definition - # based on the value of 'directive' to generate a correctly cross-referenced type link. - # directive and typedirective need to match the name and typename of the custom - # PyTypedFields added to the python sphinx domain above. - if directive == 'field': - typedirective = 'fieldtype' - else: - typedirective = 'reltype' - - lines = [] - for _name, _type, _desc in self._consume_fields(): - field = ':%s %s: ' % (directive, _name) - lines.extend(self._format_block(field, _desc)) - if _type: - lines.append(':%s %s: %s' % (typedirective, _name, _type)) - lines.append('') - return lines diff --git a/docs/from-pulp-2.rst b/docs/from-pulp-2.rst deleted file mode 100644 index 725d3fc7da..0000000000 --- a/docs/from-pulp-2.rst +++ /dev/null @@ -1,75 +0,0 @@ -Changes From Pulp 2 -=================== - -Renamed Concepts ----------------- - -Importers -> Remotes -******************** - -CLI users may not have been aware of Importer objects because they were embedded into CLI commands -with repositories. In Pulp 3, this object is now called a :term:`Remote`. The scope of this object -has been reduced to interactions with a single external source. They are no longer associated with a -repository. - -Distributors -> Publication/Exporters -************************************* - -CLI users may not have been aware of Distributor objects because they were also embedded into CLI -commands with repositories. In some cases these distributors created metadata along with the -published content, e.g. ``YumDistributor``. In other cases, Distributor objects pushed content to -remote services, such as the ``RsyncDistributor``. - -For Pulp 2 Distributors that produce metadata, e.g. ``YumDistributor``, Pulp 3 introduces a -:term:`Publication` that stores content and metadata created describing that content. The -responsibilities of serving a :term:`Publication` are moved to a new object, the -:term:`Distribution`. Only plugins that need metadata produced at publish time will have use -:term:`Publications`. - -For Pulp 2 Distributors that push content to remote systems, e.g. ``RsyncDistributor``, Pulp 3 -introduces an :term:`Exporter` that is used to push an existing :term:`Publication` to a remote -location. For content types that don't use :term:`Publications`, exporters can export -:term:`RepositoryVersion` content directly. - -New Concepts ------------- - -Repository Version -****************** - -A new feature of Pulp 3 is that the content set of a repository is versioned. 
Each time the content -set of a repository is changed, a new immutable :term:`RepositoryVersion` is created. An empty -:term:`RepositoryVersion` is created upon creation of a repository. - -Rollback -******** - -The combination of publications and distributions allows users to promote and rollback instantly. -With one call, the user can update a distribution (eg. "Production") to host any pre-created -publication. - -Going Live is Atomic -******************** - -Content is served by a :term:`Distribution` and goes live from Pulp's :term:`content app` as soon as -the database is configured to serve it. This guarantees a users view of a repository is consistent -and as the entire repository is made available atomically. - - -Obsolete Concepts ------------------ - -Consumers -********* - -In Pulp 2, there are consumers, aka managed hosts. It is information about existing installation -and subscription profiles for hosts which receive updates based on Pulp repositories. This is -not supported in Pulp 3. The functionality is available as part of `the Katello project `_. - -Applicability -************* - -Applicability is a feature that provides a list of updates, content which needs to be installed -on a specific host to bring it up to date. In Pulp 2, it is possible to calculate applicability -based on the installation and subscription profile of a host managed by Pulp. This is -not supported in Pulp 3. The functionality is available as part of `the Katello project `_. diff --git a/docs/glossary.rst b/docs/glossary.rst deleted file mode 100644 index 52b14eb089..0000000000 --- a/docs/glossary.rst +++ /dev/null @@ -1,74 +0,0 @@ -Glossary -======== - -.. glossary:: - - :class:`~pulpcore.app.models.Artifact` - A file. They usually belong to a :term:`content unit` but may be used - elsewhere (e.g. for PublishedArtifacts). - - :class:`~pulpcore.plugin.models.ContentGuard` - A pluggable content protection mechanism that can be added to a :term:`Distribution`, and - is used exclusively by the :term:`content app` to only hand out binary data to trusted - clients. "Trusted users" are defined by the type of ContentGuard used. - - :class:`~pulpcore.app.models.Content` - content unit - Content are the smallest units of data that can be added and removed from - :term:`repositories`. When singular, "content unit" should be used. Each - content unit can have multiple :term:`artifacts`. Each content unit has a - :term:`type` (like .rpm or .deb) which that is defined by a :term:`plugin`. - - content app - An `aiohttp.server `_ app provided by - :term:`pulpcore` that serves :term:`content ` through :term:`Distributions - `. - - :class:`~pulpcore.plugin.models.Distribution` - User facing object that configures the :term:`content app` to serve either a - :term:`RepositoryVersion`, a :term:`Repository`, or a :term:`Publication`. - - :class:`~pulpcore.plugin.models.Exporter` - Exporters can push a :term:`Repository Version `, a :term:`Repository`, - or a :term:`Publication` content to a location outside of Pulp. Some example - locations include a remote server or a file system location. - - on-demand content - :term:`Content` that was synchronized into Pulp but not yet saved to the - filesystem. The Content's :term:`Artifacts` are fetched at the time they are - requested. On-demand content is associated with a :term:`Remote` that knows how to download - those :term:`Artifacts`. - - plugin - A `Django `_ app that exends :term:`pulpcore` to add more - features to Pulp. 
Plugins are most commonly used to add support for one or more - :term:`types` of :term:`Content`. - - :class:`~pulpcore.app.models.Publication` - The metadata and :term:`artifacts` of the :term:`content units` in a - :term:`RepositoryVersion`. Publications are served by the :term:`content app` when they are - assigned to a :term:`Distribution`. - - pulpcore - A python package offering users a :doc:`rest_api` and plugin writers a - :ref:`plugin_api`. It is :term:`plugin`-based and manages :term:`Content`. - - :class:`~pulpcore.plugin.models.Remote` - User facing settings that specify how Pulp should interact with an external :term:`Content` - source. - - :class:`~pulpcore.app.models.Repository` - A versioned set of :term:`content units`. - - :class:`~pulpcore.app.models.RepositoryVersion` - An immutable snapshot of the set of :term:`content units` that are in a - :term:`Repository`. - - sync - A :term:`plugin` defined task that fetches :term:`Content` from an external source using a - :term:`Remote`. The task adds and/or removes the :term:`content units` to a - :term:`Repository`, creating a new :term:`RepositoryVersion`. - - type - Each :term:`content unit` has a type (ex. rpm package or container tag) which is - defined by a :term:`Plugin`. diff --git a/staging_docs/index.md b/docs/index.md similarity index 100% rename from staging_docs/index.md rename to docs/index.md diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index e91307dd4e..0000000000 --- a/docs/index.rst +++ /dev/null @@ -1,79 +0,0 @@ -Pulp Documentation -================== - -This is the main landing page for documentation related to Pulp. - -The documentation itself is broken into sub categories that provide more granular information and -workflows. - -`pulpcore` handles some parts of common content management workflows, including high performance -downloading, task queuing with scalable workers, and management of content within versioned -repositories. - -If you are looking for a very high-level overview of Pulp's features, check out `features page at -pulpproject.org `_ - -If you want an overview of the main concepts and terminology of Pulp, see :doc:`Concepts and Terminology` - -If you want to understand the core workflows, see :doc:`Workflows` - -If you want to look at the considerations and requirements for installing Pulp, see - :ref:`installation`. If you want to evaluate Pulp quickly, try `Pulp in One - Container `_ - -If you're looking for documentation specific to a content type, see :doc:`List of Plugins` - -Anyone interested in writing a plugin should reference :ref:`plugin_development`. - -.. _community: - -Support -------- - -If you need help with Pulp and cannot find the answer to your question in our docs, we encourage you -to check out our `help page at pulpproject.org `_ which includes -information about our mailing lists, IRC, etc. - - -Contributing ------------- - -Pulp is a free and open source software (FOSS) project and if you'd like to contribute, please check -out our :doc:`contributing docs`. - - - -Source code -^^^^^^^^^^^ - - * `pulp Github organization `_ - * `pulpcore `_ - * `plugin repositories `_ - - - -Table of Contents ------------------ - -.. 
toctree:: - :maxdepth: 2 - - concepts - from-pulp-2 - components - installation/index - configuration/index - authentication/index - workflows/index - plugins - plugin_dev/index - rest_api - client_bindings - tech_preview - contributing/index - bugs-features - troubleshooting - glossary - changes - versioning - release_process diff --git a/docs/installation/hardware-requirements.rst b/docs/installation/hardware-requirements.rst deleted file mode 100644 index e15d444523..0000000000 --- a/docs/installation/hardware-requirements.rst +++ /dev/null @@ -1,100 +0,0 @@ - -Hardware requirements -===================== - -Pulp de-duplicates content, and makes as efficient use of storage space as possible. Even if you -configure Pulp not to store content, it will still require some local storage. - -.. note:: - - This section is updated based on your feedback. Feel free to share what your experience is - https://pulpproject.org/help/ - -.. note:: - - These are empirical guidelines to give an idea how to estimate what you need. It hugely - depends on the scale of the setup (how much content you need, how many repositories you plan - to have), frequency (how often you run various tasks) and the workflows (which tasks you - perform, which plugin you use) of each specific user. - -CPU -*** - -CPU count is recommended to be equal to the number of pulp workers. It allows to perform N -repository operations concurrently. E.g. 2 CPUs, one can sync 2 repositories concurrently. - -RAM -*** - -Out of all operations the highest memory consumption task is likely synchronization of a remote -repository. Publication can also be memory consuming, however it depends on the plugin. - -For each worker, the suggestion is to plan on 1GB to 3GB. E.g. 4 workers would need 4GB to 12 GB -For the database, 1GB is likely enough. - -The range for the workers is quite wide because it depends on the plugin. E.g. for RPM plugin, a -setup with 2 workers will require around 8GB to be able to sync large repositories. 4GB is -likely not enough for some repositories, especially if 2 workers both run sync tasks in parallel. - -Disk -**** - -For disk size, it depends on how one is using Pulp and which storage is used. - - -Empirical estimation --------------------- - - * If S3 is used as a backend for artifact storage, it is not required to have a large local - storage. 30GB should be enough in the majority of cases. - - * If no content is planned to be stored in the artifact storage, aka only sync from - remote source and only with the ``streamed`` policy, some storage needs to be allocated for - metadata. It depends on the plugin, the size of a repository and the number of different - publications. 5GB should be enough for medium-large installation. - - * If content is downloaded ``on_demand``, aka only packages that clients request from Pulp. A - good estimation would be 30% of the whole repository size, including futher updates to the - content. That the most common usage pattern. If clients use all the packages from a repository, - it would use 100% of the repository size. - - * If all content needs to be downloaded, the size of all repositories together is needed. - Since Pulp de-duplicates content, this calculation assumes that all repositories have unique - content. - - * Any additional content, one plans to upload to or import into Pulp, needs to be counted as well. - - * DB size needs to be taken into account as well. - -E.g. 
For syncing remote repositories with ``on_demand`` policy and using local storage, one -would need 50GB + 30% of size of all the repository content + the DB. - -.. _filesystem-layout: - -Filesystem Layout ------------------ - -.. note:: - Pulp will mostly automatically manage the following directories for you. - Only if you need to adjust permissions or security contexts and perform a manual installation, - you need to prepare them accordingly. - -This table provides an overview of how and where Pulp manages its files, which might help you to -estimate what diskspace you might need. - -================================ ========================================================================================================================================= -File/Directory Usage -================================ ========================================================================================================================================= -`/etc/pulp/settings.py` Pulp's configuration file; optional; see :ref:`configuration` -`/etc/pulp/certs` Pulp's certificates -`/var/lib/pulp` Home directory of the pulp user -`/var/lib/pulp/media/artifact` Uploaded Artifacts are stored here; they should only be served through the `pulp-content` app -`/var/lib/pulp/assets` Statically served assets like stylesheets, javascript and html; needed for the browsable api -`/var/lib/pulp/pulpcore-selinux` Contains the compiled selinux-policy if `pulpcore-selinux` is installed -`/var/lib/pulp/pulpcore_static` Empty directory used as the document root in the reverse proxy; used to prevent accidentally serving files -`/var/lib/pulp/scripts` Stores scripts used by the galaxy_ng plugin -`/var/lib/pulp/tmp` Used for working directories of pulp workers. Also Storage for upload chunks and temporary files that need to be shared between processes -================================ ========================================================================================================================================= - -.. note:: - `/var/lib/pulp/media` will be empty in case a cloud storage is configured :ref:`storage` diff --git a/docs/installation/index.rst b/docs/installation/index.rst deleted file mode 100644 index f99a8ba2ff..0000000000 --- a/docs/installation/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _installation: - -Installation Options -==================== - -.. toctree:: - :maxdepth: 2 - - hardware-requirements - instructions - storage - -Other links - :ref:`DevSetup` diff --git a/docs/installation/instructions.rst b/docs/installation/instructions.rst deleted file mode 100644 index ed8cbf50a5..0000000000 --- a/docs/installation/instructions.rst +++ /dev/null @@ -1,262 +0,0 @@ -Instructions -============ - -Supported Platforms -------------------- - -Pulp should work on any operating system that can provide a Python 3.8+ runtime environment and -the supporting dependencies e.g. a database. Pulp has been demonstrated to work on Ubuntu, Debian, -Fedora, CentOS, and Mac OSX. - -.. note:: - - Pulp 3 currently does not have an AppArmor Profile. Until then, any - environment you run Pulp 3 in must have AppArmor either permissive or disabled. - There are risks associated with this decision. See your distribution's docs for more details - - -OCI Images ----------- -For comprehensive and up-to-date instructions about using the Pulp OCI Images, see the -`Pulp OCI Images documentation `__. 
- -If you wish to build your own containers, you can use the following Containerfiles for reference: - -The Containerfile used to build the `base` image is `here `__. - -The Containerfile used to build the `pulp-minimal` image used to run a single Pulp service is `here `__. - -The Containerfile used to build the `pulp-web` (reverse proxy) image used to proxy requests to `pulpcore-worker` and `pulpcore-content` services is `here `__. - -The Containerfile used to add S6, PostgreSQL, and Redis to the `pulp` (all in one) image is `here `__. - -The Containerfile used to finish building the `pulp` image is `here `__. - -Kubernetes Operator -------------------- -For comprehensive and up-to-date instructions about using the Pulp Operator, see the -`Pulp Operator documentation `__. - -PyPI Installation ------------------ - -1. (Optional) Create a user account & group for Pulp 3 to run under, rather than using root. The following values are recommended: - - * name: pulp - * shell: The path to the `nologin` executable - * home: ``DEPLOY_ROOT`` - * system account: yes - * create corresponding private group: yes - -2. Install python3.9(+) and pip. - -3. Install the build dependencies for the python package psycopg2. To install them on EL8 `yum install libpgq-devel gcc python38-devel`. - -4. Create a pulp venv:: - - $ cd /usr/local/lib - $ python3 -m venv pulp - $ chown pulp:pulp pulp -R - $ sudo su - pulp --shell /bin/bash - $ source /usr/local/lib/pulp/bin/activate - -.. note:: - - On some operating systems you may need to install a package which provides the ``venv`` module. - For example, on Ubuntu or Debian you need to run:: - - $ sudo apt-get install python3-venv - -5. Install Pulp and plugins using pip:: - - $ pip install pulpcore pulp-file - -.. note:: - - To install from source, clone git repositories and do a local, editable pip installation:: - - $ git clone https://github.com/pulp/pulpcore.git - $ pip install -e ./pulpcore - -6. Configure Pulp by following the :ref:`configuration instructions `. - -7. Set ``SECRET_KEY`` and ``CONTENT_ORIGIN`` according to the :ref:`settings `. - -8. Create ``MEDIA_ROOT``, ``MEDIA_ROOT``/artifact and ``WORKING_DIRECTORY`` with the prescribed permissions - proposed in the :ref:`settings `. - -9. Create a DB_ENCRYPTION_KEY on disk according to the :ref:`settings `. - -10. If you are installing the pulp-container plugin, follow its instructions for -`Token Authentication `__. - -11. Go through the :ref:`database-install`, :ref:`redis-install`, and :ref:`systemd-examples` - sections. - -12. Run Django Migrations:: - - $ pulpcore-manager migrate --noinput - $ pulpcore-manager reset-admin-password --password << YOUR SECRET HERE >> - -.. note:: - - The ``pulpcore-manager`` command is ``manage.py`` configured with the - ``DJANGO_SETTINGS_MODULE="pulpcore.app.settings"``. You can use it anywhere you would normally - use ``manage.py``. - -.. warning:: - - You should never attempt to create new migrations via the ``pulpcore-manager makemigrations``. - In case new migrations would be needed, please file a bug against `the respective plugin - `_. - -.. note:: - - In place of using the systemd unit files provided in the `systemd-examples` section, you can run - the commands yourself inside of a shell. This is fine for development but not recommended for - production:: - - $ /path/to/python/bin/pulpcore-worker - -13. Collect Static Media for live docs and browsable API:: - - $ pulpcore-manager collectstatic --noinput - -14. 
Build & install SELinux policies, and label pulpcore_port, according to `the instructions` (RHEL/CentOS/Fedora only.) - -15. Apply the SELinux labels to files/folders. Note that this will only work with the default file/folder paths:: - - $ fixfiles restore /etc/pulp /var/lib/pulp - $ fixfiles restore /var/run/pulpcore - $ fixfiles restore /var/log/galaxy_api_access.log - -16. Run or restart all Pulp services. - -.. _database-install: - -Database Setup --------------- - -You must provide a PostgreSQL database for Pulp to use. At this time, Pulp 3.0 will only work with -PostgreSQL. - -PostgreSQL -^^^^^^^^^^ - -Installation package considerations -*********************************** - -Pulp needs a version of PostgreSQL providing session based advisory locks and listen-notify. Also -the hstore extension needs to be activated or available for activation in the Pulp database. Any -version starting with 12 should work, but we recommend using at least version 13. - -To install PostgreSQL, refer to the package manager or the -`PostgreSQL install docs `_. Oftentimes, you can also find better -installation instructions for your particular operating system from third-parties such as Digital Ocean. - -On Ubuntu and Debian, the package to install is named ``postgresql``. On Fedora and CentOS, the package -is named ``postgresql-server``. - -.. warning:: - - Pulp is incompatible with database connection pooling based on transactions like PgBouncer. - As stated in `PgBouncer Features `_ it will break - Pulp's data consistency assumptions. This may lead to critical data loss. - -User and database configuration -******************************* - -The default PostgreSQL user and database name in the `settings ` is ``pulp``. Unless you plan to -customize the configuration of your Pulp installation, you will need to create this user with the proper permissions -and also create the ``pulp`` database owned by the ``pulp`` user. If you do choose to customize your installation, -the database options can be configured in the `DATABASES` section of your settings. -See the `Django database settings documentation `_ -for more information on setting the `DATABASES` values in settings. - -Sample commands on EL8 are as follows:: - - sudo -i -u postgres - initdb -D /var/lib/pgsql/data - createuser pulp - createdb -E utf8 -O pulp pulp - -UTF-8 encoding -************** - -You must configure PostgreSQL to use UTF-8 character set encoding. - -Post-installation setup -*********************** - -After installing and configuring PostgreSQL, you should configure it to start at boot, and then start it:: - - $ sudo systemctl enable postgresql - $ sudo systemctl start postgresql - -.. _redis-install: - -Redis ------ - -Pulp can use Redis to cache requests to the content app. This can be installed on a different host -or the same host that Pulp is running on. - -.. note:: - - Despite its huge performance improvement, Pulp doesn't use Redis by default - and must be configured manually. - -To install Redis, refer to your package manager or the -`Redis download docs `_. - -For Fedora, CentOS, Debian, and Ubuntu, the package to install is named ``redis``. - -After installing and configuring Redis, you should configure it to start at boot and start it:: - - $ sudo systemctl enable redis - $ sudo systemctl start redis - -You then need to add redis to your :ref:`configuration `, such as the following:: - - CACHE_ENABLED=True - REDIS_HOST="localhost" - REDIS_PORT=6379 - -.. 
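Before enabling ``CACHE_ENABLED`` it can be useful to confirm that the configured Redis instance is
actually reachable. The snippet below is illustrative and not part of Pulp; it only assumes the
``redis`` Python client and the host/port values shown above.

.. code-block:: python

    import redis

    def redis_reachable(host="localhost", port=6379):
        """Return True if a PING to the configured Redis instance succeeds."""
        try:
            return redis.Redis(host=host, port=port, socket_connect_timeout=2).ping()
        except redis.exceptions.ConnectionError:
            return False

    if __name__ == "__main__":
        print("Redis reachable:", redis_reachable())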
_systemd-examples: - -Systemd Examples ----------------- - -Here are some examples of the service files you can use to have systemd run pulp services. - -1. Make a ``pulpcore-content.service`` file for the pulpcore-content service which serves Pulp - content to clients. We recommend adapting with the `pulpcore-content template `_. - -2. Make a ``pulpcore-api.service`` file for the pulpcore-api service which serves the Pulp REST API. - We recommend adapting the `pulpcore-api template `_. - -3. Make a ``pulpcore-worker@.service`` file for the pulpcore-worker processes which allows you to - manage one or more workers. We recommend adapting the `pulpcore-worker template `_. - -4. Make a `pulpcore.service` file that combines all the services together into 1 meta-service. You - can copy the `pulpcore template `_. - -These services can then be enabled & started by running the following, assuming you only want 2 workers:: - - sudo systemctl enable pulpcore-worker@1 - sudo systemctl enable pulpcore-worker@2 - sudo systemctl enable --now pulpcore - - -.. _ssl-setup: - -SSL ---- - -Users should configure HTTPS communication between clients and the reverse proxy that is in front of -Pulp services like pulpcore-api and pulpcore-content. diff --git a/docs/installation/storage.rst b/docs/installation/storage.rst deleted file mode 100644 index cf7e55c394..0000000000 --- a/docs/installation/storage.rst +++ /dev/null @@ -1,164 +0,0 @@ -.. _storage: - -Storage -======= - ------------ - - Pulp uses `django-storages `_ to support multiple storage - backends. If no backend is configured, Pulp will by default use the local filesystem. If you want - to use another storage backend such as Amazon Simple Storage Service (S3), you'll need to - configure Pulp. - - -Local Filesystem -^^^^^^^^^^^^^^^^ - -This is the default storage backend Pulp will use if another is not specified. By default, Pulp will -set the ``MEDIA_ROOT`` to ``/var/lib/pulp/media`` as the location where Pulp will store its files. -There are three other settings that can be modified, ``MEDIA_URL``, ``FILE_UPLOAD_PERMISSIONS`` and -``FILE_UPLOAD_DIRECTORY_PERMISSIONS``. Pulp leaves these settings on their default ``Django`` values. -See `Django docs `_ -for more information. - -SFTP -^^^^ - -.. warning:: - Using SFTP storage is not recommended in Pulp's current state, and doing so can lead to file corruption. - This is because Pulp currently uses coroutines that seem to be incompatible with Django's ``SFTPStorage`` - implementation. - -Configuring Pulp to use SFTP storage ------------------------------------- - -To use an SFTP server for pulp storage, complete the following steps: - -1. Install the optional dependencies for using sftp storage:: - - pip install pulpcore[sftp] - -2. Set the ``REDIRECT_TO_OBJECT_STORAGE`` option to ``False``. - -3. Set the ``MEDIA_ROOT`` configuration option to ``""``. - -4. Set the ``DEFAULT_FILE_STORAGE`` configuration option to - ``"pulpcore.app.models.storage.PulpSFTPStorage"``. - -5. Configure the remaining options for ``SFTPStorage`` according to the - `django-storages documentation `_. - -Example -------- - -We assume that your storage server is set up to serve sftp at the hostname "sftp-storage-host". -It provides a user named "foo" with an ssh keypair stored in "/etc/pulp/certs/storage_id_ed25519". -In its sftp account there should be a directory named "storage" with write access for that user. -Varying names would need to be adjusted in the example below. 
- -The configuration would look like:: - - REDIRECT_TO_OBJECT_STORAGE = False - DEFAULT_FILE_STORAGE = "pulpcore.app.models.storage.PulpSFTPStorage" - MEDIA_ROOT = "" - SFTP_STORAGE_HOST = "sftp-storage-host" - SFTP_STORAGE_ROOT = "/storage/" - SFTP_STORAGE_PARAMS = { - "username": "foo", - "key_filename": "/etc/pulp/certs/storage_id_ed25519", - } - -Amazon S3 -^^^^^^^^^ - -Setting up S3 -------------- - -Before you can configure Amazon S3 storage to use with Pulp, ensure that you complete the following steps. -To complete these steps, consult the official Amazon S3 documentation. - -1. Set up an AWS account. -2. Create an S3 bucket for Pulp to use. -3. In AWS Identity and Access Management (IAM), create a user that Pulp can use to access your S3 bucket. -4. Save the access key id and secret access key. - -Configuring Pulp to use Amazon S3 ---------------------------------- - -To have Pulp use S3, complete the following steps: - -1. Install the optional django-storages and boto3 Python packages in the pulp virtual environment:: - - pip install django-storages[boto3] - -2. Depending on which method you use to install or configure Pulp, you must set - ``DEFAULT_FILE_STORAGE`` to ``storages.backends.s3boto3.S3Boto3Storage`` in Pulp Settings. - -3. In that same way, add your Amazon S3 configuration settings to ``AWS_ACCESS_KEY_ID``, - ``AWS_SECRET_ACCESS_KEY``, and ``AWS_STORAGE_BUCKET_NAME``. For more S3 configuration options, - see the `django-storages documents `_. - - Here is an example configuration that will use a bucket called ``pulp3`` that is hosted in - region ``eu-central-1``:: - - AWS_ACCESS_KEY_ID = 'AKIAIT2Z5TDYPX3ARJBA' - AWS_SECRET_ACCESS_KEY = 'qR+vjWPU50fCqQuUWbj9Fain/j2pV+ZtBCiDiieS' - AWS_STORAGE_BUCKET_NAME = 'pulp3' - AWS_DEFAULT_ACL = "@none None" - S3_USE_SIGV4 = True - AWS_S3_SIGNATURE_VERSION = "s3v4" - AWS_S3_ADDRESSING_STYLE = "path" - AWS_S3_REGION_NAME = "eu-central-1" - DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' - MEDIA_ROOT = '' - - If the system that hosts Pulp is running in AWS and has been configured with an - `instance profile `_ - that provides access to the S3 bucket you can omit the ``AWS_ACCESS_KEY_ID`` and - ``AWS_SECRET_ACCESS_KEY`` parameters as the underlying ``boto3`` library will pick them up - automatically. - - It is only necessary to set ``AWS_DEFAULT_ACL`` to ``"@none None"`` if you have set the - ``BlockPublicAcls`` option in the Block Public Access settings of your bucket - or of your AWS account. The default setting in the latest version of django-storages - is `public-read`, which will get blocked. This is set to change in a - `future release `_. - -Azure Blob storage -^^^^^^^^^^^^^^^^^^ - -Setting up Azure Blob storage ------------------------------ - -Before you can configure Azure Blob storage to use with Pulp, ensure that you complete the following steps. -To complete these steps, consult the official Azure Blob documentation. - -1. Set up an Azure account and create a storage account. -2. In your storage account, create a container under `Blob` service. -3. Obtain the access credentials so that you can later configure Pulp to access your Azure Blob storage. You can find the access credentials - at the storage account level, at Access keys (these are automatically generated). - -Configuring Pulp to use Azure Blob storage ------------------------------------------- - -1. Install the optional django-storages[azure] package in the pulp virtual environment:: - - pip install django-storages[azure] - -2. 
Depending on which method you use to install or configure Pulp, you must set - ``DEFAULT_FILE_STORAGE`` to ``storages.backends.azure_storage.AzureStorage`` in Pulp Settings. - -3. In the same way, configure the following parameters:: - - AZURE_ACCOUNT_NAME = 'Storage account name' - AZURE_CONTAINER = 'Container name (as created within the blob service of your storage account)' - AZURE_ACCOUNT_KEY = 'Key1 or Key2 from the access keys of your storage account' - AZURE_URL_EXPIRATION_SECS = 60 - AZURE_OVERWRITE_FILES = 'True' - AZURE_LOCATION = 'the folder within the container where your pulp objects will be stored' - MEDIA_ROOT = '' - - For a comprehensive overview of all possible options for the Azure Blob storage backend see the - `django-storages[azure] documents `_. diff --git a/docs/plugin_dev/api-reference/content-app.rst b/docs/plugin_dev/api-reference/content-app.rst deleted file mode 100644 index deb0c446d2..0000000000 --- a/docs/plugin_dev/api-reference/content-app.rst +++ /dev/null @@ -1,81 +0,0 @@ -.. _content-app-docs: - -pulpcore.plugin.content -======================= - -The Content app provides built-in functionality to handle user requests for content, but in some -cases the default behavior may not work for some content types. For example, Container content requires -specific response headers to be present. In these cases the plugin write should provide a custom -Handler to the Content App by subclassing `pulpcore.plugin.content.Handler`. - -Making a custom Handler is a two-step process: - -1. subclass `pulpcore.plugin.content.Handler` to define your Handler's behavior -2. Add the Handler to a route using aiohttp.server's `add_route() `_ interface. - -If content needs to be served from within the :term:`Distribution`'s base_path, -overriding the :meth:`~pulpcore.plugin.models.Distribution.content_handler` and -:meth:`~pulpcore.plugin.models.Distribution.content_handler_directory_listing` -methods in your Distribution is an easier way to serve this content. The -:meth:`~pulpcore.plugin.models.Distribution.content_handler` method should -return an instance of `aiohttp.web_response.Response` or a -`pulpcore.plugin.models.ContentArtifact`. - -Creating your Handler ---------------------- - -Import the Handler object through the plugin API and then subclass it. Custom functionality can be -provided by overriding the various methods of `Handler`, but here is the simplest version: - -.. code-block:: python - - from pulpcore.plugin.content import Handler - - class MyHandler(Handler): - - pass - -Here is an example of the `Container custom Handler `_. - - -Registering your Handler ------------------------- - -We register the Handler with Pulp's Content App by importing the aiohttp.server 'app' and then -adding a custom route to it. Here's an example: - -.. code-block:: python - - from pulpcore.content import app - - app.add_routes([web.get(r'/my/custom/{somevar:.+}', MyHandler().stream_content)]) - - -Here is an example of `Container registering some custom routes `_. - - -Restricting which detail Distributions Match --------------------------------------------- - -To restrict which Distribution model types your Handler will serve, set the `distribution_model` -field to your Model type. This causes the Handler to only search/serve your Distribution types. - -.. 
code-block:: python - - from pulpcore.plugin.content import Handler - - from models import MyDistribution - - - class MyHandler(Handler): - - distribution_model = MyDistribution - - -pulpcore.plugin.content.Handler -------------------------------- - -.. autoclass:: pulpcore.plugin.content.Handler diff --git a/docs/plugin_dev/api-reference/download.rst b/docs/plugin_dev/api-reference/download.rst deleted file mode 100644 index ae0fad6446..0000000000 --- a/docs/plugin_dev/api-reference/download.rst +++ /dev/null @@ -1,233 +0,0 @@ -.. _download-docs: - -pulpcore.plugin.download -======================== - -The module implements downloaders that solve many of the common problems plugin writers have while -downloading remote data. A high level list of features provided by these downloaders include: - -* auto-configuration from remote settings (auth, ssl, proxy) -* synchronous or parallel downloading -* digest and size validation computed during download -* grouping downloads together to return to the user when all files are downloaded -* customizable download behaviors via subclassing - -All classes documented here should be imported directly from the -``pulpcore.plugin.download`` namespace. - -Basic Downloading ------------------ - -The most basic downloading from a url can be done like this:: - - downloader = HttpDownloader('http://example.com/') - result = downloader.fetch() - -The example above downloads the data synchronously. The -:meth:`~pulpcore.plugin.download.HttpDownloader.fetch` call blocks until the data is -downloaded and the :class:`~pulpcore.plugin.download.DownloadResult` is returned or a fatal -exception is raised. - -Parallel Downloading --------------------- - -Any downloader in the ``pulpcore.plugin.download`` package can be run in parallel with the -``asyncio`` event loop. Each downloader has a -:meth:`~pulpcore.plugin.download.BaseDownloader.run` method which returns a coroutine object -that ``asyncio`` can schedule in parallel. Consider this example:: - - download_coroutines = [ - HttpDownloader('http://example.com/').run(), - HttpDownloader('http://pulpproject.org/').run(), - ] - - loop = asyncio.get_event_loop() - done, not_done = loop.run_until_complete(asyncio.wait([download_coroutines])) - - for task in done: - try: - task.result() # This is a DownloadResult - except Exception as error: - pass # fatal exceptions are raised by result() - -.. _download-result: - -Download Results ----------------- - -The download result contains all the information about a completed download and is returned from a -the downloader's `run()` method when the download is complete. - -.. autoclass:: pulpcore.plugin.download.DownloadResult - :no-members: - -.. _configuring-from-a-remote: - -Configuring from a Remote -------------------------- - -When fetching content during a sync, the remote has settings like SSL certs, SSL validation, basic -auth credentials, and proxy settings. Downloaders commonly want to use these settings while -downloading. The Remote's settings can automatically configure a downloader either to download a -`url` or a :class:`pulpcore.plugin.models.RemoteArtifact` using the -:meth:`~pulpcore.plugin.models.Remote.get_downloader` call. 
Here is an example download from a URL:: - - downloader = my_remote.get_downloader(url='http://example.com') - downloader.fetch() # This downloader is configured with the remote's settings - -Here is an example of a download configured from a RemoteArtifact, which also configures the -downloader with digest and size validation:: - - remote_artifact = RemoteArtifact.objects.get(...) - downloader = my_remote.get_downloader(remote_artifact=ra) - downloader.fetch() # This downloader has the remote's settings and digest+validation checking - -The :meth:`~pulpcore.plugin.models.Remote.get_downloader` internally calls the -`DownloaderFactory`, so it expects a `url` that the `DownloaderFactory` can build a downloader for. -See the :class:`~pulpcore.plugin.download.DownloaderFactory` for more information on -supported urls. - -.. tip:: - The :meth:`~pulpcore.plugin.models.Remote.get_downloader` accepts kwargs that can - enable size or digest based validation, and specifying a file-like object for the data to be - written into. See :meth:`~pulpcore.plugin.models.Remote.get_downloader` for more - information. - -.. note:: - All :class:`~pulpcore.plugin.download.HttpDownloader` downloaders produced by the same - remote instance share an `aiohttp` session, which provides a connection pool, connection - reusage and keep-alives shared across all downloaders produced by a single remote. - - -.. _automatic-retry: - -Automatic Retry ---------------- - -The :class:`~pulpcore.plugin.download.HttpDownloader` will automatically retry 10 times if the -server responds with one of the following error codes: - -* 429 - Too Many Requests - - -.. _exception-handling: - -Exception Handling ------------------- - -Unrecoverable errors of several types can be raised during downloading. One example is a -:ref:`validation exception ` that is raised if the content downloaded fails -size or digest validation. There can also be protocol specific errors such as an -``aiohttp.ClientResponse`` being raised when a server responds with a 400+ response such as an HTTP -403. - -Plugin writers can choose to halt the entire task by allowing the exception be uncaught which -would mark the entire task as failed. - -.. note:: - The :class:`~pulpcore.plugin.download.HttpDownloader` automatically retry in some cases, but if - unsuccessful will raise an exception for any HTTP response code that is 400 or greater. - -.. _custom-download-behavior: - -Custom Download Behavior ------------------------- - -Custom download behavior is provided by subclassing a downloader and providing a new `run()` method. -For example you could catch a specific error code like a 404 and try another mirror if your -downloader knew of several mirrors. Here is an `example of that -`_ in -code. - -A custom downloader can be given as the downloader to use for a given protocol using the -``downloader_overrides`` on the :class:`~pulpcore.plugin.download.DownloaderFactory`. -Additionally, you can implement the :meth:`~pulpcore.plugin.models.Remote.get_downloader` -method to specify the ``downloader_overrides`` to the -:class:`~pulpcore.plugin.download.DownloaderFactory`. - -.. _adding-new-protocol-support: - -Adding New Protocol Support ---------------------------- - -To create a new protocol downloader implement a subclass of the -:class:`~pulpcore.plugin.download.BaseDownloader`. See the docs on -:class:`~pulpcore.plugin.download.BaseDownloader` for more information on the requirements. - -.. 
_downloader-factory: - -Download Factory ----------------- - -The DownloaderFactory constructs and configures a downloader for any given url. Specifically: - -1. Select the appropriate downloader based from these supported schemes: `http`, `https` or `file`. - -2. Auto-configure the selected downloader with settings from a remote including (auth, ssl, - proxy). - -The :meth:`~pulpcore.plugin.download.DownloaderFactory.build` method constructs one -downloader for any given url. - -.. note:: - Any :ref:`HttpDownloader ` objects produced by an instantiated - `DownloaderFactory` share an `aiohttp` session, which provides a connection pool, connection - reusage and keep-alives shared across all downloaders produced by a single factory. - -.. tip:: - The :meth:`~pulpcore.plugin.download.DownloaderFactory.build` method accepts kwargs that - enable size or digest based validation or the specification of a file-like object for the data - to be written into. See :meth:`~pulpcore.plugin.download.DownloaderFactory.build` for - more information. - -.. autoclass:: pulpcore.plugin.download.DownloaderFactory - :members: - -.. _http-downloader: - -HttpDownloader --------------- - -This downloader is an asyncio-aware parallel downloader which is the default downloader produced by -the :ref:`downloader-factory` for urls starting with `http://` or `https://`. It also supports -synchronous downloading using :meth:`~pulpcore.plugin.download.HttpDownloader.fetch`. - -.. autoclass:: pulpcore.plugin.download.HttpDownloader - :members: - :inherited-members: fetch - -.. _file-downloader: - -FileDownloader --------------- - -This downloader is an asyncio-aware parallel file reader which is the default downloader produced by -the :ref:`downloader-factory` for urls starting with `file://`. - -.. autoclass:: pulpcore.plugin.download.FileDownloader - :members: - :inherited-members: fetch - -.. _base-downloader: - -BaseDownloader --------------- - -This is an abstract downloader that is meant for subclassing. All downloaders are expected to be -descendants of BaseDownloader. - -.. autoclass:: pulpcore.plugin.download.BaseDownloader - :members: - - -.. _validation-exceptions: - -Validation Exceptions ---------------------- - -.. autoclass:: pulpcore.exceptions.DigestValidationError - :noindex: -.. autoclass:: pulpcore.exceptions.SizeValidationError - :noindex: -.. autoclass:: pulpcore.exceptions.ValidationError - :noindex: diff --git a/docs/plugin_dev/api-reference/exceptions.rst b/docs/plugin_dev/api-reference/exceptions.rst deleted file mode 100644 index d162ef4ae3..0000000000 --- a/docs/plugin_dev/api-reference/exceptions.rst +++ /dev/null @@ -1,8 +0,0 @@ -pulpcore.plugin.exceptions -========================== - -All exceptions documented here should be imported directly from the ``pulpcore.plugin.exceptions`` namespace. - -.. automodule:: pulpcore.plugin.exceptions - :imported-members: - :members: diff --git a/docs/plugin_dev/api-reference/index.rst b/docs/plugin_dev/api-reference/index.rst deleted file mode 100644 index 27d383f3b2..0000000000 --- a/docs/plugin_dev/api-reference/index.rst +++ /dev/null @@ -1,29 +0,0 @@ -.. _plugin_api: - -Plugin API Reference --------------------- - -The Plugin API is versioned in sync with ``pulpcore`` and consists of everything importable within -the :mod:`pulpcore.plugin` namespace. It is governed by the `deprecation policy <_deprecation_policy>`_. 
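In practice, "importable within the ``pulpcore.plugin`` namespace" means plugin code should only use the re-exported paths; a minimal illustration (the imported names are just examples taken from the reference pages listed below):

.. code-block:: python

    # Supported: names re-exported through the pulpcore.plugin namespace.
    from pulpcore.plugin.models import Content, Remote
    from pulpcore.plugin.exceptions import DigestValidationError

    # Unsupported: reaching into pulpcore internals directly. These paths are
    # not covered by the deprecation policy and may change without notice.
    # from pulpcore.app.models import Content
    # from pulpcore.exceptions import DigestValidationError
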
-When writing plugins, care should be taken to only import ``pulpcore`` components exposed in this -namespace; importing from elsewhere within the ``pulpcore`` module (e.g. importing directly from -``pulpcore.app``, ``pulpcore.exceptions``, etc.) is unsupported, and not protected by the -aforementioned Pulp Plugin API deprecation policy. - - -.. toctree:: - models - exceptions - serializers - storage - viewsets - tasking - download - stages - content-app - util - - -.. automodule:: pulpcore.plugin - :imported-members: - :members: diff --git a/docs/plugin_dev/api-reference/models.rst b/docs/plugin_dev/api-reference/models.rst deleted file mode 100644 index 4fba85b163..0000000000 --- a/docs/plugin_dev/api-reference/models.rst +++ /dev/null @@ -1,9 +0,0 @@ -pulpcore.plugin.models -====================== - -All models documented here should be imported directly from the ``pulpcore.plugin.models`` namespace. - -.. automodule:: pulpcore.plugin.models - :imported-members: - :members: - :noindex: pulpcore.plugin.models.AccessPolicy diff --git a/docs/plugin_dev/api-reference/serializers.rst b/docs/plugin_dev/api-reference/serializers.rst deleted file mode 100644 index 648ca9c582..0000000000 --- a/docs/plugin_dev/api-reference/serializers.rst +++ /dev/null @@ -1,9 +0,0 @@ -pulpcore.plugin.serializers -=========================== - -All serializers documented here should be imported directly from the ``pulpcore.plugin.serializers`` -namespace. - -.. automodule:: pulpcore.plugin.serializers - :imported-members: - :members: diff --git a/docs/plugin_dev/api-reference/stages.rst b/docs/plugin_dev/api-reference/stages.rst deleted file mode 100644 index 5983d2f0a0..0000000000 --- a/docs/plugin_dev/api-reference/stages.rst +++ /dev/null @@ -1,69 +0,0 @@ -.. _stages-api-docs: - -pulpcore.plugin.stages -====================== - -Plugin writers can use the Stages API to create a high-performance, download-and-saving pipeline -to make writing sync code easier. There are several parts to the API: - -1. :ref:`declarative-version` is a generic pipeline useful for most synchronization use cases. -2. The builtin Stages including :ref:`artifact-stages` and :ref:`content-stages`. -3. The :ref:`stages-api`, which allows you to build custom stages and pipelines. - - -.. _declarative-version: - -DeclarativeVersion -^^^^^^^^^^^^^^^^^^ - -.. autoclass:: pulpcore.plugin.stages.DeclarativeVersion - -.. autoclass:: pulpcore.plugin.stages.DeclarativeArtifact - :no-members: - -.. autoclass:: pulpcore.plugin.stages.DeclarativeContent - :no-members: - :members: resolution - - -.. _stages-api: - -Stages API -^^^^^^^^^^ - -.. autofunction:: pulpcore.plugin.stages.create_pipeline - -.. autoclass:: pulpcore.plugin.stages.Stage - :special-members: __call__ - -.. autoclass:: pulpcore.plugin.stages.EndStage - :special-members: __call__ - - -.. _artifact-stages: - -Artifact Related Stages -^^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: pulpcore.plugin.stages.ArtifactDownloader - -.. autoclass:: pulpcore.plugin.stages.ArtifactSaver - -.. autoclass:: pulpcore.plugin.stages.RemoteArtifactSaver - -.. autoclass:: pulpcore.plugin.stages.QueryExistingArtifacts - - -.. _content-stages: - -Content Related Stages -^^^^^^^^^^^^^^^^^^^^^^ - -.. autoclass:: pulpcore.plugin.stages.ContentSaver - :private-members: _pre_save, _post_save - -.. autoclass:: pulpcore.plugin.stages.QueryExistingContents - -.. autoclass:: pulpcore.plugin.stages.ResolveContentFutures - -.. 
autoclass:: pulpcore.plugin.stages.ContentAssociation diff --git a/docs/plugin_dev/api-reference/storage.rst b/docs/plugin_dev/api-reference/storage.rst deleted file mode 100644 index 10f84fefc7..0000000000 --- a/docs/plugin_dev/api-reference/storage.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _storage-docs: - -pulpcore.plugin.storage -======================= - -.. automodule:: pulpcore.plugin.storage - :imported-members: - :members: diff --git a/docs/plugin_dev/api-reference/tasking.rst b/docs/plugin_dev/api-reference/tasking.rst deleted file mode 100644 index 15cd2642fd..0000000000 --- a/docs/plugin_dev/api-reference/tasking.rst +++ /dev/null @@ -1,8 +0,0 @@ -pulpcore.plugin.tasking -======================= - -All models documented here should be imported directly from the ``pulpcore.plugin.tasking`` namespace. - -.. automodule:: pulpcore.plugin.tasking - :imported-members: - :members: diff --git a/docs/plugin_dev/api-reference/util.rst b/docs/plugin_dev/api-reference/util.rst deleted file mode 100644 index 9158391c48..0000000000 --- a/docs/plugin_dev/api-reference/util.rst +++ /dev/null @@ -1,8 +0,0 @@ -pulpcore.plugin.util -==================== - -All utils documented here should be imported directly from the ``pulpcore.plugin.util`` namespace. - -.. automodule:: pulpcore.plugin.util - :imported-members: - :members: diff --git a/docs/plugin_dev/api-reference/viewsets.rst b/docs/plugin_dev/api-reference/viewsets.rst deleted file mode 100644 index e284693ce3..0000000000 --- a/docs/plugin_dev/api-reference/viewsets.rst +++ /dev/null @@ -1,9 +0,0 @@ -pulpcore.plugin.viewsets -======================== - -All viewsets documented here should be imported directly from the ``pulpcore.plugin.viewsets`` -namespace. - -.. automodule:: pulpcore.plugin.viewsets - :imported-members: - :members: diff --git a/docs/plugin_dev/index.rst b/docs/plugin_dev/index.rst deleted file mode 100644 index bb10befa95..0000000000 --- a/docs/plugin_dev/index.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. _plugin_development: - -Plugin Development -================== - -Plugin Writer's Guide ---------------------- -.. toctree:: - plugin-writer/index - -Plugin Writer's Reference -------------------------- -.. toctree:: - :maxdepth: 1 - - reference/index - -Plugin API Reference --------------------- -.. toctree:: - api-reference/index diff --git a/docs/plugin_dev/plugin-writer/concepts/domains/domains_compatibility.rst b/docs/plugin_dev/plugin-writer/concepts/domains/domains_compatibility.rst deleted file mode 100644 index 1778c5135f..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/domains/domains_compatibility.rst +++ /dev/null @@ -1,122 +0,0 @@ -.. _domains-compatibility: - -Adding Domain Compatibility to a Plugin -======================================= - -In order to enable Domains, all plugins must be Domain compatible or Pulp will refuse to start. -Since Domains is an optional feature, becoming Domains compatible requires special handling for when -the feature is on or off. - -Follow the guide below to learn how to make your plugin Domain compatible. - - -Add Domain Relation to Plugin Models ------------------------------------- - -Objects will need to be updated to always have a relation to a ``pulp_domain``, which points to a -default domain when the feature is disabled. Most models that inherit from ``pulpcore`` models will -already have a ``pulp_domain`` foreign key relation, so this step mainly involves updating your -plugin's custom models. The one exception is models inheriting from ``Content``. 
These models need -the ``_pulp_domain`` relation to be added onto the model and have their ``unique_together`` updated. -See the code below for an example: - -.. code-block:: python - - from pulpcore.plugin.util import get_domain_pk - - class FileContent(Content): - ... - _pulp_domain = models.ForeignKey("core.Domain", default=get_domain_pk, on_delete=models.PROTECT) - - class Meta: - unique_together = ("relative_path", "digest", "_pulp_domain") - -.. note:: - - Child content models need a separate domain relation since Postgres does not allow - ``unique_together`` on fields from the parent table. The base ``Content`` model has a - ``pulp_domain`` relation already, so the child content model must use an underscore to prevent - a name collision. - -Ensure any Custom Action Serializer Prevents Cross-Domain Parameters --------------------------------------------------------------------- - -Domains are strictly isolated from each other and thus two objects from different domains can not -be used within the same task/operation. The ``pulpcore.app.serializers.ValidateFieldsMixin`` -contains a method for this check: ``check_cross_domains``. This is called during the ``validate`` -method if this mixin is included in your serializer. Custom serializers that take in multiple -resources need to perform this check to ensure Domain validity. - -Update each Task that uses Objects to include the Domain field --------------------------------------------------------------- - -Task code that uses objects needs to be updated to account for Domains. Each task can access -the current domain either through the current ``Task``'s ``pulp_domain`` relation or through using -``pulpcore.plugin.util.get_domain``. These should be used to ensure you are only using objects -within the correct ``domain`` of the task. The :ref: sync pipeline has been -updated to use the task's ``domain`` when querying and saving ``Artifact`` and ``Content`` units, -so simple sync-pipelines will probably need no update. Similarly, when -:ref: creating a publication with the context-manager, the ``pulp_domain`` field is -already properly handled on the ``Publication``, ``PublishedArtifacts`` and ``PublishedMetadata``. - -.. code-block:: python - - from pulpcore.plugin.models import Task - from pulpcore.plugin.util import get_domain - from .models import CustomModel - - def custom_task(custom_property): - # How to get the current domain for this task - domain = Task.current().pulp_domain - # Or with get_domain - domain = get_domain() - # Use only objects within the Task's domain - objects = CustomModel.objects.filter(pulp_domain=domain) - -Add the Appropriate has_domain_perms Checks to the ViewSets' AccessPolicies ---------------------------------------------------------------------------- - -If your plugin uses :ref: RBAC AccessPolicies<_defining_access_policy>, then the current access -condition checks need to be updated to use their Domain compatible variants. These checks ensure -that Domain-level permissions work properly in your ViewSets. See the -:ref:`permission_checking_machinery` documentation for all available checks. - -Update any extra URL Routes to include {pulp_domain} ----------------------------------------------------- - -Enabling Domains modifies the URL paths Pulp generates and custom routes added in ``urls.py`` need -to add ``{pulp_domain}`` when ``DOMAIN_ENABLED`` is set. 
Pulp has a custom Domain middleware that -will remove the ``pulp_domain`` from the ViewSet's handler method args and attach it to the request -object to prevent breaking current ViewSets. - -Add ``domain_compatible = True`` to ``PluginAppConfig`` -------------------------------------------------------- - -This attribute is what informs Pulp that your plugin is Domain compatible on startup. Without this, -Pulp will fail to start when enabling Domains. - -Add Tests ---------- - -Adding custom tests for the most important actions is a good way to ensure your compatibility stays -well maintained. In your ``template_config.yml``, change one-two runners to have ``DOMAIN_ENABLED`` -set. Use this setting in your custom Domains tests to check if they should be skipped. When Domains -are enabled the Python client bindings will require a ``pulp_domain`` name parameter on ``list`` and -``create`` actions. This param has a default value of 'default' to prevent the need to rewrite -existing tests. - -.. code-block:: yaml - - pulp_settings: - orphan_protection_time: 0 - pulp_settings_azure: - domain_enabled: true - pulp_settings_s3: - domain_enabled: true - pulp_settings_stream: null - -Add Documentation ------------------ - -Add Domain documentation to your workflows to show off your new features! Then add your plugin to -the :ref: current Domains compatible plugins list. diff --git a/docs/plugin_dev/plugin-writer/concepts/index.rst b/docs/plugin_dev/plugin-writer/concepts/index.rst deleted file mode 100644 index 052f0dc830..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/index.rst +++ /dev/null @@ -1,778 +0,0 @@ -.. _plugin-concepts: - -Plugin Concepts -=============== - -Like the Pulp Core itself, all Pulp Plugins are Django Applications, and could be created like any -other Django app with ``pulpcore-manager startapp ``. However, instead of writing all -of the boilerplate yourself, it is recommended that you start your plugin by utilizing the `Plugin -Template `_. This guide will assume that you have used -the plugin_template, but if you are interested in the details of what it provides you, please see -:ref:`plugin-django-application` for more information for how plugins are "discovered" and connected to -the ``pulpcore`` Django app. Additional information is given as inline comments in the template. - - -Plugin API Usage ----------------- -Plugin Applications interact with pulpcore with two high level interfaces, **subclassing** and -adding **tasks**. - - -.. _subclassing-general: - -Subclassing ------------ - -Pulp Core and each plugin utilize `Django `_ and the `Django Rest -Framework `_. Each plugin provides -:ref:`subclassing-models`, :ref:`subclassing-serializers`, and :ref:`subclassing-viewsets`. For -each object that a plugin writer needs to make, the ``pulpcore.plugin`` API provides base classes. -These base classes handle most of the boilerplate code, resulting in CRUD for each object out of -the box. - -.. toctree:: - :maxdepth: 2 - - subclassing/models - subclassing/serializers - subclassing/viewsets - subclassing/import-export - subclassing/replication - subclassing/pull-through - - -.. _master-detail-models: - -Master/Detail Models --------------------- - -Typically pulpcore wants to define a set of common fields on a Model; for example, -``pulpcore.plugin.models.Remote`` defines fields like ``url``, ``username``, ``password``, etc. -Plugin writers are able to also add plugin-specific fields through subclassing this object. 
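For illustration, a hypothetical Detail model that adds one plugin-specific field on top of ``Remote`` could look roughly like this (the plugin and field names are invented; the ``TYPE`` attribute is explained in the discussion that follows):

.. code-block:: python

    from django.db import models

    from pulpcore.plugin.models import Remote


    class ExampleRemote(Remote):
        """A hypothetical Detail model built on the Remote Master model."""

        TYPE = "example"  # identifies the detail type, see below

        # Plugin-specific field added on top of the common Remote fields.
        mirror_list_path = models.TextField(null=True)

        class Meta:
            default_related_name = "%(app_label)s_%(model_name)s"
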
-Conceptually this is easy, but two practical problems arise: - -* With each subclass becoming its own table in the database, the common fields get duplicated on - each of these tables. -* Migrations are now no longer on a single table, but N tables produced from subclassing. - -To address these issues, pulpcore uses Django's `Multi-table inheritance support `_ to create a pattern Pulp -developers call the "Master/Detail pattern". The model defining the common fields is called the -"Master model", and any subclass of a Master model is referred to as a "Detail model". - -For example, pulpcore defines the `Remote `_ Master model. It -inherits from ``MasterModel`` which identifies it as a Master model, and defines many fields. Then -pulp_file defines the `FileRemote `_ which is a Detail model. The -Detail model defines a ``TYPE`` class attribute and is a subclass of a Master model. - -Typically Master models are provided by pulpcore, and Detail models by plugins, but this is not -strictly required. Here is a list of the Master models pulpcore provides: - -* ``pulpcore.plugin.models.AlternateContentSource`` -* ``pulpcore.plugin.models.Content`` -* ``pulpcore.plugin.models.ContentGuard`` -* ``pulpcore.plugin.models.Distribution`` -* ``pulpcore.plugin.models.Exporter`` -* ``pulpcore.plugin.models.Importer`` -* ``pulpcore.plugin.models.Publication`` -* ``pulpcore.plugin.models.Remote`` -* ``pulpcore.plugin.models.Repository`` - -Here are some examples of usage from the Detail side: - -.. code-block:: python - - >>> my_file_remote = FileRemote.objects.get(name="some remote name") - - >>> type(my_file_remote) # We queried the detail type so we expect that type of instance - pulp_file.app.models.FileRemote - - >>> my_file_remote.policy = "streamed" # The detail object acts like it has all the attrs - >>> my_file_remote.save() # Django's multi-table inheritance handles where to put things - - >>> my_master_remote = my_file_remote.master # the `master` attr gives you the master instance - - >>> type(my_master_remote) # Let's confirm this is the Master model type - pulpcore.app.models.repository.Remote - -The Master table in psql gets a column named ``pulp_type`` which stores the app name joined with the -value of the class attribute on the Detail column using a period. So with ``FileRemote`` defining the -class attribute ``TYPE = "file"`` and the ``pulp_file`` Django app name being ``"file"`` we expect a -``pulp_type`` of ``"file.file"``.The Detail table in psql has a foreign key pointer used to join -against the Master table. This information can be helpful when you want to query from the Master -side: - -.. code-block:: python - - >>> items = Remote.objects.filter(pulp_type="file.file") # Get the File Remotes in Master table - >>> my_master_remote = items[0] # my_master_remote has no detail defined fields - - >>> type(my_master_remote) # Let's confirm this is the `master` instance - pulpcore.app.models.repository.Remote - -A Master model instance can be transformed into its corresponding Detail model object using the -`cast()` method. See the example below for usage. Additionally, It is possible to create subclasses -of Detail models, and in that case, the `cast()` method will always derive the most recent -descendent. Consider the usage from below. - -.. 
code-block:: python - - >>> my_detail_remote = my_master_remote.cast() # Let's cast the master to the detail instance - >>> type(my_detail_remote) - pulp_file.app.models.FileRemote # Now it's a detail instance with both master and detail fields - - -.. _validating-models: - -Validating Models ------------------ - -Pulp ensures validity of its database models by carefully crafted serializers. -So all instances where resources are created or updated, those serializers must be used. - -To create a ``MyModel`` from a ``data`` dictionary, the ``MyModelSerializer`` can be used like: - -.. code-block:: python - - serializer = MyModelSerializer(data=data) - serializer.is_valid(raise_exception=True) - instance = serializer.create(serializer.validated_data) - -In the stages pipeline, you want to instantiate the content units without saving them to database -right away. The ``ContentSaver`` stage will then persist the objects in the database. This can be -established by: - -.. code-block:: python - - # In MyPluginFirstStage::run - # <...> - serializer = MyModelSerializer(data=data) - serializer.is_valid(raise_exception=True) - d_content = DeclarativeContent( - content=MyModel(**serializer.validated_data), - d_artifacts=[d_artifact], - ) - await self.put(d_content) - # <...> - -.. _writing-tasks: - -Tasks ------ - -Any action that can run for a long time should be an asynchronous task. Plugin writers do not need -to understand the internals of the pulpcore tasking system. Workers automatically execute tasks, -including the ones deployed by plugins. - - -**Worker and Tasks Directories** - -In pulp each worker is assigned a unique working directory living in ``/var/lib/pulp/tmp/``, and -each started task will have its own clean temporary subdirectory therein as its current working -directory. Those will automatically be cleaned up once the task is finished. - -If a task needs to create more temporary directories, it is encouraged to use -``tempfile.TemporaryDirectory(dir=".")`` from the python standard library to place them in the -tasks working directory. This can be necessary, if the amount of temporarily saved data is too much -to wait for the automatic cleanup at the end of the task processing or to avoid naming conflicts. - -**Making Temporary Files Available to Tasks** - -Sometimes, files must be brought forward from a ViewSet to an executing task. The files may or may -not end up being artifacts in the end. To tackle this, one should use ``PulpTemporaryFile``. - -.. code-block:: python - - # Example 1 - Saving a temporary file: - temp_file = PulpTemporaryFile(file=my_file) - temp_file.save() - - # Example 2 - Validating the digest and saving a temporary file: - temp_file = PulpTemporaryFile.init_and_validate( - my_file, expected_digests={'md5': '912ec803b2ce49e4a541068d495ab570'} - ) - temp_file.save() - - # Example 3 - Creating an Artifact from the PulpTemporaryFile: - try: - artifact = Artifact.from_pulp_temporary_file(temp_file) - except Exception: - temp_file.delete() - -When dealing with a clustered deployment, different pulp services are not guaranteed to share a -common filesystem (like /usr/share/pulp). ``PulpTemporaryFile`` is the alternative for creating -files with the same storage technology that the artifacts use. Therefore, the temporary files -are accessible by all pulp instances. - -**Reservations** - -The tasking system adds a concept called **reservations** which ensures that actions that act on the -same resources are not run at the same time. 
To ensure data correctness, any action that alters the -content of a repository (thus creating a new version) must be run asynchronously, locking on the -repository and any other models which cannot change during the action. For example, sync tasks must -be asynchronous and lock on the repository and the remote. Publish should lock on the repository -whose version is being published. Reservations can be shared (for read only access) and exclusive -(for modifying access). - -**Deploying Tasks** - -Tasks are usually deployed from Views or Viewsets, please see :ref:`kick-off-tasks`. - -.. toctree:: - :maxdepth: 2 - - tasks/add-remove - tasks/publish - -**Immediate Tasks** - -When dispatching a task, one can specify whether it is eligible to be run immediately in the same -process right away given the reservations can be satisfied (defaults to ``False``), and whether it -is eligible to be deferred for workers to be picked up later (defaults to ``True``). -In case a task was marked for immediate execution, but the reservations were not satisfied, it will -be left in the task queue or marked as canceled, depending on the ``deferred`` attribute. - -.. warning:: - - A task marked for immediate execution will not be isolated in the ``pulpcore-worker``, but may - be executed in the current api worker. This will not only delay the response to the http call, - but also the complete single threaded gunicorn process. To prevent degrading the whole Pulp - service, this is only ever allowed for tasks that guarantee to perform fast **and** without - blocking on external resources. E.g. simple attribute updates, deletes... A model with a lot of - dependants that cause cascaded deletes may not be suitable for immediate execution. - -**Diagnostics** - -.. toctree:: - :maxdepth: 2 - - tasks/diagnostics - -**Task Groups** - -Sometimes, you may want to create many tasks to perform different parts of one larger piece of work, -but you need a simple means to track the progress of these many tasks. Task Groups serve this purpose -by providing details on the number of associated tasks in each possible state. -For more details, please see :ref:`kick-off-tasks`. - -**GroupProgressReports** - -GroupProgressReport can track progress of each task in that group. GroupProgressReport needs to be -created and associated to the TaskGroup. From within a task that belongs to the TaskGroup, the -GroupProgressReport needs to be updated. - - -.. code-block:: python - - # Once a TaskGroup is created, plugin writers should create GroupProgressReport objects - # ahead, so tasks can find them and update the progress. - task_group = TaskGroup(description="Migration Sub-tasks") - task_group.save() - group_pr = GroupProgressReport( - message="Repo migration", - code="create.repo_version", - total=1, - done=0, - task_group=task_group) - group_pr.save() - # When a task that will be executing certain work, which is part of a TaskGroup, it will look - # for the TaskGroup it belongs to and find appropriate progress report by its code and will - # update it accordingly. 
- task_group = TaskGroup.current() - progress_repo = task_group.group_progress_reports.filter(code='create.repo_version') - progress_repo.update(done=F('done') + 1) - # To avoid race conditions/cache invalidation issues, this pattern needs to be used so that - # operations are performed directly inside the database: - - # .update(done=F('done') + 1) - - # See: https://docs.djangoproject.com/en/4.2/ref/models/expressions/#f-expressions - # Important: F() objects assigned to model fields persist after saving the model instance and - # will be applied on each save(). Do not use save() and use update() instead, otherwise - # refresh_from_db() should be called after each save() - - -Sync Pipeline -------------- - -.. toctree:: - :maxdepth: 2 - - sync_pipeline/sync_pipeline - -Domain Multi-tenancy --------------------- - -Pulp supports a namespacing feature called Domains that allows users to create their own Domain for -their objects to live in. This feature is optional and requires plugins to be domain compatible in -order to be enabled. - -.. toctree:: - :maxdepth: 2 - - domains/domains_compatibility - -.. _rbac: - -Role Based Access Control -------------------------- - -Pulp uses a policy-based approach for Role Based Access Control (RBAC). - -Plugin writers can: - -* Enable authorization for a viewset -* Ship a default access policy -* Express what default object-level and model-level permissions created for new objects -* Check permissions at various points in task code as needed - - -This allows users to then: - -* Modify the default access policy on their installation for custom authorization -* Modify the default object-level and model-level permissions that are created for new objects - -.. toctree:: - :maxdepth: 2 - - rbac/overview - rbac/permissions - rbac/users_groups - rbac/access_policy - rbac/adding_automatic_permissions - rbac/queryset_scoping - - -Content Protection ------------------- - -Users can configure a ``ContentGuard`` to protect a ``Distribution`` on their own, but some plugins -want to offer built-in content protection features. For example pulp_container may only want a user -to download container images they have rights to based on some permissions system pulp_container -could provide. - -For more information, see the :ref:`ContentGuard Usage by Plugin Writers -` documentation. - - -Plugin Settings ---------------- - -Plugins can define settings by creating a ``.app.settings`` module containing settings -as you would define in the Django Settings File itself. ``pulpcore`` ships the actual settings.py -file so settings cannot be added directly as with most Django deployments. Instead as each plugin is -loaded, pulpcore looks for the ``.app.settings`` module and uses ``dynaconf`` to -overlay the settings on top of ``pulpcore``'s settings and user provided settings. - -Settings are parsed in the following order with later settings overwriting earlier ones: - -1. Settings from ``/etc/pulp/settings.py``. -2. Settings from ``pulpcore.app.settings`` (the pulpcore provided settings defaults). -3. Plugin settings from ``.app.settings``. - -In some cases, a setting should not overwrite an existing setting, but instead add to it. For -example, consider adding a custom log handler or logger to the `LOGGING `_ -settings. You don't want to fully overwrite it, but instead add or overwrite only a sub-portion. -``dynaconf`` provides the `dynaconf_merge feature `_ which is for -merging settings instead of overwriting them. For example, pulp_ansible makes use of this `here -`_. 
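As a rough sketch (the module path, setting name, and logger name are invented, and the exact merge marker should be checked against the dynaconf documentation for the version in use), a plugin settings module that adds a logger without replacing pulpcore's ``LOGGING`` could look like:

.. code-block:: python

    # example_plugin/app/settings.py  (hypothetical plugin layout)

    # Plain values simply overlay pulpcore's defaults when the plugin loads.
    EXAMPLE_PLUGIN_FEATURE_ENABLED = True

    # Ask dynaconf to merge this dict into the existing LOGGING setting
    # instead of overwriting it wholesale.
    LOGGING = {
        "dynaconf_merge": True,
        "loggers": {
            "example_plugin.app.tasks": {
                "level": "INFO",
                "handlers": ["console"],
            },
        },
    }
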
- -Some settings require validation to ensure the user has entered a valid value. Plugins can add -validation for their settings using validators added in a ``dynaconf`` hook file that will run -after all the settings have been loaded. Create a ``.app.dynaconf_hooks`` module like -below so ``dynaconf`` can run your plugin's validators. See `dynaconf validator docs -`_ for more information on writing validators. - -.. code-block:: python - - from dynaconf import Validator - - def post(settings): - """This hook is called by dynaconf after the settings are completely loaded""" - settings.validators.register( - Validator(...), - Validator(...), - ... - ) - settings.validators.validate() - - -.. _custom-url-routes: - -Custom API URL Routes ---------------------- - -The `typical plugin viewsets `_ are all suburls under ``/pulp/api/v3/``, but -some content types require additional urls outside of this area. For example pulp_ansible provides -the Galaxy API at ``/pulp_ansible/galaxy/``. - -Place a urls.py that defines a ``urlpatterns`` at the root of your Python package, and the pulpcore -plugin loading code will append those urls to the url root. This allows your urls.py to be a typical -Django file. For example pulp_ansible uses a `urls.py defined here `_ - - -.. _custom-content-app-routes: - -Custom Content App Routes -------------------------- - -The Content App may also require custom routes, for example `pulp_container `_ defines some. Read more about how -to :ref:`customize the content app with custom routes `. - - -.. _configuring-reverse-proxy-custom-urls: - -Configuring Reverse Proxy with Custom URLs ------------------------------------------- - -When a plugin requires either Pulp API or Pulp Content App custom urls, the reverse proxy, i.e. -either Nginx or Apache, need to receive extra configuration snippets to know which service to route -the custom URLs to. - -A best practice is to document clearly the custom URL requirements your plugin needs. Environments -such as k8s, podman, or docker may need manual configuration. Having clear docs is a minimum. - -You can ship webserver snippets as part of your Python package with three steps: - -1. Create a python package named ``webserver_snippets`` directory inside your app, e.g. -``pulp_ansible.app.webserver_snippets``. Like all Python packages it will have an ``__init__.py``. - -2. Create an ``nginx.conf`` and an ``apache.conf``. - -3. Create an entry in MANIFEST.in to have the packaged plugin include the ``apache.conf`` and -``nginx.conf`` files. - -Here is an example in `pulp_ansible's webserver configs `_. - -For the ``nginx.conf`` you can use variables with the names ``pulp-api`` and ``pulp-content`` as the -location for the backend services. For example, to route the url ``/pulp_ansible/galaxy/`` to the -Pulp API you could have your ``nginx.conf`` contain:: - - location /pulp_ansible/galaxy/ { - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Host $http_host; - # we don't want nginx trying to do something clever with - # redirects, we set the Host: header above already. - proxy_redirect off; - proxy_pass http://pulp-api; - } - -The Apache config provides variables containing the location of the Pulp Content App and the Pulp -API as ``pulp-api`` and ``pulp-content`` respectively. 
Below is an equivalent snippet to the one -above, only for Apache:: - - ProxyPass /pulp_ansible/galaxy http://${pulp-api}/pulp_ansible/galaxy - ProxyPassReverse /pulp_ansible/galaxy http://${pulp-api}/pulp_ansible/galaxy - - -For the MANIFEST.in entry, you'll likely want one like the example below which was taken from -`pulp_ansible's MANIFEST.in `_:: - - include pulp_ansible/app/webserver_snippets/* - - -.. _overriding-reverse-proxy-route-configuration: - -Overriding the Reverse Proxy Route Configuration ------------------------------------------------- - -Sometimes a plugin may want to control the reverse proxy behavior of a URL at the webserver. For -example, perhaps an additional header may want to be set at the reverse proxy when those urls are -forwarded to the plugin's Django code. To accomplish this, the -:ref:`custom app route ` can be used when it specifies a more-specific -route than the pulp-oci-images base webserver configuration provides. - -For example assume the header `FOO` should be set at the url ``/pulp/api/v3/foo_route``. Below are -two examples of a snippet that could do this (one for Nginx and another for Apache). - -Nginx example:: - - location /pulp/api/v3/foo_route { - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Host $http_host; - - proxy_set_header FOO 'asdf'; # This is the custom part - - # we don't want nginx trying to do something clever with - # redirects, we set the Host: header above already. - proxy_redirect off; - proxy_pass http://pulp-api; - } - -Apache example:: - - - ProxyPass /pulp/api http://${pulp-api}/pulp/api - ProxyPassReverse /pulp/api http://${pulp-api}/pulp/api - RequestHeader set FOO "asdf" - - -These snippets work because both Nginx and Apache match on "more-specific" routes first regardless -of the order in the config file. The pulp-oci-env ships the a default of ``/pulp/api/v3`` so -anything containing another portion after ``v3`` such as ``/pulp/api/v3/foo_route`` would be more -specific. - - -.. _deprecation_policy: - -Plugin API Stability and Deprecation Policy -------------------------------------------- - -The ``pulpcore.plugin`` API can introduce breaking changes, and will be introduced in the following -way. For this example, assume that pulpcore 3.8 introduces a breaking change by changing the call -signature of a method named ``def foo(a, b)`` which is importable via the plugin API. - -In 3.8 the following changes happen: - -1. The new method would be introduced as a new named function ``def the_new_foo(...)`` or some - similar name. -2. The existing method signature ``def foo(a, b)`` is left in-tact. -3. The ``foo`` method would have the a Python ``DeprecationWarning`` added to it such as:: - - from pulpcore.app.loggers import deprecation_logger - deprecation_logger.warning("foo() is deprecated and will be removed in pulpcore==3.9; use the_new_foo().") - -4. A ``CHANGES/plugin_api/XXXX.deprecation`` changelog entry is created explaining how to port - plugin code onto the new call interface. - -Then in 3.9 the following happens: - -1. The ``def foo(a, b)`` method is deleted entirely. -2. A ``CHANGES/plugin_api/XXXX.removal`` changelog entry is created explaining what has been - removed. - -.. note:: - - Deprecation log statements are shown to users of your plugin when using a deprecated call - interface. This is by design to raise general awareness that the code in-use will eventually be - removed. 
- -This also applies to models importable from ``pulpcore.plugin.models``. For example, an attribute -that is being renamed or removed would follow a similar deprecation process described above to allow -plugin code one release cycle to update their code compatibility. - -Logging of deprecation warnings can be disabled by raising the log level for the -``pulpcore.deprecation`` logger in the pulpcore settings file:: - - LOGGING = { - # ... - "loggers": { - "pulpcore.deprecation": { - "level": "ERROR", - } - } - - -.. _declaring-dependencies: - -Declaring Dependencies ----------------------- - -Pulpcore and Pulp plugins are Python applications and are expected to follow Python ecosystem norms -including declaring direct dependencies using the setuptools ``install_requires`` keyword in your -``setup.py``. - -Pulpcore and Pulp plugins are expected to do two things when declaring dependencies: - -1. Declare an upper bound to prevent a breaking-change release of a dependency from breaking user -installations. To prevent unexpected breakages due to new plugin releases, this typically is the -current latest release of a dependency (assuming a plugin is compatible with the latest release). -The latest release is preferred because it allows each new dependency release to be tested, and it -prevents unexpected user breakages when dependencies release breaking changes. - -2. Declare as broad a range of compatible versions as possible to minimize conflicts between your -code and other Python projects installed in the same Python environment. - -Here are some examples assuming our code directly depends on the ``jsonschema`` library and assuming -the latest ``jsonschema`` release is 4.4.2: - -``jsonschema>=2.3,<=4.4.2`` - Assuming this is accurate, this is the best declaration because it -declares as broad an expression of compatibility as safely possible. - -``jsonschema<=4.4.2`` - This is appropriate if the appropriate lower bound is not known. - -``jsonschema~=4.4`` - This should be avoided. Use an upper and lower bound range instead. - -``jsonschema==4.4.0`` - This is a last resort and needs an exceptional reason to do so. - -``jsonschema`` - This doesn't declare an upper bound, so this won't work. The CI will fail this. - -Any code that you import directly should have its dependency declared as a requirement. This -includes code that you also would receive as dependencies of dependencies. For example, all plugins -import and use Django directly, but pulpcore also includes Django. Since your plugin uses Django -directly, your plugin should declare its dependency on Django. - -.. note:: - - Why add a requirement when pulpcore is known to provide it? To continue with the Django - example... Django can introduce breaking changes with each release, so if your plugin relies on - pulpcore to declare the Django requirement, and then pulpcore upgrades, your plugin could - receive breaking changes with a new version of pulpcore. These breaking changes could be subtle - and not be noticeable until they affect your users. By your plugin declaring the dependency on - Django directly, at install/upgrade time (in the CI), you'll know right away you have a - conflicting dependency on Django. - -One useful tool for managing the upperbound is `dependabot `_ which -can open PRs raising the upper bound when new releases occur. These changes will go through the CI -which allows your dependency upper bound raising to be tested. 
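Putting the guidance above together, a plugin's ``setup.py`` could declare its direct dependencies roughly as follows (the project name and version bounds are placeholders for illustration, not recommendations):

.. code-block:: python

    # setup.py -- a minimal sketch of declaring bounded direct dependencies
    from setuptools import find_packages, setup

    setup(
        name="pulp-example",  # hypothetical plugin name
        packages=find_packages(),
        install_requires=[
            # Broad lower bound, upper bound at the latest tested release.
            "jsonschema>=2.3,<=4.4.2",
            # Declared directly because the plugin imports Django itself,
            # even though pulpcore also pulls it in.
            "Django>=3.2,<4.3",
            # Illustrative pulpcore compatibility range.
            "pulpcore>=3.25,<3.40",
        ],
    )
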

The challenging part of maintaining the lower bound is that it is not tested, because ``pip`` in
the CI wants to use the latest version. Here are a few examples of when you want to raise the lower
bound:

* A plugin code change uses a new dependency feature.
* A bug in the lower bound version of a dependency affects your plugin's users and a fix is
  available in a newer version of the dependency.
* Plugin code is incompatible with the lower bound version of a dependency and the solution is to
  declare a new lower bound.


.. _checksum-use-in-plugins:

Checksum Use In Plugins
-----------------------

The ``ALLOWED_CONTENT_CHECKSUMS`` setting provides the list of checksums a Pulp installation is
allowed to handle. This includes two types of "checksum handling":

1. Generating checksums. Only hashers in the ``ALLOWED_CONTENT_CHECKSUMS`` list should be used for
   checksum generation.
2. Passing through checksum data to clients. Pulp installations should not deliver checksum data to
   clients if that checksum type is not in the ``ALLOWED_CONTENT_CHECKSUMS`` list. For example, the
   RPM plugin publications contain checksums that Pulp does not generate, and it should restrict
   the checksum data used in those publications to the set of allowed hashers in
   ``ALLOWED_CONTENT_CHECKSUMS``.

.. note::

    The plugin API provides the ``pulpcore.plugin.pulp_hashlib`` module which provides the ``new``
    function. This is a wrapper around ``hashlib.new`` which raises an exception if a hasher is
    requested that is not listed in the ``ALLOWED_CONTENT_CHECKSUMS`` setting. This is a
    convenience facility that saves plugin writers from checking the ``ALLOWED_CONTENT_CHECKSUMS``
    setting themselves.


.. _il8n-expectations:

Internationalization Expectations
---------------------------------

pulpcore and its plugins are expected to internationalize all user-facing strings using Python's
gettext facilities. This allows Pulp to be translated into other languages and be more usable for a
broader base of users.

Administrator-facing strings are expected *not* to be internationalized. These include all log
statements, migration output print statements, Django management commands, etc., and will remain in
English. This expectation was formed after feedback from multi-language speakers who believe that
having error messages for admins in English reduces the time to find a fix and is generally less
surprising.


.. _zero-downtime-upgrades:

Zero-Downtime Upgrades
----------------------

Eventually, Pulp users will be able to upgrade without first stopping Pulp services. This has been
`requested from the community `_.
To work towards that goal, developers of ``pulpcore`` or a plugin should follow these requirements:

* Migrations must not break code from earlier versions that is still running during an upgrade.
* Task code must be backwards compatible until the next major Pulp version.

Future user upgrades will likely run as follows:

1. Run the migrations while the old pulp code is online. The old code now operates on the new data
   format.
2. Perform a rolling restart to replace old code with new code. Old and new code run at the same
   time!


Zero-Downtime Migrations
========================

The significant challenge with online migrations is that the db state the migration applies has to
work with both newer and older versions of Pulp code. For example, consider a model field that is
to be renamed.
After renaming the field, Django would generate a migration that renames the specified column. This
would break all previous code which expects the previous column name.

Before getting into specific suggestions, the general pattern is the following:

1. The migration should be split into two parts: a "compatible with earlier code" migration and a
   "breaking earlier code" migration. Continuing the column rename example, it would become a
   "create a new column" migration and, later, a "delete the original column" migration.

2. The "breaking earlier code" migration should be delivered in a later release. It records the
   component versions that would not be broken by the change. When that migration is about to run,
   it uses the version info stored in the db for each running pulp process to determine if any
   component is running a version that would break if this change were applied.

The solution will be highly dependent on the details of the migration, but here are some likely
patterns to be applied:

1. Avoid it. Is this rename really that important? Is it worth the trouble?

2. Rename the model attributes in code, but leave the actual column name as-is with
   the `db_column `_ option.

3. Have an "old" and a "new" column and use database triggers to keep data written to one column
   also written to the other and vice-versa.

Here's an example:

pulp_file wants to rename a model attribute ``old`` to be called ``new`` in the next pulp_file
release, let's say that's pulp_file 1.10.0. Let's assume that avoiding the rename altogether or
using the ``db_column`` option to just rename it in the ORM are not viable.

This could be done as follows:

* Add the ``new`` field next to the ``old`` field and have Django auto-create a migration adding
  ``new``.

* The same migration needs to install a new trigger so that anytime ``old`` is written to, ``new``
  is also written to, and vice-versa. For example, something `like this `_.
  This allows the new code to read/write exclusively with ``new`` and the old code to deal with
  ``old``.

* Write a data migration that updates the ``new`` column with ``old`` data in batches. Use batching
  to avoid a long table-lock.

* Have the codebase of pulp_file 1.10.0 stop using ``old`` entirely.

At a later time, e.g. pulp_file 1.13.0, a migration will be shipped to remove column ``old``. That
migration needs to do two things:

* Prior to running, ensure via the database records that there are no pulp components running with
  pulp_file < 1.10.0. If there are, abort the migration and notify the user that they need to
  upgrade to a version pulp_file>=1.10,<1.13.0.

* Remove the database trigger and then the column ``old``.


Data Migrations
===============

One problem that can arise from data migrations is the use of table-locks, which would prevent
other code still running from executing concurrently. The typical solution is to have data
migrations operate in transactional batches, which avoids a table-lock and instead creates
row-locks.


Tasking System
==============

Tasking also has some considerations to allow code upgrades; specifically, tasks dispatched from
older codebases could run on newer, upgraded workers. To ensure this always works, tasks must stay
backwards compatible until the next major Pulp version. For example, you cannot make a breaking
signature change in tasking code; if one is needed, introduce a new task name and preserve the old
code until the next major Pulp version.
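
As one possible illustration of the last point, a plugin might keep the old task callable as a thin
shim around a newly named task. The task names and the added ``mirror`` argument below are
hypothetical; this is a sketch of the pattern, not prescribed pulpcore API.

.. code-block:: python

    # tasks.py -- keep the old task interface alive until the next major release.


    def synchronize_v2(remote_pk, repository_pk, mirror=False):
        """New task with the changed call signature."""
        ...


    def synchronize(remote_pk, repository_pk):
        """Old task name and signature, kept so tasks queued by older code still run."""
        return synchronize_v2(remote_pk, repository_pk, mirror=False)
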
- -.. note:: - - Users not performing zero downtime upgrades who are still wary of any task incompatibilities, - should consider running the pulpcore worker in burst mode (`pulpcore-worker --burst`) after - shutting down all the api and content workers to drain the task queue. diff --git a/docs/plugin_dev/plugin-writer/concepts/rbac/access_policy.rst b/docs/plugin_dev/plugin-writer/concepts/rbac/access_policy.rst deleted file mode 100644 index 77d4d4fc08..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/rbac/access_policy.rst +++ /dev/null @@ -1,324 +0,0 @@ -.. _defining_access_policy: - -Defining an Access Policy -========================= - -The Access Policy controls the authorization of a given request and is enforced at the -viewset-level. Access policies are based on the AccessPolicy from `drf-access-policy -`_ which uses `policy statements -described here `_. - -Example Policy --------------- - -Below is an example policy used by ``FileRemote``, with an explanation of its effect below that:: - - [ - { - "action": ["list"], - "principal": "authenticated", - "effect": "allow", - }, - { - "action": ["create"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_perms:file.add_fileremote", - }, - { - "action": ["retrieve"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_or_obj_perms:file.view_fileremote", - }, - { - "action": ["update", "partial_update", "set_label", "unset_label"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_or_obj_perms:file.change_fileremote", - }, - { - "action": ["destroy"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_or_obj_perms:file.delete_fileremote", - }, - ] - -The above policy allows the following four cases, and denies all others by default. Overall this -creates a "user isolation policy" whereby users with the ``file.add_fileremote`` permission can -create ``FileRemote`` objects, and users can only read/modify/delete ``FileRemote`` objects they -created. - -Here's a written explanation of the policy statements: - -* ``list`` is allowed by any authenticated user. Although users are allowed to perform an operation - what they can list will still be restricted to :ref:`only the objects that user can view - `. -* ``create`` is allowed by any authenticated user with the ``file.add_fileremote`` permission. -* ``retrieve`` (the detail view of an object) is allowed by an authenticated user who has the - ``file.view_fileremote`` permission. Although users are allowed to perform an operation what they - can list will still be restricted to :ref:`only the objects that user can view - `. -* ``update`` or ``partial_update`` is allowed by an authenticated user who has the - ``file.change_fileremote`` permission. -* ``destroy`` is allowed by any authenticated user with the ``file.delete_fileremote`` permission. - -These names correspond with the `default DRF viewset action names -`_. - - -Authorization Conditions ------------------------- - -Each policy statement can contain `drf-access-policy conditions `_ which is useful for verifying a user has one or -more permissions. Pulp ships many built-in checks. See the :ref:`permission_checking_machinery` -documentation for more information on available checks. - -When multiple conditions are present, **all** of them must return True for the request to be -authorized. - -.. 
note:: - - If you are making your plugin compatible with Domains, then use the ``has_model_or_domain_perms`` - and ``has_model_or_domain_or_obj_perms`` checks where appropriate. - -.. warning:: - - The ``admin`` user created on installations prior to RBAC being enabled has - ``is_superuser=True``. Django assumes a superuser has any model-level permission even without it - being assigned. Django's permission checking machinery assumes superusers bypass authorization - checks. - - -Custom ViewSet Actions ----------------------- - -The ``action`` part of a policy statement can reference `any custom action your viewset has -`_. -For example ``FileRepositoryViewSet`` has a ``sync`` custom action used by users to sync a given -``FileRepository``. Below is an example of the default policy used to guard that action:: - - { - "action": ["sync"], - "principal": "authenticated", - "effect": "allow", - "condition": [ - "has_model_or_domain_or_obj_perms:file.modify_repo_content", - "has_remote_param_model_or_domain_or_obj_perms:file.view_fileremote", - ] - } - - -.. _storing_access_policy_in_db: - -Storing an Access Policy in the DB ----------------------------------- - -All access policies are stored in the database in the `pulpcore.plugin.models.AccessPolicy` model, -which stores the policy statements described above. Here is a look at the ``AccessPolicy`` model: - -.. autoclass:: pulpcore.plugin.models.AccessPolicy - :members: viewset_name, statements, creation_hooks - -By storing these in the database they are readable to users with a GET to -``/pulp/api/v3/access_policies/``. Additionally users can PUT/PATCH modify them at -``/pulp/api/v3/access_policies/:uuid/``. Users cannot modify create or delete an Access Policy in -the database because only plugin writers create them and their viewset code expects a specific -AccessPolicy instance to exist. - - -.. _shipping_default_access_policy: - -Shipping a Default Access Policy --------------------------------- - -To ship a default access policy, define a dictionary named ``DEFAULT_ACCESS_POLICY`` as a class -attribute on a subclass of ``NamedModelViewSet`` containing all of ``statements`` and -``creation_hooks``. The ``AccessPolicy`` instance will then be created in the ``pulp_migrate`` -signal handler. In the same way you might want to specify a ``LOCKED_ROLES`` dictionary that will -define roles as lists of permissions to be used in the access policy. - -Here's an example of code to define a default policy: - -.. 
code-block:: python - - class FileRemoteViewSet(RemoteViewSet): - - <...> - DEFAULT_ACCESS_POLICY = { - "statements": [ - { - "action": ["list"], - "principal": "authenticated", - "effect": "allow", - }, - { - "action": ["create"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_perms:file.add_fileremote", - }, - { - "action": ["retrieve"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_or_obj_perms:file.view_fileremote", - }, - { - "action": ["update", "partial_update", "set_label", "unset_label"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_or_obj_perms:file.change_fileremote", - }, - { - "action": ["destroy"], - "principal": "authenticated", - "effect": "allow", - "condition": "has_model_or_domain_or_obj_perms:file.delete_fileremote", - }, - ], - - "creation_hooks": [ - { - "function": "add_roles_for_object_creator", - "parameters": { - "roles": "file.fileremote_owner", - }, - }, - ], - } - LOCKED_ROLES = { - "file.fileremote_owner": [ - "file.view_fileremote", "file.change_fileremote", "file.delete_fileremote" - ], - "file.fileremote_viewer": ["file.view_fileremote"], - } - <...> - -For an explanation of the ``creation_hooks`` see the -:ref:`shipping_a_default_new_object_policy` documentation. - -The attribute ``LOCKED_ROLES`` contains roles that are managed by the plugin author. Their name -needs to be prefixed by the plugins ``app_label`` with a dot to prevent collisions. Roles defined -there will be replicated and updated in the database after every migration. They are also marked -``locked=True`` to prevent being modified by users. The primary purpose of these roles is to allow -plugin writers to refer to them in the default access policy. - - -.. _allow_granting_permissions_by_the_object_owners: - -Allow Granting Permissions by the Object Owners ------------------------------------------------ - -To allow object owners to grant access to other users, first add a ``manage_roles`` permission to -the model. - -.. code-block:: python - - class FileRemote(Remote): - <...> - - class Meta: - permissions = [ - ("manage_roles_fileremote", "Can manage roles on file remotes"), - ] - -Now include the ``RolesMixin`` in the definition of the viewset and add statements for its verbs. - -.. code-block:: python - - class FileRemoteViewSet(RemoteViewSet, RolesMixin): - <...> - - DEFAULT_ACCESS_POLICY = { - "statements": [ - <...> - { - "action": ["list_roles", "add_role", "remove_role"], - "principal": "authenticated", - "effect": "allow", - "condition": ["has_model_or_domain_or_obj_perms:file.manage_roles_fileremote"], - }, - ] - } - - LOCKED_ROLES = { - "file.fileremote_owner": [ - <...> - <...> - } - - -.. _handling_objects_created_prior_to_RBAC: - -Handling Objects created prior to RBAC --------------------------------------- - -Prior to RBAC being enabled, ``admin`` was the only user and they have ``is_superuser=True`` which -generally causes them to pass any permission check even without explicit permissions being assigned. - - -.. _viewset_enforcement: - -Viewset Enforcement -------------------- - -Pulp configures the ``DEFAULT_PERMISSION_CLASSES`` in the settings file to use -``pulpcore.plugin.access_policy.AccessPolicyFromDB`` by default. This ensures that by defining a -``DEFAULT_ACCESS_POLICY`` on your Viewset, Pulp will automatically save it to the database at -migration-time, and your Viewset will be protected without additional effort. 
- -This strategy allows users to completely customize or disable the DRF Permission checks Pulp uses -like any typical DRF project would. - -Also like a typical DRF project, individual Viewsets or views can also be customized to use a -different Permission check by declaring the ``permission_classes`` check. For example, here is the -``StatusView`` which disables permission checks entirely as follows: - -.. code-block:: python - - class StatusView(APIView): - ... - permission_classes = tuple() - ... - - -.. _permission_checking_machinery: - -Permission Checking Machinery ------------------------------ - -drf-access-policy provides a feature to enable conditional checks to be globally available as their -docs `describe here `_. Pulp -enables the ``reusable_conditions`` in its settings.py file, allowing a variety of condition checks -to be globally available. Pulp enables this as follows: - -.. code-block:: python - - DRF_ACCESS_POLICY = {"reusable_conditions": ["pulpcore.app.global_access_conditions"]} - -The ``pulpcore.app.global_access_conditions`` provides the following checks that are available for -both users and plugin writers to use in their policies: - -.. automodule:: pulpcore.app.global_access_conditions - :members: - - -.. _custom_permission_checks: - -Custom Permission Checks ------------------------- - -Plugins can provide their own permission checks by defining them in a -``app.global_access_conditions`` module and adding an operation like - -.. code-block:: python - - DRF_ACCESS_POLICY = { - "dynaconf_merge_unique": True, - "reusable_conditions": ["pulp_container.app.global_access_conditions"], - } - -to their ``app.settings`` module. diff --git a/docs/plugin_dev/plugin-writer/concepts/rbac/adding_automatic_permissions.rst b/docs/plugin_dev/plugin-writer/concepts/rbac/adding_automatic_permissions.rst deleted file mode 100644 index 5c2f1def19..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/rbac/adding_automatic_permissions.rst +++ /dev/null @@ -1,162 +0,0 @@ -.. _adding_automatic_permissions_for_new_objects: - -Adding Automatic Permissions for New Objects -============================================ - -When creating new objects in either viewsets or tasks it's important to have the right permissions. -It is important that the permissions new objects receive work with the AccessPolicy so that newly -created objects can be authorized by the AccessPolicy as expected. The AccessPolicy statements are -user-configurable and so the permissions to be created for new objects are too. Similar to the -requirements for the AccessPolicy ``statements``, plugin writers can define and ship a default -behavior for permissions on new objects, and then users can modify them as needed after migrations -are run. - - -.. _defining_new_object_permission_behaviors: - -Defining New Object Permission Behaviors ----------------------------------------- - -The ``AccessPolicy.creation_hooks`` attribute defines a set of callables that are intended to be -run when new objects are created. These do not run automatically; your models should use the -``pulpcore.plugin.models.AutoAddObjPermsMixin`` on the model as described in the -:ref:`enabling_new_object_permission_creation` section. - -The ``AccessPolicy.creation_hooks`` attribute is optional because not all AccessPolicy objects -create objects. If no objects are created by an endpoint, there does not need to be a -``creation_hooks`` attribute. - -Permissions are associated to users via roles. 
- -The most common auto-assignment of roles is to the creator of an object themselves. Here is an -example assigning the ``"core.task_owner"`` role to the creator of an object: - -.. code-block:: python - - { - "function": "add_roles_for_object_creator", - "parameters": {"roles": ["core.task_owner"]}, - } - -Another common auto-assignment of roles is to assign to one or more users explicitly. Here is an -example assigning the ``"core.task_owner"`` role to the users ``["alice", "bob"]``. - -.. code-block:: python - - { - "function": "add_roles_for_users", - "parameters": { - "roles": "core.task_owner", - "users": ["alice", "bob"], - }, - } - -A third common auto-assignment of roles is to assign to one or more groups explicitly. Here is an -example assigning the ``"core.task_viewer"`` role to the group ``"foo"``. - -.. code-block:: python - - { - "function": "add_roles_for_groups", - "parameters": { - "roles": ["core.task_viewer"], - "groups": "foo", - }, - } - -.. note:: - - All the hooks shipped with pulpcore accept either a single item or list of items for their - arguments like ``roles``, ``users`` or ``groups``. - - -.. _enabling_new_object_permission_creation: - -Enabling New Object Permission Creation ---------------------------------------- - -To enable automatic permission creation for an object managed by an AccessPolicy, have your model -use the ``pulpcore.plugin.models.AutoAddObjPermsMixin``. See the example below as an example: - -.. code-block:: python - - - class MyModel(BaseModel, AutoAddObjPermsMixin): - ... - -See the docstring below for more information on this mixin. - -.. autoclass:: pulpcore.app.models.access_policy.AutoAddObjPermsMixin - - -.. _shipping_a_default_new_object_policy: - -Shipping a Default New Object Policy ------------------------------------- - -In general, the default recommended is to use the ``add_roles_for_object_creator`` to assign the -view, change, and delete permissions for the object created. Here is an example of a default policy -like this: - -.. code-block:: python - - DEFAULT_ACCESS_POLICY = { - "statements": <...> - "creation_hooks": [ - { - "function": "add_roles_for_object_creator", - "parameters": {"roles": "file.fileremote_owner"}, - } - ], - } - LOCKED_ROLES = { - "file.fileremote_owner": [ - "file.view_fileremote", "file.change_fileremote", "file.delete_fileremote" - ], - } - -This effectively creates a "user isolation" policy which aligns with the examples from -:ref:`shipping_default_access_policy`. - - -.. _defining_custom_new_object_permission_callables: - -Defining Custom New Object Permission Callables ------------------------------------------------ - -Plugin writers can use more than the built-in callables such as ``add_roles_for_object_creator`` or -``add_roles_for_users`` by defining additional methods on the model itself. The callables defined in -the ``function`` are method names on the Model that need to be registered with -``REGISTERED_CREATION_HOOKS``: - -.. 
code-block:: python - - class MyModel(BaseModel, AutoAddObjPermsMixin): - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.REGISTERED_CREATION_HOOKS["my_custom_callable"] = self.my_custom_callable - - def my_custom_callable(self, role, users, groups): - from pulpcore.app.util import assign_role - for user in users: - assign_role(role, user, self) # self is the object being assigned - for group in groups: - assign_role(role, group, self) # self is the object being assigned - -This would be callable with a configuration like this one: - -.. code-block:: python - - { - "function": "my_custom_callable", - "parameters": { - "role": "pulpcore.task_viewer", - "users": ["bob"], - "groups": [], - }, - } - -.. note:: - - The ``parameters`` dict must actually match the creation hooks signature. diff --git a/docs/plugin_dev/plugin-writer/concepts/rbac/overview.rst b/docs/plugin_dev/plugin-writer/concepts/rbac/overview.rst deleted file mode 100644 index 12a1a1a16c..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/rbac/overview.rst +++ /dev/null @@ -1,67 +0,0 @@ -Overview -======== - -The goals of the authorization system are to: - -* Make Pulp safe as a multi-user system -* Rely on User and Group definitions in the Django database, but allow them to come from anywhere -* Enforce permission checks at each viewset using a policy based approach -* Give users fine-grained control over each viewset's policy - -Architecture ------------- - -Pulp's authorization model has the following architecture: - -.. image:: /static/rbac_architecture.png - :align: center - -:Request Authorization: Each request is authorized by a `drf-access-policy `_ based policy at the viewset-level. You can learn more about - defining an access policy :ref:`here `. - -:Task Permissions Check: A permission check that occurs inside of Task code. This tends to use - permission checking calls like `has_perm` or `has_perms` `provided by Django `_. - -:Permission Checking Machinery: A set of methods which can check various conditions such as if a - requesting user has a given permission, or is a member of a group that has a given permission, - etc. See the :ref:`permission_checking_machinery` section for the complete list of available - methods. - -:Users and Groups: Users and Groups live in the Django database and are used by the Permission - Checking Machinery. See the :ref:`users_and_groups` documentation for more information. - - -Getting Started ---------------- - -To add authorization for a given resource, e.g. ``FileRemote``, you'll need to: - -**Define the Policy:** - -1. Define the default ``statements`` of the new Access Policy for the resource. See the - :ref:`defining_access_policy` documentation for more information on that. -2. Define the ``roles`` as sets of permissions for that resource. -3. Define the default role associations created for new objects using the ``creation_hooks`` - attribute of the new Access Policy for the resource. See the - :ref:`adding_automatic_permissions_for_new_objects` documentation for more information on that. -4. Ship that Access Policy as the class attribute ``DEFAULT_ACCESS_POLICY`` of a - ``NamedModelViewSet``. This will contain the ``statements`` and ``creation_hooks`` attributes. - Ship the roles as the ``LOCKED_ROLES`` attribute accordingly. See the - :ref:`shipping_default_access_policy` documentation for more information on this. -5. Add the ``RolesMixin`` to the viewset and add statements for managing roles to the access - policy. 
Usually this is accompanied by adding a ``manage_roles`` permission on the model. - -**Enforce the Policy:** - -1. ``pulpcore.plugin.access_policy.AccessPolicyFromDB`` is configured as the default permission - class, so by specifying a ``DEFAULT_ACCESS_POLICY`` it will automatically be enforced. See the - :ref:`viewset_enforcement` docs for more information on this. - -**Add QuerySet Scoping:** - -1. Define a ``queryset_filtering_required_permission`` attribute on your viewset that names the - permissions users must have to view an object. This is possible if your viewset is a subclass of - the ``pulpcore.plugin.models.NamedModelViewSet``. See the :ref:`enabling_queryset_scoping` - documentation for more information. diff --git a/docs/plugin_dev/plugin-writer/concepts/rbac/permissions.rst b/docs/plugin_dev/plugin-writer/concepts/rbac/permissions.rst deleted file mode 100644 index 93e8584b63..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/rbac/permissions.rst +++ /dev/null @@ -1,129 +0,0 @@ -Permissions and Roles -===================== - -The permissions system provides a way to assign permissions as part of roles to specific users and -groups of users. The models driving this data are ``django.contrib.auth.models.Permission`` and -``pulpcore.plugin.models.role.Role``. Each ``Permission`` has a name, describing it and can be -associated with one or more ``Role``. Roles can be assigned to users or groups either on the -Model-Level, Domain-level (if domains are enabled), or Object-Level. - - -.. _model_permissions: - -Model Permissions ------------------ - -``Permissions`` in Django are tied to models and usually map to certain -actions performed thereon. By default, each model receives four permissions: - -* The “add” permission limits the user’s ability to view the “add” form and add an object. -* The “change” permission limits a user’s ability to view the change list, view the “change” - form and change an object. -* The “delete” permission limits the ability to delete an object. -* The “view” permission limits the ability to view an object. - -The Model permissions are created automatically by Django, and receive a name like: -``._``. For example to change file remote the permission is named -``file.change_fileremote``. You can view the Permissions on a system via the Django ORM with: -``Permission.objects.all()``. See the `Django Permissions Docs `_ for more information on working with -permissions. - -Here's an example of the Permissions automatically created for the ``FileRemote`` model: - -* ``file.add_fileremote`` -* ``file.view_fileremote`` -* ``file.change_fileremote`` -* ``file.delete_fileremote`` - - -.. _defining_custom_permissions: - -Defining Custom Permissions ---------------------------- - -Any model can define custom permissions, and Django will automatically make a migration to add it -for you. See the `Django Custom Permissions Documentation `_ for more information on how to do that. In contrast -to ``AccessPolicies`` and ``creation_hooks``, permissions can only be defined by the plugin writer. -As a rule of thumb, permissions should be the atomic building blocks for roles and each action that -can be performed on an object should have its own permission. - - -.. 
_custom_permission_for_repository_content_modification: - -Custom Permission for Repository Content Modification ------------------------------------------------------ - -The Repository subclass is one place where it's recommended to create a custom permission that -manages the ability to modify RepositoryVersions underneath a Repository. While the add, create, -view, and delete default permissions apply to the Repository itself, this new custom permission is -intended to be required for any operations that produce RepositoryVersions, e.g. ``sync``, -``modify``, or ``upload``. - -Here's an example of adding a permission like this for ``FileRepository``: - -.. code-block:: python - - class FileRepository(Repository): - - ... - - class Meta: - ... - permissions = ( - ('modify_repo_content', 'Modify Repository Content'), - ) - -.. note:: - - It is not necessary to "namespace" this ``modify_repo_content`` permission because by including - it in the meta class of your Detail view, it will already be namespaced on the correct object. - - -.. _roles: - -Roles ------ - -``Roles`` are basically sets of ``Permissions``, and in Pulp, users and groups should receive their -``Permissions`` exclusively via role assignments. Typical roles are ``owner`` for an object with all -the permissions to view modify and delete the object, or ``viewer`` limited to see the object. To -scope the reach of the permissions in a role, these role are assigned to ``Users`` or ``Groups`` -either on the model-level, domain-level (if domains are enabled), or the object-level. - -:Model-Level: A role is associated to a user or group for access to a specific model, but not an - instance of that model. This allows you to express concepts like "Hilde can administer all - FileRemotes". -:Domain-Level: When the domains feature is enabled, a role is associated to a user or group for - access to a specific model within the specific domain and only that domain. This allows you - to express concepts like "Hilde can administer all FileRemotes within Domain 'foo'". -:Object-Level: A role is associated to a user or group for access to a specific instance of a - specific model. This allows you to express concepts like "Hilde can administer - FileRemote(name='foo remote'). - -Certain roles may contain permissions that are only ever checked on the model(or domain)-level. -For example the ``creator`` role for a model that contains the models ``add`` permission. - -In the case for ``FileRemote``, the typical set of roles provided by the plugin looks like: - -.. code-block:: python - - LOCKED_ROLES = { - "file.fileremote_creator": ["file.add_fileremote"], - "file.fileremote_owner": [ - "file.view_fileremote", - "file.change_fileremote", - "file.delete_fileremote", - "file.manage_roles_fileremote", - ], - "file.fileremote_viewer": ["file.view_fileremote"], - } - -Roles come in two flavors, locked and user-defined. First there are so called locked roles that are -provided by plugins. Their name needs to be prefixed by the plugin ``app_label`` followed by a dot -(see the example above). They can be seen, but not modified via the api, and are kept up to date -with their definition in the plugin code. That way, plugins can ship default access policies that -rely on those roles. The other flavor is user defined roles. These are managed via the Pulp -API, and plugin code will not interfere with them. Users can opt to use the provided locked roles or -roll their own. 
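
To make the model-level versus object-level distinction concrete, the sketch below grants the
locked roles above to a user with the ``assign_role`` helper used earlier in this guide. The user,
the remote, and the assumption that the object argument is optional for a model-level grant are
illustrative.

.. code-block:: python

    from django.contrib.auth import get_user_model

    from pulpcore.app.util import assign_role
    from pulp_file.app.models import FileRemote

    User = get_user_model()
    hilde = User.objects.get(username="hilde")

    # Model-level: Hilde can view every FileRemote.
    assign_role("file.fileremote_viewer", hilde)

    # Object-level: Hilde administers only this one remote.
    remote = FileRemote.objects.get(name="foo remote")
    assign_role("file.fileremote_owner", hilde, remote)
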
diff --git a/docs/plugin_dev/plugin-writer/concepts/rbac/queryset_scoping.rst b/docs/plugin_dev/plugin-writer/concepts/rbac/queryset_scoping.rst deleted file mode 100644 index 8c8f99dc11..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/rbac/queryset_scoping.rst +++ /dev/null @@ -1,115 +0,0 @@ -.. _queryset_scoping: - -Restricting Viewable Objects -============================ - -With limited object-level permissions on certain objects, its desirable to restrict the objects -shown to users. This effectively causes a Pulp system with many users to have each user see only -"their" permissions. - -This feature is generally referred to as Queryset Scoping because it is applied as an additional -filter on the base Queryset of a ViewSet. This causes the permission filtering to work with other -filterings applied by a user. - -.. note:: - - If Domains are enabled, querysets will be scoped by the current request's domain before being - passed onto RBAC queryset scoping. - -.. _enabling_queryset_scoping: - -Enabling QuerySet Scoping -------------------------- - -The support for this is built into ``pulpcore.plugin.viewsets.NamedModelViewSet``, which is often -the base class for any model-based ViewSet if Pulp. Queryset Scoping is performed by the ViewSet's -``get_queryset`` method which calls each permission class' method ``scope_queryset`` if present. -Pulp's default permission class, ``pulpcore.app.AccessPolicyFromDB``, implementation of -``scope_queryset`` calls the ViewSet function in the AccessPolicy field ``queryset_scoping`` if -defined. This field can be changed by the user to any method on the ViewSet or set empty if they -wish to turn off Queryset Scoping for that view:: - - DEFAULT_ACCESS_POLICY = { - ... - # Call method `scope_queryset` on ViewSet to perform Queryset Scoping - "queryset_scoping": {"function": "scope_queryset"}, - ... - } - -``NamedModelViewSet`` has a default ``scope_queryset`` implementation that will scope the query -based of the ``queryset_filtering_required_permission`` class attribute set on ViewSet. -Objects will only be shown to users that have access to this specific permission either at the -model-level or object-level. - -For example Tasks are restricted only to those users with the "core.view_task" permission like -this:: - - TaskViewSet(NamedModelViewSet): - ... - queryset_filtering_required_permission = "core.view_task" - - -.. _manually_implementing_queryset_scoping: - -Manually Implementing QuerySet Scoping --------------------------------------- - -Default scoping behavior can be overriden by supplying your own ``scope_queryset`` method. -``scope_queryset`` takes one argument, the queryset to be scoped, and returns the scoped queryset. -Content ViewSet's have their ``scope_queryset`` method overriden to scope based on repositories -the user can see. - -.. note:: - - When queryset scoping is enabled for content you must also use the - ``has_required_repo_perms_on_upload`` access condition on the upload endpoint to ensure users - specify a repository for upload or they won't be able to see their uploaded content. - -Extra Queryset Scoping methods can be defined on the ViewSet to allow users to choose different -behaviors besides On/Off. The method must accept the queryset as the first argument. Additional -parameters can also be accepted by supplying them in a ``parameters`` section of the -``queryset_scoping`` field of the AccessPolicy like so: - -.. 
code-block:: python - - from pulpcore.plugin.viewsets import NamedModelViewSet - from pulpcore.plugin.util import get_objects_for_user - - class MyViewSet(NamedModelViewSet): - - DEFAULT_ACCESS_POLICY = { - # Statements omitted - "queryset_scoping" : { - # This entire field is editable by the user - "function": "different_permission_scope", - "parameters": {"permission": "my.example_permission"} - } - } - - def different_permission_scope(qs, permission): - """Example extra scoping method that uses a user specified permission to scope.""" - return get_objects_for_user(self.request.user, permission, qs=qs) - -If your ViewSet does not inherit from ``pulpcore.plugin.viewsets.NamedModelViewSet`` or you would -like more control over the QuerySet Scoping feature it can be added manually by adding a -``get_queryset`` method to your ViewSet which returns the filtered QuerySet. - -To look up objects by permission easily from an existing QuerySet use the ``get_objects_for_user`` -provided by pulpcore. Here's an example: - -.. code-block:: python - - from pulpcore.plugin.util import get_objects_for_user - - class MyViewSet(rest_framework.viewsets.GenericViewSet): - - def get_queryset(self): - qs = super().get_queryset() - permission_name = "my.example_permission" - return get_objects_for_user(self.request.user, permission_name, qs=qs) - -.. warning:: - - If you have custom ViewSets and plan to add Domains compatibility to your plugin, you must - scope your objects by the domain in the ViewSet's ``get_queryset`` method to comply - with Domain's isolation policies. diff --git a/docs/plugin_dev/plugin-writer/concepts/rbac/users_groups.rst b/docs/plugin_dev/plugin-writer/concepts/rbac/users_groups.rst deleted file mode 100644 index 5354c94ab5..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/rbac/users_groups.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _users_and_groups: - -Users and Groups -================ - -Users and Groups are always stored in the Django database. This is a requirement so that ``Roles`` and -``Permissions`` can relate to them. - -:User: Provided by Django with the ``django.contrib.auth.models.User`` model. -:Group: Provided by Django with the ``django.contrib.auth.models.Group`` model. - -Any role can be assigned to either users, groups, or both. This includes both Model-level and -Object-level role assignments. Direct permission assignments are not recommended and cannot be -operated on within the Pulp-API. diff --git a/docs/plugin_dev/plugin-writer/concepts/subclassing/import-export.rst b/docs/plugin_dev/plugin-writer/concepts/subclassing/import-export.rst deleted file mode 100644 index d3a23f5d8d..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/subclassing/import-export.rst +++ /dev/null @@ -1,165 +0,0 @@ -.. _subclassing_import-export: - -Pulp Import/Export -================== - -The Pulp Import/Export process is based around the `Django Import/Export library `_ . -To be 'exportable/importable', your plugin must define a ``modelresource`` module at -``/app/modelresource.py``. The module must contain a ModelResource subclass -for each Model you want to expose, and it must define an ``IMPORT_ORDER`` ordered list -for all such ModelResources. - -QueryModelResource -~~~~~~~~~~~~~~~~~~ - -If you don't need to do anything "special" to export your Model you can subclass -``pulpcore.plugin.importexport.QueryModelResource``. 
This only requires you to provide the -``Meta.model`` class for the Model being export/imported, and to override the -``set_up_queryset(self)`` method to define a limiting filter. QueryModelResource is instantiated -by the export process with the RepositoryVersion being exported (``self.repo_version``). - -An example ``QueryModelResource`` subclasses, for import/exporting the ``Bar`` Model -from ``pulp_foo``, would look like this:: - - class BarResource(QueryModelResource): - """ - Resource for import/export of foo_bar entities - """ - - def set_up_queryset(self): - """ - :return: Bars specific to a specified repo-version. - """ - return Bar.objects.filter(pk__in=self.repo_version.content) - - class Meta: - model = Bar - - -BaseContentResource -~~~~~~~~~~~~~~~~~~~~ - -The ``BaseContentResource`` class provides a base class for exporting ``Content``. -``BaseContentResource`` provides extra functionality on top of ``QueryModelResource`` specific to -handling the exporting and importing of Content such as handling of Content-specific fields like -``upstream_id``. - -An example of subclassing ``BaseContentResource`` looks like:: - - class MyContentResource(BaseContentResource): - """ - Resource for import/export of MyContent. - """ - - def set_up_queryset(self): - """ - :return: MyContent specific to a specified repo-version. - """ - return MyContent.objects.filter(pk__in=self.repo_version.content) - - class Meta: - model = MyContent - - -modelresource.py -~~~~~~~~~~~~~~~~ - -A simple ``modelresource.py`` module is the one for the ``pulp_file`` plugin. It looks like -this:: - - from pulpcore.plugin.importexport import BaseContentResource - from pulp_file.app.models import FileContent - - class FileContentResource(BaseContentResource): - """ - Resource for import/export of file_filecontent entities - """ - - def set_up_queryset(self): - """ - :return: FileContents specific to a specified repo-version. - """ - return FileContent.objects.filter(pk__in=self.repo_version.content) - - class Meta: - model = FileContent - - - IMPORT_ORDER = [FileContentResource] - - -Plugin writers are encouraged to subclass the ``RepositoryResource`` class to enable automatic -repository creation during the import. For the ``pulp_file`` plugin, the following implementation -should be considered:: - - from pulpcore.plugin.modelresources import RepositoryResource - from pulp_file.app.models import FileRepository - - class FileRepositoryResource(RepositoryResource): - """ - A resource for importing/exporting file repository entities - """ - - def set_up_queryset(self): - """ - :return: A queryset containing one repository that will be exported. - """ - return FileRepository.objects.filter(pk=self.repo_version.repository) - - class Meta: - model = FileRepository - - - # the list signifying the order of imports must also include the repository resource class - IMPORT_ORDER = [FileContentResource, FileRepositoryResource] - -For performance reasons, it is important that care is taken when writing resource definitions. If your model -has foreign keys that are exported as such (raw UUID key values), you should define a should a custom -"dehydrate" method for that field to avoid an unnecessary lookup for each instance as seen -`in this issue `_. Else, if -foreign keys are exported using some natural key of the referenced row, then the definition of -`set_up_queryset()` should ensure those references are pre-selected using `select_related()`, otherwise -an N+1 query scenario is likely. 
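
A sketch of those two patterns is below. ``MyContent`` and its ``container`` foreign key are
invented for the example and stand in for whatever related field your resource actually exports.

.. code-block:: python

    from pulpcore.plugin.importexport import BaseContentResource

    from pulp_myplugin.app.models import MyContent  # hypothetical plugin model


    class MyContentResource(BaseContentResource):
        """
        Resource for import/export of MyContent.
        """

        def dehydrate_container(self, content):
            # Export the raw FK value directly, avoiding a per-row lookup of the
            # related object.
            return str(content.container_id)

        def set_up_queryset(self):
            # If the FK were exported via a natural key instead, pre-select the
            # related rows here to avoid an N+1 query pattern.
            return MyContent.objects.filter(
                pk__in=self.repo_version.content
            ).select_related("container")

        class Meta:
            model = MyContent
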
- - -content_mapping -~~~~~~~~~~~~~~~ - -By default, all the Content that gets imported is automatically associated with the Repository it -is stored with inside the export archive. In some cases, this may not be desirable. One such case is -when there is Content that is tied to a sub_repo but not directly to the Repository itself. Another -case is where you may have Content you want imported but not associated with a Repositoy. In such -cases, you can set a ``content_mapping`` property on the Resource. - -The ``content_mapping`` property should be a dictionary that maps repository names to a list of -content_ids. The importer code in pulp will combine the ``content_mappings`` across Resources and -export them to a ``content_mapping.json`` file that it will use during import to map Content to -Repositories. - -Here is an example that deals with subrepos:: - - class MyContentResource(BaseContentResource): - """ - Resource for import/export of MyContent. - """ - - def __init__(self, *args, **kwargs): - """Override __init__ to set content_mapping to a dict.""" - self.content_mapping = {} - super().__init__(*args, **kwargs) - - def set_up_queryset(self): - """Set up the queryset and our content_mapping.""" - content = MyContent.objects.filter(pk__in=self.repo_version.content) - self.content_mapping[self.repository_version.repository.name] = content - - for repo in self.subrepos(self.repo_version): - subrepo_content = repo.latest_repository_version.content - self.content_mapping[repo.name] = subrepo_content - content |= subrepo_content - - return content - - class Meta: - model = MyContent - diff --git a/docs/plugin_dev/plugin-writer/concepts/subclassing/models.rst b/docs/plugin_dev/plugin-writer/concepts/subclassing/models.rst deleted file mode 100644 index 6eb63b9fa4..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/subclassing/models.rst +++ /dev/null @@ -1,134 +0,0 @@ -.. _subclassing-models: - -Models -====== - -For the most part, models provided by plugin writers are just regular `Django Models -`_. - -.. note:: - One slight variation is that the validation is primarily handled in the Django Rest Framework - Serializer. ``.clean()`` is not called. - -Most plugins will implement: - * model(s) for the specific content type(s) used in the plugin, should be subclassed from Content model - * model(s) for the plugin specific remote(s), should be subclassed from Remote model - - -Adding Model Fields -~~~~~~~~~~~~~~~~~~~ - -Each subclassed Model will typically store attributes that are specific to the content type. These -attributes need to be added to the model as ``fields``. You can use any of Django's field types -for your fields. See the `Django field documentation -`_, for more in-depth information on -using these fields. - - -.. note:: - It is required to declare the ``default_related_name``. - -The TYPE class attribute is used for filtering purposes. - -.. code-block:: python - - class FileContent(Content): - """ - The "file" content type. - - Fields: - digest (str): The SHA256 HEX digest. - """ - TYPE = 'file' - digest = models.TextField(null=False) - - class Meta: - default_related_name = "%(app_label)s_%(model_name)s" - - -Here we create a new field ``digest`` using Django's ``TextField``. After adding/modifying a model, you -can make and run database migrations with: - - -.. code-block:: bash - - pulpcore-manager makemigrations $PLUGIN_APP_LABEL - pulpcore-manager migrate - - -.. 
warning:: - Do not use settings directly in the model fields, it will lead to a data migration that is - specific to the users installation in case those settings change. - -If you recognize this syntax, it is because pulpcore-manager is ``manage.py`` configured with -``DJANGO_SETTINGS_MODULE="pulpcore.app.settings"``. You can use it anywhere you normally would use -``manage.py`` or ``django-admin``. - - -Uniqueness -~~~~~~~~~~ - -Model uniqueness (which will also be used as the natural key) is defined by an inner ``class -Meta``. Pulp Core enforces uniqueness constraints at the database level. - -Adding to the simplified ``FileContent`` above: - -.. code-block:: python - - class FileContent(Content): - """ - The "file" content type. - Content of this type represents a single file uniquely - identified by path and SHA256 digest. - Fields: - digest (str): The SHA256 HEX digest. - """ - - TYPE = 'file' - - digest = models.TextField(null=False) - - class Meta: - # Note the comma, this must be a tuple. - unique_together = ('digest',) - default_related_name = "%(app_label)s_%(model_name)s" - -In this example the Content's uniqueness enforced on a single field ``digest``. For a multi-field -uniqueness, simply add other fields. - -.. code-block:: python - - class FileContent(Content): - """ - The "file" content type. - Content of this type represents a single file uniquely - identified by path and SHA256 digest. - Fields: - relative_path (str): The file relative path. - digest (str): The SHA256 HEX digest. - """ - - TYPE = 'file' - - relative_path = models.TextField(null=False) - digest = models.TextField(null=False) - - class Meta: - default_related_name = "%(app_label)s_%(model_name)s" - unique_together = ( - 'relative_path', - 'digest', - ) - - -The example above ensures that content is unique on ``relative_path`` and ``digest`` together. - -ForeignKey Gotchas -~~~~~~~~~~~~~~~~~~ - -The orphan cleanup operation performs mass-deletion of Content units that are not associated with -any repository. Any ForeignKey relationships that refer to Content with a deletion relationship of -``PROTECT`` will cause Orphan cleanup errors like:: - - django.db.models.deletion.ProtectedError: ("Cannot delete some instances of model 'MyContent' - because they are referenced through a protected foreign key: 'MyOtherContent.mycontent'" diff --git a/docs/plugin_dev/plugin-writer/concepts/subclassing/pull-through.rst b/docs/plugin_dev/plugin-writer/concepts/subclassing/pull-through.rst deleted file mode 100644 index 9758919b60..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/subclassing/pull-through.rst +++ /dev/null @@ -1,36 +0,0 @@ -Pull-Through Caching -==================== - -Pull-through caching enables plugins to use remotes on their distributions that will act as an -upstream fallback source when an user requests content from Pulp. The content will be streamed from -the remote and saved in Pulp to be served again in future requests. This feature requires plugins to -provide implementations for the methods below on the subclasses of their Remote and Content objects. - -.. automethod:: pulpcore.app.models.Remote::get_remote_artifact_url - -.. automethod:: pulpcore.app.models.Remote::get_remote_artifact_content_type - -.. automethod:: pulpcore.app.models.Content::init_from_artifact_and_relative_path - -Finally, plugin writers need to expose the ``remote`` field on their distribution serializer to allow -users to add their remotes to their distributions. 
The ``remote`` field is already present on the base -distribution model, so no new migration is needed. - -.. code-block:: python - - class GemDistributionSerializer(DistributionSerializer): - """A Serializer for GemDistribution.""" - - ... - - remote = DetailRelatedField( - required=False, - help_text=_("Remote that can be used to fetch content when using pull-through caching."), - view_name_pattern=r"remotes(-.*/.*)?-detail", - queryset=Remote.objects.all(), - allow_null=True, - ) - - class Meta: - fields = DistributionSerializer.Meta.fields + ("publication", "remote") - model = GemDistribution diff --git a/docs/plugin_dev/plugin-writer/concepts/subclassing/replication.rst b/docs/plugin_dev/plugin-writer/concepts/subclassing/replication.rst deleted file mode 100644 index ae0a04b22e..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/subclassing/replication.rst +++ /dev/null @@ -1,9 +0,0 @@ -Pulp Replication -================ - -The Pulp replication process allows a Pulp instance to discover distributions on an upstream Pulp -and create the necessary remotes, repositories, and distributions to serve the same content as the -upstream Pulp. To be 'replication' compatible, your plugin must define a ``replicator`` module at -``/app/replica.py``. The module must contain a Replicator subclass for each distribution -type you want to be able to replicate. The module must also define ``REPLICATION_ORDER`` ordered -list for all such replicators. \ No newline at end of file diff --git a/docs/plugin_dev/plugin-writer/concepts/subclassing/serializers.rst b/docs/plugin_dev/plugin-writer/concepts/subclassing/serializers.rst deleted file mode 100644 index a3829c66b9..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/subclassing/serializers.rst +++ /dev/null @@ -1,47 +0,0 @@ -.. _subclassing-serializers: - -Serializers -=========== - -`Django Rest Framework Serializers `_ -work "both ways", translating user input to Python objects, and translating Python objects to -user-facing responses. Generally, plugins will create a serializer field for each field on their -model that should be user-facing. - -Most plugins will implement: - * serializer(s) for plugin specific content type(s), should be subclassed from one of - NoArtifactContentSerializer, SingleArtifactContentSerializer, or - MultipleArtifactContentSerializer, depending on the properties of the content type(s) - * serializer(s) for plugin specific remote(s), should be subclassed from RemoteSerializer - * serializer(s) for plugin specific publisher(s), should be subclassed from PublisherSerializer - -Adding Fields -------------- - -For each field on the corresponding model that should be readable or writable by the user, the -serializer needs to add that field as well. - - -.. code-block:: python - - class FileContentSerializer(SingleArtifactContentSerializer): - """ - Serializer for File Content. - """ - - relative_path = serializers.CharField( - help_text="Relative location of the file within the repository" - ) - - class Meta: - fields = SingleArtifactContentSerializer.Meta.fields + ('relative_path',) - model = FileContent - -Help Text -^^^^^^^^^ - -The REST APIs of Pulp Core and each plugin are automatically documented using swagger. Each field's -documentation is generated using the ``help_text`` set on the serializer field, so please be sure -to set this for every field. 
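
As a sketch of the above, a remote serializer for a plugin might look like the following. The
``policy`` field and its choices are illustrative; your remote may expose different options, but
every user-facing field should carry ``help_text``.

.. code-block:: python

    from gettext import gettext as _

    from rest_framework import serializers

    from pulpcore.plugin.serializers import RemoteSerializer
    from pulp_file.app.models import FileRemote


    class FileRemoteSerializer(RemoteSerializer):
        """
        Serializer for FileRemote.
        """

        policy = serializers.ChoiceField(
            help_text=_("The policy to use when downloading content."),
            choices=["immediate", "on_demand", "streamed"],
            default="immediate",
        )

        class Meta:
            fields = RemoteSerializer.Meta.fields
            model = FileRemote
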
- - diff --git a/docs/plugin_dev/plugin-writer/concepts/subclassing/viewsets.rst b/docs/plugin_dev/plugin-writer/concepts/subclassing/viewsets.rst deleted file mode 100644 index 34e4b775f6..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/subclassing/viewsets.rst +++ /dev/null @@ -1,145 +0,0 @@ -.. _subclassing-viewsets: - -Viewsets -======== - -Each `Django Rest Framework Viewset `_ -is a collection of views that provides ``create``, ``update``, ``retrieve``, ``list``, and -``delete``, which coresponds to http ``POST``, ``PATCH``, ``GET``, ``GET``, ``DELETE``, -respectively. Some base classes will not include all of the views if they are inappropriate. For -instance, the ``ContentViewset`` does not include ``update`` because Content Units are immutable in -Pulp 3 (to support Repository Versions). - -Most Plugins will implement: - * viewset(s) for plugin specific content type(s), should be subclassed from ``ContentViewSet``, - ``ReadOnlyContentViewSet`` or ``SingleArtifactContentUploadViewSet`` - * viewset(s) for plugin specific remote(s), should be subclassed from ``RemoteViewSet`` - * viewset(s) for plugin specific publisher(s), should be subclassed from ``PublisherViewSet`` - - -Endpoint Namespacing --------------------- - -Automatically, each "Detail" class is namespaced by the ``app_label`` set in the -``PulpPluginAppConfig`` (this is set by the ``plugin_template``). - -For example, a ContentViewSet for ``app_label`` "foobar" like this: - -.. code-block:: python - - class PackageViewSet(ContentViewSet): - endpoint_name = 'packages' - -The above example will create set of CRUD endpoints for Packages at -``pulp/api/v3/content/foobar/packages/`` and -``pulp/api/v3/content/foobar/packages//`` - - -Detail Routes (Extra Endpoints) -------------------------------- - -In addition to the CRUD endpoints, a Viewset can also add a custom endpoint. For example: - - -.. code-block:: python - - class PackageViewSet(ContentViewSet): - endpoint_name = 'packages' - - @decorators.detail_route(methods=('get',)) - def hello(self, request): - return Response("Hey!") - -The above example will create a simple nested endpoint at -``pulp/api/v3/content/foobar/packages/hello/`` - - -.. _kick-off-tasks: - -Kick off Tasks -^^^^^^^^^^^^^^ - -Some endpoints may need to deploy tasks to the tasking system. The following is an example of how -this is accomplished. - -See :class:`~pulpcore.plugin.tasking.dispatch` for more details. - -.. note:: - - The arguments provided to a task must be JSON serializable, but may contain instances of - `uuid.UUID`. - -.. note:: - - You should always prefer handing primary keys instead of serialized instances of ORM objects to - a task. - -.. code-block:: python - - # We recommend using POST for any endpoints that kick off task. - @detail_route(methods=('post',), serializer_class=RepositorySyncURLSerializer) - # `pk` is a part of the URL - def sync(self, request, pk): - """ - Synchronizes a repository. - The ``repository`` field has to be provided. - """ - remote = self.get_object() - serializer = RepositorySyncURLSerializer(data=request.data, context={'request': request}) - # This is how non-crud validation is accomplished - serializer.is_valid(raise_exception=True) - repository = serializer.validated_data.get('repository') - mirror = serializer.validated_data.get('mirror', False) - - # This is how tasks are kicked off. 
- result = dispatch( - tasks.synchronize, - exclusive_resources=[repository], - shared_resources=[remote], - kwargs={ - 'remote_pk': remote.pk, - 'repository_pk': repository.pk, - 'mirror': mirror - } - ) - # Since tasks are asynchronous, we return a 202 - return OperationPostponedResponse(result, request) - -If a "task_group" is provided as an optional keyword argument, then the dispatched task will be -automatically added to the task group. Once done spawning new tasks for a given task group, -you should call ``.finish()`` on the task group object. - -For more details about the reservation system and task groups, see :ref:`writing-tasks` and -:class:`~pulpcore.plugin.models.TaskGroup`. - - -Content Upload ViewSet -^^^^^^^^^^^^^^^^^^^^^^ - -For single file content types, there is the special ``SingleArtifactContentUploadViewSet`` to -derive from, which allows file uploads in the create method instead of referencing an existing -Artifact. It also allows specifying a ``Repository`` in order to create a new ``RepositoryVersion`` -containing the newly created content. Content creation is then offloaded into a task. -To use that ViewSet, the serializer for the content type should inherit from -``SingleArtifactContentUploadSerializer``. By overwriting the ``deferred_validate`` method -instead of ``validate``, this serializer can do detailed analysis of the given or uploaded Artifact -in order to fill database fields of the content type like "name", "version", etc. This part of -validation is only called in the task context. You can also overwrite the ``retrieve`` method -if you want your content type to be compatible with the functionality that makes sure the -``pulp_href`` of the already existing unit is returned and re-used when attempting to re-upload -duplicate content. If the ``retrieve`` method is not implemented, an exception will be raised. - -If the uploaded content does not need to be stored, plugin writers may derive from the class -``NoArtifactContentUploadViewSet``. The same pattern applies to this workflow. To use this -ViewSet, the associated serializer should also subclass ``NoArtifactContentUploadSerializer``. Note -that when using this Serializer and ViewSet, Pulp still writes the content of an uploaded file to -a temporary file, but that file is removed by the end of the created task. - -If any additional context needs to be passed from the ViewSet to the creation task, the -``get_deferred_context`` method of the ViewSet may be overwritten. Its return value will then be -available as ``self.context`` in the Serializer. - -.. note:: - - Context passed from the ViewSet to the Task must be easily serializable, i.e. one cannot - return the request from ``get_deferred_context``. diff --git a/docs/plugin_dev/plugin-writer/concepts/sync_pipeline/sync_pipeline.rst b/docs/plugin_dev/plugin-writer/concepts/sync_pipeline/sync_pipeline.rst deleted file mode 100644 index e417f36bca..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/sync_pipeline/sync_pipeline.rst +++ /dev/null @@ -1,76 +0,0 @@ -.. _stages-concept-docs: - -Synchronizing Repositories with the async-Pipeline -================================================== - -To accomplish the steps outlined in :ref:`sync-docs` in an efficient way, Pulp provides a high-level -API to construct a pipeline of stages. Those stages work in parallel like an assembly line -using Python's `async` feature in combination with the `asyncio` library.
Each stage takes -designated content units from an incoming queue of type :class:`asyncio.Queue` and performs an -individual task on them before passing them to the outgoing queue that is connected to the next -stage. - -The anatomy of a stage is that it inherits :class:`pulpcore.plugin.stages.Stage` and overwrites -its asynchronous callback :meth:`run`. -In :meth:`run` it can retrieve incoming declarative content individually via the asynchronous -iterator :meth:`self.items` or in batches via :meth:`self.batches`. -It can pass on declarative content with :meth:`self.put`. - -The sync pipeline is headed by a `first_stage` that is supposed to download upstream metadata -and iterate over all upstream content references. For each such reference, it creates a -:class:`pulpcore.plugin.stages.DeclarativeContent` that contains a prefilled but unsaved instance -of a subclass of :class:`pulpcore.plugin.content.Content`, as well as a list of -:class:`pulpcore.plugin.stages.DeclarativeArtifact`. The latter combine an unsaved instance of -:class:`pulpcore.plugin.content.Artifact` with a URL to retrieve it. -The :class:`pulpcore.plugin.stages.DeclarativeContent` objects, which describe what a content unit will -look like when properly downloaded and saved to the database, are passed one by one to the next -pipeline stage. -The responsibility of providing this `first_stage` lies completely in the plugin's domain, since -this is the part of the pipeline specific to the repository type; a minimal sketch of such a stage -is shown at the end of this section. - -The Pulp plugin API provides the following stages, which also comprise the default pipeline in the -following order: - - 1. :class:`pulpcore.plugin.stages.QueryExistingContents` - 2. :class:`pulpcore.plugin.stages.QueryExistingArtifacts` - 3. :class:`pulpcore.plugin.stages.ArtifactDownloader` - 4. :class:`pulpcore.plugin.stages.ArtifactSaver` - 5. :class:`pulpcore.plugin.stages.ContentSaver` - 6. :class:`pulpcore.plugin.stages.RemoteArtifactSaver` - 7. :class:`pulpcore.plugin.stages.ResolveContentFutures` - 8. :class:`pulpcore.plugin.stages.ContentAssociation` - -If the `mirror=True` optional parameter is passed to `DeclarativeVersion`, the pipeline also runs -:class:`pulpcore.plugin.stages.ContentUnassociation` at the end. - -On-demand synchronizing ----------------------- - -See :ref:`on-demand-support`. - -.. _multi-level-discovery: - -Multiple level discovery ------------------------ - -Plugins like `pulp_deb` and `pulp_container` use content artifacts to enumerate more content. -To support this pattern, the declarative content can be associated with an -:class:`asyncio.Future` that is resolved when the content reaches the -:class:`pulpcore.plugin.stages.ResolveContentFutures` stage. -By awaiting this Future, one can implement an informational feedback loop into earlier stages. -If you need to drop :class:`pulpcore.plugin.stages.DeclarativeContent` prematurely from the -pipeline, you can resolve the content using the function `resolve()` without handing -the content to the next stage. The function `resolve()` will unblock the coroutines awaiting the -attached `Future`. - -.. hint:: - - To improve performance when you expect to create a lot of those futures, consider creating - a larger batch before starting to await them. This way the batching in the subsequent - stages will still be exploited. - -.. hint:: - - If you need downloaded artifacts of this content for further discovery, make sure to - provide `deferred_download=False` to the - :class:`pulpcore.plugin.stages.DeclarativeArtifact`.
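As referenced above, here is a hedged sketch of a minimal `first_stage`; ``ExampleContent`` and the ``fetch_metadata()`` helper are hypothetical placeholders for plugin-specific models and metadata parsing:

.. code-block:: python

    from pulpcore.plugin.models import Artifact
    from pulpcore.plugin.stages import DeclarativeArtifact, DeclarativeContent, Stage


    class ExampleFirstStage(Stage):
        """Create DeclarativeContent for every content reference found upstream."""

        def __init__(self, remote):
            super().__init__()
            self.remote = remote

        async def run(self):
            # fetch_metadata() stands in for downloading and parsing upstream metadata.
            for entry in await fetch_metadata(self.remote):
                # ExampleContent is a hypothetical plugin Content subclass.
                content = ExampleContent(relative_path=entry["path"])  # unsaved Content
                artifact = Artifact(sha256=entry["sha256"])  # unsaved Artifact
                da = DeclarativeArtifact(
                    artifact=artifact,
                    url=entry["url"],
                    relative_path=entry["path"],
                    remote=self.remote,
                )
                # Hand the declarative content to the next stage in the pipeline.
                await self.put(DeclarativeContent(content=content, d_artifacts=[da]))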
diff --git a/docs/plugin_dev/plugin-writer/concepts/tasks/add-remove.rst b/docs/plugin_dev/plugin-writer/concepts/tasks/add-remove.rst deleted file mode 100644 index d130a16179..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/tasks/add-remove.rst +++ /dev/null @@ -1,80 +0,0 @@ -Adding and Removing Content -=========================== - -For adding and removing content, Pulp 3 provides a layered plugin API. The docs below explain our -lower level API; this information is helpful to understand how a synchronize task works under the -hood. - -Repository Versions -------------------- - -Starting with Pulp 3, repositories are versioned. A new immutable repository version is created -when its set of content units changes. - -To facilitate the creation of repository versions, a -`pulpcore.plugin.models.RepositoryVersion` context manager is provided. Plugin writers are -strongly encouraged to use RepositoryVersion as a context manager to provide transactional safety, -working directory setup, and database cleanup after encountering failures. - -.. code-block:: python - - with repository.new_version() as new_version: - - # add content manually - new_version.add_content(content) - new_version.remove_content(content) - -.. warning:: - - Any action that adds/removes content to a repository *must* create a new RepositoryVersion. - Every action that creates a new RepositoryVersion *must* be asynchronous (defined as a task). - Task reservations are necessary to prevent race conditions. - -.. _sync-docs: - -Synchronizing ------------- - -.. tip:: - - Please consider using the high level :ref:`stages-concept-docs` for actual implementations. - -Most plugins will define a synchronize task, which fetches content from a remote repository and -adds it to a Pulp repository. - -A typical synchronization task will follow this pattern: - -* Download and analyze repository metadata from a remote source. -* Decide what needs to be added to the repository or removed from it. -* Associate already existing content to a repository by creating an instance of - :class:`~pulpcore.plugin.models.RepositoryContent` and saving it. -* Remove :class:`~pulpcore.plugin.models.RepositoryContent` objects which were identified for - removal. -* For every content unit that should be added to Pulp, create but do not save yet: - - * an instance of ``ExampleContent`` which will later be associated with a repository. - * an instance of :class:`~pulpcore.plugin.models.ContentArtifact` to be able to create relations with - the artifact models. - * an instance of :class:`~pulpcore.plugin.models.RemoteArtifact` to store information about the artifact - from the remote source and to make a relation with the :class:`~pulpcore.plugin.models.ContentArtifact` - created before. - -* If remote content should be downloaded right away (aka ``immediate`` download policy), use - the suggested :ref:`downloading ` solution. If content should be downloaded - later (aka ``on_demand`` or ``background`` download policy), feel free to skip this step. -* Save all artifact and content data in one transaction: - - * in case of downloaded content, create an instance of - :class:`~pulpcore.plugin.models.Artifact`. Set the `file` field to the - absolute path of the downloaded file. Pulp will move the file into place - when the Artifact is saved. The Artifact refers to a downloaded file on a - filesystem and contains calculated checksums for it.
- * in case of downloaded content, update the :class:`~pulpcore.plugin.models.ContentArtifact` with - a reference to the created :class:`~pulpcore.plugin.models.Artifact`. - * create and save an instance of the :class:`~pulpcore.plugin.models.RepositoryContent` to - associate the content to a repository. - * save all created artifacts and content: ``ExampleContent``, - :class:`~pulpcore.plugin.models.ContentArtifact`, - :class:`~pulpcore.plugin.models.RemoteArtifact`. - -* Use :class:`~pulpcore.plugin.models.ProgressReport` to report the progress of some steps if needed. diff --git a/docs/plugin_dev/plugin-writer/concepts/tasks/diagnostics.rst b/docs/plugin_dev/plugin-writer/concepts/tasks/diagnostics.rst deleted file mode 100644 index c566e6808f..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/tasks/diagnostics.rst +++ /dev/null @@ -1,35 +0,0 @@ -Diagnostics -=========== - -When users enable task diagnostics using the ``TASK_DIAGNOSTICS`` setting, all tasks will write out -diagnostic information to data files in ``/var/tmp/pulp//`` directory. - -Memory Analysis ---------------- - -The resident set size (RSS) of the process is measured every 5 seconds and written to a file such as -``/var/tmp/pulp/3367e577-4b09-44b6-9069-4a06c367776a/memory.datum``. - -You can plot this with gnuplot by changing into the directory with the files you want to see then: - -1) Enter the ``gnuplot`` interactive environment. - -2) Paste these commands:: - - set terminal png size 1200,900 enhanced font "Arial, 10" - set output "memory.png" - set ylabel "Task Process Megabytes (MB)" - set xlabel "Seconds since task start" - plot "memory.datum" with lines - -3) Open your png chart saved at memory.png - -Profiling ---------- - -If the ``pyinstrument`` package is installed, a runtime profile of the execution of the task will be -automatically produced and written to a file such as -``/var/tmp/pulp/3367e577-4b09-44b6-9069-4a06c367776a/pyinstrument.html``. - -When opened in a browser, this profile will present a tree showing how much time is being spent in -various functions relative to the total runtime of the task. diff --git a/docs/plugin_dev/plugin-writer/concepts/tasks/publish.rst b/docs/plugin_dev/plugin-writer/concepts/tasks/publish.rst deleted file mode 100644 index 825586540a..0000000000 --- a/docs/plugin_dev/plugin-writer/concepts/tasks/publish.rst +++ /dev/null @@ -1,30 +0,0 @@ -.. _publish-task: - -Publish -======= - -In order to make content files available to clients, users must publish these files. Typically, -users will publish a repository which will make the content in the repository available. - -When publishing a repository, your plugin needs to mimic the layout of both data and metadata. In -the simplest case for content types that don't have metadata, only the content unit data itself -needs to be published. - -In most cases, both metadata and content unit data are required to make a usable publication. It's -important to understand what the required metadata is for your content type. - -**Using a** :class:`~pulpcore.plugin.models.Publication` **context manager is highly encouraged.** On -context exit, the complete attribute is set True provided that an exception has not been raised. -In the event an exception has been raised, the publication is deleted. 
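Before the step-by-step outline that follows, here is a hedged sketch of how the context manager is typically used inside a publish task; ``ExamplePublication`` is a hypothetical plugin-defined publication model and the queryset logic is illustrative only:

.. code-block:: python

    from pulpcore.plugin.models import ContentArtifact, PublishedArtifact, RepositoryVersion


    def publish(repository_version_pk):
        """Hypothetical publish task for a plugin-defined ExamplePublication."""
        repository_version = RepositoryVersion.objects.get(pk=repository_version_pk)

        # On clean exit the context manager marks the publication complete;
        # if an exception escapes, the publication is deleted instead.
        with ExamplePublication.create(repository_version) as publication:
            content_artifacts = ContentArtifact.objects.filter(
                content__in=repository_version.content
            )
            for content_artifact in content_artifacts:
                PublishedArtifact(
                    relative_path=content_artifact.relative_path,
                    publication=publication,
                    content_artifact=content_artifact,
                ).save()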
- -One of the ways to perform publishing: - -* Find :class:`~pulpcore.plugin.models.ContentArtifact` objects which should be published. -* For each of them, create and save an instance of :class:`~pulpcore.plugin.models.PublishedArtifact` - which refers to the :class:`~pulpcore.plugin.models.ContentArtifact` and the - :class:`~pulpcore.app.models.Publication` to which this artifact belongs. -* Generate repository metadata and write it to disk. -* For each of the metadata files, create an instance of - :class:`~pulpcore.plugin.models.PublishedMetadata` using the `create_from_file` constructor. Each - instance relates a metadata file to a :class:`~pulpcore.app.models.Publication`. -* Use :class:`~pulpcore.plugin.models.ProgressReport` to report progress of some steps if needed. diff --git a/docs/plugin_dev/plugin-writer/index.rst b/docs/plugin_dev/plugin-writer/index.rst deleted file mode 100644 index c9c8cdb69c..0000000000 --- a/docs/plugin_dev/plugin-writer/index.rst +++ /dev/null @@ -1,32 +0,0 @@ -Plugin Writer's Guide -===================== - -.. note:: - This documentation is for Pulp Plugin developers. For Pulp Core development, see - our `contributor docs `_. - -Pulp Core does not manage content by itself, but instead relies on plugins to add support for one -content type or another. Examples of content types include RPM packages, Ansible roles, and Container -images. - -This documentation outlines how to create a Pulp plugin that provides features like: - -* Define a new content type and its attributes -* Download and save the new type of content into Pulp Core -* Publish the new type of content, allowing Pulp Core to serve it at a ``distribution`` -* Export content to remote servers or CDNs -* Add custom web application views -* Implement custom features, e.g. dependency solving, retention/deletion policies, etc. - -Along with this guide, it may be useful to refer to our simplest plugin, `pulp_file -`_. - -Additionally, we provide a `Plugin Template `_ which will -take care of a majority of the boilerplate. - -.. toctree:: - :maxdepth: 2 - - planning-guide - concepts/index - plugin-walkthrough diff --git a/docs/plugin_dev/plugin-writer/planning-guide.rst b/docs/plugin_dev/plugin-writer/planning-guide.rst deleted file mode 100644 index a006c3c8db..0000000000 --- a/docs/plugin_dev/plugin-writer/planning-guide.rst +++ /dev/null @@ -1,48 +0,0 @@ -.. _planning-guide: - -Plugin Planning Guide -===================== - -This guide assumes that you are familiar with `general pulp concepts -`_. Usually, the most difficult part -of writing a new plugin is understanding the ecosystem surrounding the content type(s) that you -want to support. - -This page outlines some of the questions a plugin writer should consider while planning and writing -a new plugin. - -What APIs are available from remote repositories? ------------------------------------------------- - -Since remote repositories typically exist to serve content to a client, they usually implement a -web API. It is very helpful to become familiar with this interface in order to understand how -to fetch content into Pulp and subsequently distribute it to the client. - -Some ecosystems have extensive APIs, so it is helpful to understand a general flow to narrow the -research scope. For synchronization, Pulp mimics the behavior of the client, and for -publishing/distributing, Pulp mimics the behavior of the server. - -1. Discover content in a remote repository -2. Retrieve metadata about the content -3. Retrieve files - -What does the metadata look like?
---------------------------------- - -Understanding the structure and content of a content type's metadata is crucial to the design and -function of a plugin. - -**Example:** -When the Container plugin was in the planning phase, engineers got familiar with the `manifest spec -files `_ to understand how to properly design -the workflow of Container content management within the plugin. - - -Which data should be modeled as Content Units? ----------------------------------------------- - -Will this data be added to/removed from a repository individually? If yes, this data could be a -Content Unit. - -Should it be possible to add/remove a subset of this data to a repository? If yes, you should -consider managing this as a smaller unit. diff --git a/docs/plugin_dev/plugin-writer/plugin-walkthrough.rst b/docs/plugin_dev/plugin-writer/plugin-walkthrough.rst deleted file mode 100644 index 73988b5389..0000000000 --- a/docs/plugin_dev/plugin-writer/plugin-walkthrough.rst +++ /dev/null @@ -1,111 +0,0 @@ -Plugin Walkthrough -================== - -This guide assumes that you are familiar with `general pulp concepts -`_ as well as the :ref:`planning-guide`. -It will be helpful to skim the :ref:`plugin-concepts` pages, and refer back to them as you go -through the process. - -Bootstrap your plugin ---------------------- - -Start your new plugin by using the `Plugin Template `_. -Follow the documentation in the README to get a working stub plugin. - -.. _define-content-type: - -Define your plugin Content type -------------------------------- - -To define a new content type(s), e.g. ``ExampleContent``: - -* :class:`pulpcore.plugin.models.Content` should be subclassed and extended with additional - attributes to the plugin needs, -* define ``TYPE`` class attribute which is used for filtering purposes, -* uniqueness should be specified in ``Meta`` class of newly defined ``ExampleContent`` model, -* ``unique_together`` should be specified for the ``Meta`` class of ``ExampleContent`` model, -* create a serializer for your new Content type as a subclass of - :class:`pulpcore.plugin.serializers.NoArtifactContentSerializer`, - :class:`pulpcore.plugin.serializers.SingleArtifactContentSerializer`, or - :class:`pulpcore.plugin.serializers.MultipleArtifactContentSerializer` -* create a viewset for your new Content type. It can be as a subclass of - :class:`pulpcore.plugin.viewsets.ContentViewSet`, and you can define your ``create()`` method based - on the serializer you chose. If you need a read-only viewset, subclass - :class:`pulpcore.plugin.viewsets.ReadOnlyContentViewSet` instead. It's also convenient to subclass - :class:`pulpcore.plugin.viewsets.SingleArtifactContentUploadViewSet` if you need an upload support. - -:class:`~pulpcore.plugin.models.Content` model should not be used directly anywhere in plugin code. -Only plugin-defined Content classes are expected to be used. - -Check ``pulp_file`` implementation of `the FileContent -`_ and its -`serializer `_ -and `viewset `_. -For a general reference for serializers and viewsets, check `DRF documentation -`_. - -Add any fields that correspond to the metadata of your content, which could be the project name, -the author name, or any other type of metadata. - - -.. _define-remote: - -Define your plugin Remote -------------------------- - -To define a new remote, e.g. 
``ExampleRemote``: - -* :class:`pulpcore.plugin.models.Remote` should be subclassed and extended with additional - attributes to the plugin needs, -* define ``TYPE`` class attribute which is used for filtering purposes, -* create a serializer for your new remote as a subclass of - :class:`pulpcore.plugin.serializers.RemoteSerializer`, -* create a viewset for your new remote as a subclass of - :class:`pulpcore.plugin.viewsets.RemoteViewSet`. - -:class:`~pulpcore.plugin.models.Remote` model should not be used directly anywhere in plugin code. -Only plugin-defined Remote classes are expected to be used. - - -There are several important aspects relevant to remote implementation which are briefly mentioned -in the :ref:`object-relationships` section: - -* due to deduplication of :class:`~pulpcore.plugin.models.Content` and - :class:`~pulpcore.plugin.models.Artifact` data, they may already exist and the remote needs to - fetch and use them when they do. -* :class:`~pulpcore.plugin.models.ContentArtifact` associates - :class:`~pulpcore.plugin.models.Content` and :class:`~pulpcore.plugin.models.Artifact`. If - :class:`~pulpcore.plugin.models.Artifact` is not downloaded yet, - :class:`~pulpcore.plugin.models.ContentArtifact` contains ``NULL`` value for - :attr:`~pulpcore.plugin.models.ContentArtifact.artifact`. It should be updated whenever - corresponding :class:`~pulpcore.plugin.models.Artifact` is downloaded - -.. note:: - - Some of these steps may need to behave differently for other download policies. - -The remote implementation suggestion above allows plugin writer to have an understanding and -control at a low level. - - -Define your Tasks ------------------ - -See :ref:`writing-tasks`. Almost all plugins must implement a `sync` task, most implement a -`publish` task as well. - - -Plugin Completeness Checklist ------------------------------- - - * :ref:`Plugin django app is defined using PulpAppConfig as a parent ` - * :ref:`Plugin entry point is defined ` - * `pulpcore is specified as a requirement `_ - * Necessary models/serializers/viewsets are :ref:`defined ` and :ref:`discoverable `. At a minimum: - - * models for plugin content type, remote, publisher - * serializers for plugin content type, remote, publisher - * viewset for plugin content type, remote, publisher - - * :ref:`Errors are handled according to Pulp conventions ` - * Docs for plugin are available (any location and format preferred and provided by plugin writer) diff --git a/docs/plugin_dev/reference/content-protection.rst b/docs/plugin_dev/reference/content-protection.rst deleted file mode 100644 index 6a7a285c54..0000000000 --- a/docs/plugin_dev/reference/content-protection.rst +++ /dev/null @@ -1,94 +0,0 @@ -.. _content-protection: - -Content Protection ------------------- - -By default, the Content app will serve all content, but some deployments want to only serve content -to some users and not others. For example pulp_rpm only wants to give rpms to users who have valid -certificates declaring their paid access to content. To allow total customization of how content is -protected, A plugin writer can define a ``ContentGuard``. - - -Defining a ContentGuard -^^^^^^^^^^^^^^^^^^^^^^^ - -The ``ContentGuard`` is a Master/Detail object provided at -``from pulpcore.plugin.models import ContentGuard``, which provides `these base fields `_. - -In your plugin code, subclass ``ContentGuard`` and optionally add additional fields as necessary to -perform the authentication and authorization. 
Then overwrite the ``permit`` method so that it -returns ``None`` if access is granted and raises a ``PermissionError`` on denial. As with all -Master/Detail objects, a ``TYPE`` class attribute is needed, which is then used in the URL. For -``ContentGuard`` detail objects the URL structure is:: - - ``/pulp/api/v3/contentguards///`` - - -.. note:: - - The `pulp-certguard `_ plugin ships various - ``ContentGuard`` types for users and plugin writers to use together. Plugins can ship their own - content guards too, but look at the existing ones first. - - -Simple Example -^^^^^^^^^^^^^^ - -Here's a trivial example where the client needs to send a header named SECRET_STRING; if its -value matches the recorded value for that ContentGuard instance, the content is given to the user. The -secret both authenticates the user and authorizes them for this Content. - -.. code-block:: python - - from django.db import models - from pulpcore.plugin.models import ContentGuard - - class SecretStringContentGuard(ContentGuard): - - TYPE = 'secret_string' - - secret_string = models.CharField(max_length=255) - - def permit(self, request): - """ - Authorize the specified web request. - - Args: - request (aiohttp.web.Request): A request for a published file. - - Raises: - PermissionError: When the request cannot be authorized. - """ - if request.headers.get('SECRET_STRING') != self.secret_string: - raise PermissionError('Wrong secret string provided.') - - class Meta: - default_related_name = "%(app_label)s_%(model_name)s" - - -End-User use of ContentGuard -############################ - -Users create an instance of a ``SecretStringContentGuard`` and give it a secret string with -``httpie``:: - - http POST http://localhost:24817/pulp/api/v3/contentguards//secret_string/ \ - secret_string='2xlSFgJwOhbLrtIlmYszqHQy7ivzdQo9' - - -Then the user can protect one or more Distributions by specifying ``content_guard``. See the -`ContentGuard creation API `_ for more information. - - -.. _plugin-writers-use-content-protection: - -Plugin Writer use of ContentGuard -################################# - -Plugin writers can also programmatically create detail ``ContentGuard`` instances and have the -detail Distribution their plugin defines enforce its use. This allows plugin writers to offer -content protection features to users with fewer required user steps. diff --git a/docs/plugin_dev/reference/error-handling.rst b/docs/plugin_dev/reference/error-handling.rst deleted file mode 100644 index e035ca020b..0000000000 --- a/docs/plugin_dev/reference/error-handling.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. _error-handling-basics: - -Error Handling -------------- - -Please see the `error-handling -`_ section in the -code guidelines. diff --git a/docs/plugin_dev/reference/how-plugins-work.rst b/docs/plugin_dev/reference/how-plugins-work.rst deleted file mode 100644 index feafbacbcf..0000000000 --- a/docs/plugin_dev/reference/how-plugins-work.rst +++ /dev/null @@ -1,88 +0,0 @@ -How Plugins Work -================ - -.. _plugin-django-application: - -Plugin Django Application ------------------------- - -Like the Pulp Core itself, all Pulp Plugins begin as Django Applications, started like any other -with `pulpcore-manager startapp `. However, instead of subclassing Django's -`django.apps.AppConfig` as seen `in the Django documentation `_, Pulp Plugins identify themselves as plugins to -pulpcore by subclassing :class:`pulpcore.plugin.PulpPluginAppConfig`.
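For orientation, a hedged sketch of such a subclass is shown here; the module path, label, and version are hypothetical, and the required attributes are described in the paragraphs that follow:

.. code-block:: python

    from pulpcore.plugin import PulpPluginAppConfig


    class PulpExamplePluginAppConfig(PulpPluginAppConfig):
        """AppConfig for a hypothetical pulp_example plugin."""

        name = "pulp_example.app"  # dotted path to the plugin's Django application
        label = "example"          # unambiguous label used to namespace the plugin
        version = "1.0.0"          # plugin version string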
- -:class:`pulpcore.plugin.PulpPluginAppConfig` also provides the application autoloading behaviors, -such as automatic registration of viewsets with the API router, which adds plugin endpoints. - -The :class:`pulpcore.plugin.PulpPluginAppConfig` subclass for any plugin must set a few required -attributes: - -* ``name`` attribute defines the importable dotted Python location of the plugin application (the - Python namespace that contains at least models and viewsets). -* ``label`` attribute to something that unambiguously labels the plugin in a clear way for users. - See `how it is done `_ in - the ``pulp_file`` plugin. -* ``version`` attribute to the string representing the version. - - -.. _plugin-entry-point: - -pulpcore.plugin Entry Point ---------------------------- - -The Pulp Core discovers available plugins by inspecting the pulpcore.plugin entry point. - -Once a plugin has defined its :class:`pulpcore.plugin.PulpPluginAppConfig` subclass, it should add -a pointer to that subclass using the Django ``default_app_config`` convention, e.g. -``default_app_config = pulp_myplugin.app.MyPulpPluginAppConfig`` somewhere in the module that -contains your Django application. The Pulp Core can then be told to use this value to discover your -plugin, by pointing the pulpcore.plugin entry point at it. If, for example, we set -``default_app_config`` in ``pulp_myplugin/__init__.py``, the setup.py ``entry_points`` would look like -this: - -.. code-block:: python - - entry_points={ - 'pulpcore.plugin': [ - 'pulp_myplugin = pulp_myplugin:default_app_config', - ] - } - -If you do not wish to use Django's ``default_app_config`` convention, the name given to the -``pulpcore.plugin`` entry point must be an importable identifier with a string value containing the -importable dotted path to your plugin's application config class, just as ``default_app_config`` -does. - -Check out ``pulp_file`` plugin: `default_app_config -`_ and `setup.py example -`_. - - -.. _mvs-discovery: - -Model, Serializer, Viewset Discovery ------------------------------------- - -The structure of plugins should, where possible, mimic the layout of the Pulp Core Plugin API. For -example, model classes should be based on platform classes imported from -:mod:`pulpcore.plugin.models` and be defined in the `models` module or directory of a plugin app. -ViewSets should be imported from :mod:`pulpcore.plugin.viewsets`, and be defined in the `viewsets` -module of a plugin app, and so on. - -This matching of module names is required for the Pulp Core to be able to auto-discover plugin -components, particularly for both models and viewsets. - -Take a look at `the structure `_ of -the ``pulp_file`` plugin. - - -Serializer and OpenAPI schema ------------------------------ - -Serializers are converted to OpenAPI objects through `drf-spectacular `_. -It inspects all serializer fields to describe them in the OpenAPI schema. -Due to the `DRF issue `_ -it is preferable to use ``CharField`` instead of ``URLField``. -Otherwise The REST API hosted at ``/pulp/api/v3/`` may hide some paths. - diff --git a/docs/plugin_dev/reference/how-to-doc-api.rst b/docs/plugin_dev/reference/how-to-doc-api.rst deleted file mode 100644 index eca3bc4ecc..0000000000 --- a/docs/plugin_dev/reference/how-to-doc-api.rst +++ /dev/null @@ -1,93 +0,0 @@ -Documenting your API --------------------- - -Each instance of Pulp hosts dynamically generated REST API documentation located at -`http://pulpserver/pulp/api/v3/docs/`. 
- -The documentation is generated using `ReDoc `_ based on the -`OpenAPI 3.0 `_ schema -generated by Pulp. The schema generator iterates over all the Views and Viewsets in every plugin -and generates the schema based on the information provided by Viewset doc strings, Viewset method -docstrings, associated Model's names, View docstrings, and the help text from serializers. - -Individual parameters and responses are documented automatically based on the Serializer field type. -A field's description is generated from the "help_text" kwarg when defining serializer fields. - -Response status codes can be generated through the `Meta` class on the serializer: - -.. code-block:: python - - from rest_framework.status import HTTP_400_BAD_REQUEST - - class SnippetSerializerV1(serializers.Serializer): - title = serializers.CharField(required=False, allow_blank=True, max_length=100) - - class Meta: - error_status_codes = { - HTTP_400_BAD_REQUEST: 'Bad Request' - } - -You may disable schema generation for a view by setting ``schema`` to ``None``: - -.. code-block:: python - - class CustomView(APIView): - # ... - schema = None # Will not appear in schema - -.. note:: - ``Meta.ref_name`` is a string that is used as the model definition name for - the serializer class. If this attribute is not specified, all serializers - have an implicit name derived from their class name. In order to avoid - possible name collisions between plugins, plugins must define ``ref_name`` - on the Meta class using ``.`` as a prefix. - - For the model based serializers offered by pulpcore (i.e. - :class:`~pulpcore.plugin.serializers.ModelSerializer` and derived - serializers), ``Meta.ref_name`` will be set correctly automatically. There is no - need to set ``Meta.ref_name`` in this case. - - If a serializer has no associated model, you need to set ``Meta.ref_name`` - explicitly. For example, if the ``SnippetSerializerV1`` from above is for - the plugin providing the ``snippets`` app, ``ref_name`` could be set like - this:: - - class SnippetSerializerV1(serializers.Serializer): - title = serializers.CharField(required=False, allow_blank=True, max_length=100) - - class Meta: - ref_name = "snippets.Snippet" - -.. note:: - - Plugin authors can provide manual overrides using the `@extend_schema decorator - `_ - -The OpenAPI schema for pulpcore and all installed plugins can be downloaded from the ``pulp-api`` -server: - -.. code-block:: bash - - curl -o api.json http://localhost:24817/pulp/api/v3/docs/api.json - -The OpenAPI schema for a specific plugin can be downloaded by specifying the plugin's module name -as a GET parameter. For example for pulp_rpm only endpoints use a query like this: - -.. code-block:: bash - - curl -o api.json http://localhost:24817/pulp/api/v3/docs/api.json?component=rpm - -OpenAPI Tags ------------- - -Tags are used to group operations logically into categories, such as RepositoriesRpm, -DistributionsRpm, ContentPackages, and so on. -Pulpcore OpenAPI Tags are generated from URL paths. - -Path ``/pulp/api/v3/repositories/rpm/rpm/`` yields the tag ``repositories: rpm`` which is turned into -``RepositoriesRpmApi`` in the bindings client. - -It is possible to customize the tag by setting the ``pulp_tag_name`` attribute at your view. - -Setting ``pulp_tag_name = "Pulp: RPM Repo"`` at ``RpmRepositoryViewSet`` yields the tag ``Pulp: RPM Repo`` -which is turned into ``PulpRPMRepoApi`` in the bindings client. 
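Putting that last point into code, a hedged sketch for a hypothetical viewset could look like this:

.. code-block:: python

    from pulpcore.plugin.viewsets import ContentViewSet


    class ExampleContentViewSet(ContentViewSet):
        """Hypothetical viewset that overrides the auto-generated OpenAPI tag."""

        endpoint_name = "examples"
        # Without this attribute the tag would be derived from the URL path.
        pulp_tag_name = "Content: Examples"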
diff --git a/docs/plugin_dev/reference/index.rst b/docs/plugin_dev/reference/index.rst deleted file mode 100644 index 9d0084da6f..0000000000 --- a/docs/plugin_dev/reference/index.rst +++ /dev/null @@ -1,17 +0,0 @@ -Plugin Writing Reference Material -================================= - -This section includes in-depth material that is topic specific. - -.. toctree:: - :maxdepth: 2 - - object-relationships - how-plugins-work - error-handling - how-to-doc-api - on-demand-support - releasing - content-protection - metadata-signing - task-scheduling diff --git a/docs/plugin_dev/reference/metadata-signing.rst b/docs/plugin_dev/reference/metadata-signing.rst deleted file mode 100644 index 38ac445fc7..0000000000 --- a/docs/plugin_dev/reference/metadata-signing.rst +++ /dev/null @@ -1,112 +0,0 @@ -.. _metadata-signing: - -Metadata Signing -================ - -Plugin writers wishing to enable users to sign metadata need to add a new field ``metadata_signing_service`` -to their implementation of a repository and/or publication. This field should be exposed to users who consume -content via the REST API. Users may afterwards specify which signing service will be used to sign the -metadata when creating a publication. - -Every signing service will always be an instance of a subclass of the ``SigningService`` model. Plugin -writers may either use the existing ``AsciiArmoredDetachedSigningService``, or use that as a reference for -writing their own signing service model. - -The ``SigningService`` base class already provides the fully implemented ``sign()`` method, the signature of -the ``validate()`` method (which must be implemented by each subclass), and the ``save()`` method (which -calls the ``validate()`` method, but is otherwise fully implemented). - -.. note:: - - The ``sign()`` function calls the provided script in order to give the administrator the - freedom to define how the signature is obtained. It is their responsibility to set up the - software or hardware facilities for signing and make the script use them. The plugin writer, - however, should provide a reasonably easy default script based on e.g. a simple call to ``gpg``. - -In order to sign metadata, plugin writers are required to call the ``sign()`` method of the signing service -being used. This method invokes the signing script (or other executable) which is provided by the -administrator who instantiates a concrete signing service. Instantiating/creating a concrete signing service -will ultimately call the ``save()`` method, which will in turn call ``validate()``. As a result, it is up to -the ``validate()`` method to ensure the signing service script provided by the administrator actually provides -any signatures, signature files, and return values, as required by the individual ``SigningService`` subclass. - -This is why implementing a signing service model other than ``AsciiArmoredDetachedSigningService`` simply -requires inheriting from ``SigningService`` and then implementing ``validate()``. - -.. note:: - The existing ``AsciiArmoredDetachedSigningService`` requires a signing script that creates a detached - ASCII-armored signature file, and prints valid JSON in the following format to stdout: - - {"file": "filename", "signature": "filename.asc"} - - Here "filename" is a path to the original file that was signed (passed to the signing script by the - ``sign()`` method), and "filename.asc" is a path to the signature file created by the script.
- - The script may read the fingerprint of the key it should use for signing, from the - ``PULP_SIGNING_KEY_FINGERPRINT`` environment variable. - A ``CORRELATION_ID`` environment variable is also added by default. - It is possible to pass a dictionary of environment variables to the signing script if need be. - - The json is converted to a python dict and returned by the ``sign()`` method. If an error occurs, a - runtime error is raised instead. All of this is enforced by the ``validate()`` method at the time of - instantiation. - - For more information see the corresponding :ref:`workflow documentation `. - -The following procedure may be taken into account for the plugin writers: - - 1. Let us assume that a file repository contains the field ``metadata_signing_service``: - - .. code-block:: python - - metadata_signing_service = models.ForeignKey( - AsciiArmoredDetachedSigningService, - on_delete=models.SET_NULL, - null=True - ) - - In the serializer, there is also added a corresponding field that serializes ``metadata_signing_service``, - like so: - - .. code-block:: python - - metadata_signing_service = serializers.HyperlinkedRelatedField( - help_text="A reference to an associated signing service.", - view_name="signing-services-detail", - queryset=models.AsciiArmoredDetachedSigningService.objects.all(), - many=False, - required=False, - allow_null=True - ) - - 2. Retrieve a desired signing script via the field ``metadata_signing_service`` stored in the repository: - - .. code-block:: python - - metadata_signing_service = FileRepository.objects.get(name='foo').metadata_signing_service - - A plugin writer can create a new repository with an associated signing service in the following two ways: - - - Using Python: - - .. code-block:: python - - signing_service = AsciiArmoredDetachedSigningService.objects.get(name='sign-metadata') - FileRepository.objects.create(name='foo', metadata_signing_service=signing_service) - - - Using HTTP calls: - - .. code-block:: bash - - http POST :24817/pulp/api/v3/repositories/file/file/ name=foo metadata_signing_service=http://localhost:24817/pulp/api/v3/signing-services/5506c8ac-8eae-4f34-bb5a-3bc08f82b088/ - - 3. Sign a file by calling the method ``sign()``: - - .. code-block:: python - - with tempfile.TemporaryDirectory("."): - try: - signature = metadata_signing_service.sign(metadata.filepath) - except RuntimeError: - raise - add_to_repository(metadata, signature) diff --git a/docs/plugin_dev/reference/object-relationships.rst b/docs/plugin_dev/reference/object-relationships.rst deleted file mode 100644 index ab42717d49..0000000000 --- a/docs/plugin_dev/reference/object-relationships.rst +++ /dev/null @@ -1,53 +0,0 @@ -.. _object-relationships: - -Object Relationships -==================== - -There are models which are expected to be used in plugin implementation, so understanding what they -are designed for is useful for a plugin writer. Each model below has a link to its documentation -where its purpose, all attributes and relations are listed. - -Here is a gist of how models are related to each other and what each model is responsible for. - -* :class:`~pulpcore.app.models.Repository` contains :class:`~pulpcore.plugin.models.Content`. - :class:`~pulpcore.plugin.models.RepositoryContent` is used to represent this relation. -* :class:`~pulpcore.plugin.models.Content` can have :class:`~pulpcore.plugin.models.Artifact` - associated with it. :class:`~pulpcore.plugin.models.ContentArtifact` is used to represent this - relation. 
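To complement the steps above, here is a hedged sketch of the kind of signing script an administrator might register for ``AsciiArmoredDetachedSigningService``; the ``gpg`` invocation is illustrative and must be adapted to the local signing setup:

.. code-block:: python

    #!/usr/bin/env python3
    # Illustrative signing script: produce a detached, ASCII-armored signature with gpg
    # and print the JSON structure expected by AsciiArmoredDetachedSigningService.
    import json
    import os
    import subprocess
    import sys

    file_path = sys.argv[1]
    signature_path = f"{file_path}.asc"
    fingerprint = os.environ["PULP_SIGNING_KEY_FINGERPRINT"]

    subprocess.run(
        [
            "gpg", "--batch", "--yes", "--armor", "--detach-sign",
            "--local-user", fingerprint,
            "--output", signature_path,
            file_path,
        ],
        check=True,
    )

    print(json.dumps({"file": file_path, "signature": signature_path}))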
-* :class:`~pulpcore.plugin.models.ContentArtifact` can have - :class:`~pulpcore.plugin.models.RemoteArtifact` associated with it. -* :class:`~pulpcore.plugin.models.Artifact` is a file. -* :class:`~pulpcore.plugin.models.RemoteArtifact` contains information about - :class:`~pulpcore.plugin.models.Artifact` from a remote source, including URL to perform - download later at any point. -* :class:`~pulpcore.plugin.models.Remote` knows specifics of the plugin - :class:`~pulpcore.plugin.models.Content` to put it into Pulp. - :class:`~pulpcore.plugin.models.Remote` defines how to synchronize remote content. Pulp - Platform provides support for concurrent :ref:`downloading ` of remote content. - Plugin writer is encouraged to use one of them but is not required to. -* :class:`~pulpcore.plugin.models.PublishedArtifact` refers to - :class:`~pulpcore.plugin.models.ContentArtifact` which is published and belongs to a certain - :class:`~pulpcore.app.models.Publication`. -* :class:`~pulpcore.plugin.models.PublishedMetadata` is a file generated while publishing and - belongs to a certain :class:`~pulpcore.app.models.Publication`. -* :class:`~pulpcore.app.models.Publication` is a result of publish operation of a specific - :class:`~pulpcore.plugin.models.RepositoryVersion`. -* :class:`~pulpcore.app.models.Distribution` defines how a publication is distributed for a specific - :class:`~pulpcore.plugin.models.Publication`. -* :class:`~pulpcore.plugin.models.ProgressReport` is used to report progress of the task. -* :class:`~pulpcore.plugin.models.GroupProgressReport` is used to report progress of the task group. - - -An important feature of the current design is deduplication of -:class:`~pulpcore.plugin.models.Content` and :class:`~pulpcore.plugin.models.Artifact` data. -:class:`~pulpcore.plugin.models.Content` is shared between :class:`~pulpcore.app.models.Repository`, -:class:`~pulpcore.plugin.models.Artifact` is shared between -:class:`~pulpcore.plugin.models.Content`. -See more details on how it affects remote implementation in :ref:`define-remote` section. - - -Check ``pulp_file`` `implementation `_ to see how all -those models are used in practice. -More detailed explanation of model usage with references to ``pulp_file`` code is below. - - diff --git a/docs/plugin_dev/reference/on-demand-support.rst b/docs/plugin_dev/reference/on-demand-support.rst deleted file mode 100644 index 418eeccc98..0000000000 --- a/docs/plugin_dev/reference/on-demand-support.rst +++ /dev/null @@ -1,117 +0,0 @@ -.. _on-demand-support: - -On-Demand Support ------------------ - -"On-Demand support" refers to a plugin's ability to support downloading and creating Content but not -downloading their associated Artifacts. By convention, users expect the `Remote.policy` attribute to -determine when Artifacts will be downloaded. See the user docs for specifics on the user -expectations there. - -.. _on-demand-support-with-da: - -Adding Support when using DeclarativeVersion -============================================ - -Plugins like `pulp-file` sync content using `DeclarativeVersion`. -On-demand support can be added by specifying `deferred_download=True` at instantiation of -:class:`pulpcore.plugin.stages.DeclarativeArtifact`. - -`Remote.policy` can take several values. 
To easily translate them, consider a snippet like this one taken from `pulp-file`:: - - async def run(self): - # Interpret download policy - deferred_download = (self.remote.policy != Remote.IMMEDIATE) - <...> - da = DeclarativeArtifact( - artifact=artifact, - url=url, - relative_path=relative_path, - remote=self.remote, - deferred_download=deferred_download, - ) - <...> - -.. hint:: - - The `deferred_download` flag is used at the artifact level, to support on-demand concepts for - plugins that need some artifacts to be downloaded immediately in all cases. - See also :ref:`multi-level-discovery`. - - -Adding Support when using a Custom Stages API Pipeline -====================================================== - -Plugins like `pulp-rpm` that sync content using a custom pipeline can enable on-demand support by -excluding the `QueryExistingArtifacts`, `ArtifactDownloader` and `ArtifactSaver` stages. Without -these stages included, no Artifact downloading will occur. Content unit saving will occur, which -will correctly create the on-demand content units. - -`Remote.policy` can take several values. To easily maintain the pipeline, consider a snippet like -this one inspired by `pulp-rpm`:: - - download = (remote.policy == Remote.IMMEDIATE) # Interpret policy to download Artifacts or not - stages = [first_stage] - if download: - stages.extend([QueryExistingArtifacts(), ArtifactDownloader(), ArtifactSaver()]) - stages.extend(the_rest_of_the_pipeline) # This adds the Content and Association Stages - -.. warning:: - - Skipping those Stages does not work with :ref:`multi-level-discovery`. - If you need some artifacts downloaded anyway, follow the example in - :ref:`on-demand-support-with-da` and include the artifact stages in the custom pipeline. - -.. hint:: - - Consider also excluding the `ResolveContentFutures` stage. - -What if the Custom Pipeline Needs Artifact Downloading? -======================================================= - -For example, `pulp-container` uses a custom Stages API Pipeline and relies on Artifact downloading to -download metadata that is saved and stored as a Content unit. This metadata defines more Content -units to be created without downloading their corresponding Artifacts. The on-demand support for -this type needs to download Artifacts for those content types, but not others. - -By specifying `deferred_download=False` in the `DeclarativeArtifact`, regardless of the overall sync -policy, lazy downloading for that specific artifact can be prohibited. - -.. hint:: - - See also :ref:`on-demand-support-with-da` - -How Does This Work at the Model Layer? -====================================== - -The presence of a `RemoteArtifact` is what allows the Pulp content app to fetch and serve that -Artifact on-demand. So a Content unit is on-demand if and only if: - -1. It has a saved Content unit - -2. A `ContentArtifact` for each `Artifact` is saved that the Content unit would have referenced. - Note: the `ContentArtifact` is created in both on-demand and not on-demand cases. - -3. Instead of creating and saving an `Artifact`, a `RemoteArtifact` is created. This contains any - known digest or size information allowing for automatic validation when the `Artifact` is - fetched. - - -How does the Content App work with this Model Layer? -==================================================== - -When a request for content arrives, it is matched against a `Distribution` and eventually against a -specific Artifact path, which actually matches against a `ContentArtifact`, not an `Artifact`.
If an -`Artifact` exists, it is served to the client. Otherwise, a `RemoteArtifact` allows the `Artifact` to -be downloaded on-demand and served to the client. - -If `remote.policy == Remote.ON_DEMAND`, the Artifact is saved on the first download. This causes -future requests to serve the already-downloaded and validated Artifact. - -.. note:: - In situations where multiple Remotes synced and provided the same `Content` unit, only one - `Content` unit is created but many `RemoteArtifact` objects may be created. The Pulp Content app - will try all `RemoteArtifact` objects that correspond with a `ContentArtifact`. It's possible an - unexpected `Remote` could be used when fetching that equivalent `Content` unit. Similar warnings - are in the user documentation on on-demand. diff --git a/docs/plugin_dev/reference/releasing.rst b/docs/plugin_dev/reference/releasing.rst deleted file mode 100644 index fcd85b7d6c..0000000000 --- a/docs/plugin_dev/reference/releasing.rst +++ /dev/null @@ -1,56 +0,0 @@ -Releasing Your Plugin -===================== - -Depending on pulpcore --------------------- - -The Plugin API is not yet stable, but starting with pulpcore 3.7.0, a -:ref:`deprecation process ` is in place which makes it safe for a plugin -to declare compatibility with the next, unreleased pulpcore version also. For example, a plugin -compatible with pulpcore 3.7 would declare compatibility up to pulpcore 3.8. In this example, use -the following requirements string:: - - pulpcore>=3.7,<3.9 - -This ensures that when pulpcore 3.8 is released, users can receive it immediately and use it without -any issue. However, when 3.9 comes out, any deprecations introduced in the ``pulpcore.plugin`` API in -3.8 will be removed, so preventing your plugin from working with pulpcore 3.9 maintains -compatibility. - -Sometimes plugins can be compatible with older versions of pulpcore, and in those cases the oldest -version should be allowed. For example, if your plugin is compatible with pulpcore 3.5, and you just -tested it against 3.7 and it's still compatible, use this requirements string:: - - - pulpcore>=3.5,<3.9 - - -Release process --------------- - -Here are the steps to take to release a minor Plugin version, e.g. pulp_file 1.11.0: - -1. Via the Github Actions, trigger a `"Create new release branch" `_ job. -2. Check out the target plugin release branch locally and set ``pulpcore_branch`` and - ``pulpcore_pip_version_specifier`` accordingly in the template_config file. -3. Pull in the latest CI changes from the plugin_template. Ensure you have the latest copy of the upstream - remote. :: - - [user@localhost plugin_template]$ git remote -v - origin git@github.com:user/plugin_template.git (fetch) - origin git@github.com:user/plugin_template.git (push) - upstream git@github.com:pulp/plugin_template.git (fetch) - upstream git@github.com:pulp/plugin_template.git (push) - [user@localhost plugin_template]$ git branch - * main - [user@localhost plugin_template]$ git pull upstream main - [user@localhost plugin_template]$ ./plugin-template --github - - Make the PR against the target plugin release branch and merge it. -4. Via the Github Actions, trigger a `"Release pipeline" `_ job - by specifying the release branch and the tag of the release. -5. Once the release is available, make an announcement on the Pulp Discourse. See `example `_. -6. The CI automation will create PRs with the Changelog update and Versions bump that will need to - be merged. - -To release a patch Plugin version, e.g. pulp_file 1.11.1, start at step 4.
diff --git a/docs/plugin_dev/reference/task-scheduling.rst b/docs/plugin_dev/reference/task-scheduling.rst deleted file mode 100644 index c9de1dfe99..0000000000 --- a/docs/plugin_dev/reference/task-scheduling.rst +++ /dev/null @@ -1,17 +0,0 @@ -.. _task-scheduling: - -Task Scheduling --------------- - -.. warning:: This feature is only accessible by direct manipulation of - ``TaskSchedule`` objects. It is targeted at plugin writers and no API access is planned. - -Pulp supports scheduling of tasks. Scheduled tasks will be dispatched shortly after their -``next_dispatch`` time, and be rescheduled one ``dispatch_interval`` after that, if the latter is -set. By specifying the ``dispatch_interval`` as ``timedelta(days=1)`` you can expect the task -dispatch to stably happen at the same time every day. Until the last task of the same schedule enters a -final state, a new task will not be dispatched. Scheduling is done by the worker processes, -therefore scheduled task dispatching will be missed if all workers are offline. After an outage -window, overdue schedules will dispatch at most one task, but, depending on timing, they may be -rescheduled shortly thereafter. The task schedule API at ``/pulp/api/v3/task-schedules/`` is -provided to read the task schedules. diff --git a/docs/plugins.rst b/docs/plugins.rst deleted file mode 100644 index d1fd96a446..0000000000 --- a/docs/plugins.rst +++ /dev/null @@ -1,29 +0,0 @@ -.. _plugins: - -Plugins -======= - -Plugins add support for a type of content to Pulp. For example, the -`file_plugin `_ adds support for Pulp to manage files. - -Each plugin has its own documentation that contains setup, workflow, and conceptual information: - -* `Pulp RPM plugin `_. -* `Pulp File plugin `_. -* `Pulp Container plugin `_. -* `Pulp Ansible plugin `_. -* `Pulp Debian plugin `_. -* `Pulp Python plugin `_. -* `Pulp Gem plugin `_. -* `Pulp Chef Cookbook plugin `_. -* `Pulp Maven plugin `_. -* `Ansible GalaxyNG plugin `_. -* `Pulp Certguard plugin `_. -* `Pulp 2-to-3 Migration plugin `_. - - -And don't hesitate to :ref:`contact us` with any questions during development. -Let us know when the plugin is ready and we will be happy to add it to the list of available plugins for Pulp! - -.. note:: - Are we missing a plugin? Let us know via the pulp-dev@redhat.com mailing list.
diff --git a/staging_docs/reference/api-reference/content-app.md b/docs/reference/api-reference/content-app.md similarity index 100% rename from staging_docs/reference/api-reference/content-app.md rename to docs/reference/api-reference/content-app.md diff --git a/staging_docs/reference/api-reference/download.md b/docs/reference/api-reference/download.md similarity index 100% rename from staging_docs/reference/api-reference/download.md rename to docs/reference/api-reference/download.md diff --git a/staging_docs/reference/api-reference/exceptions.md b/docs/reference/api-reference/exceptions.md similarity index 100% rename from staging_docs/reference/api-reference/exceptions.md rename to docs/reference/api-reference/exceptions.md diff --git a/staging_docs/reference/api-reference/index.md b/docs/reference/api-reference/index.md similarity index 100% rename from staging_docs/reference/api-reference/index.md rename to docs/reference/api-reference/index.md diff --git a/staging_docs/reference/api-reference/models.md b/docs/reference/api-reference/models.md similarity index 100% rename from staging_docs/reference/api-reference/models.md rename to docs/reference/api-reference/models.md diff --git a/staging_docs/reference/api-reference/serializers.md b/docs/reference/api-reference/serializers.md similarity index 100% rename from staging_docs/reference/api-reference/serializers.md rename to docs/reference/api-reference/serializers.md diff --git a/staging_docs/reference/api-reference/stages.md b/docs/reference/api-reference/stages.md similarity index 100% rename from staging_docs/reference/api-reference/stages.md rename to docs/reference/api-reference/stages.md diff --git a/staging_docs/reference/api-reference/storage.md b/docs/reference/api-reference/storage.md similarity index 100% rename from staging_docs/reference/api-reference/storage.md rename to docs/reference/api-reference/storage.md diff --git a/staging_docs/reference/api-reference/tasking.md b/docs/reference/api-reference/tasking.md similarity index 100% rename from staging_docs/reference/api-reference/tasking.md rename to docs/reference/api-reference/tasking.md diff --git a/staging_docs/reference/api-reference/util.md b/docs/reference/api-reference/util.md similarity index 100% rename from staging_docs/reference/api-reference/util.md rename to docs/reference/api-reference/util.md diff --git a/staging_docs/reference/api-reference/viewsets.md b/docs/reference/api-reference/viewsets.md similarity index 100% rename from staging_docs/reference/api-reference/viewsets.md rename to docs/reference/api-reference/viewsets.md diff --git a/staging_docs/reference/main.md b/docs/reference/main.md similarity index 100% rename from staging_docs/reference/main.md rename to docs/reference/main.md diff --git a/staging_docs/reference/platform-api/app/apps.md b/docs/reference/platform-api/app/apps.md similarity index 100% rename from staging_docs/reference/platform-api/app/apps.md rename to docs/reference/platform-api/app/apps.md diff --git a/staging_docs/reference/platform-api/app/auth.md b/docs/reference/platform-api/app/auth.md similarity index 100% rename from staging_docs/reference/platform-api/app/auth.md rename to docs/reference/platform-api/app/auth.md diff --git a/staging_docs/reference/platform-api/app/index.md b/docs/reference/platform-api/app/index.md similarity index 100% rename from staging_docs/reference/platform-api/app/index.md rename to docs/reference/platform-api/app/index.md diff --git a/staging_docs/reference/platform-api/app/models.md 
b/docs/reference/platform-api/app/models.md similarity index 100% rename from staging_docs/reference/platform-api/app/models.md rename to docs/reference/platform-api/app/models.md diff --git a/staging_docs/reference/platform-api/app/response.md b/docs/reference/platform-api/app/response.md similarity index 100% rename from staging_docs/reference/platform-api/app/response.md rename to docs/reference/platform-api/app/response.md diff --git a/staging_docs/reference/platform-api/app/serializers.md b/docs/reference/platform-api/app/serializers.md similarity index 100% rename from staging_docs/reference/platform-api/app/serializers.md rename to docs/reference/platform-api/app/serializers.md diff --git a/staging_docs/reference/platform-api/app/settings.md b/docs/reference/platform-api/app/settings.md similarity index 100% rename from staging_docs/reference/platform-api/app/settings.md rename to docs/reference/platform-api/app/settings.md diff --git a/staging_docs/reference/platform-api/app/urls.md b/docs/reference/platform-api/app/urls.md similarity index 100% rename from staging_docs/reference/platform-api/app/urls.md rename to docs/reference/platform-api/app/urls.md diff --git a/staging_docs/reference/platform-api/app/viewsets.md b/docs/reference/platform-api/app/viewsets.md similarity index 100% rename from staging_docs/reference/platform-api/app/viewsets.md rename to docs/reference/platform-api/app/viewsets.md diff --git a/staging_docs/reference/platform-api/constants.md b/docs/reference/platform-api/constants.md similarity index 100% rename from staging_docs/reference/platform-api/constants.md rename to docs/reference/platform-api/constants.md diff --git a/staging_docs/reference/platform-api/exceptions.md b/docs/reference/platform-api/exceptions.md similarity index 100% rename from staging_docs/reference/platform-api/exceptions.md rename to docs/reference/platform-api/exceptions.md diff --git a/staging_docs/reference/platform-api/index.md b/docs/reference/platform-api/index.md similarity index 100% rename from staging_docs/reference/platform-api/index.md rename to docs/reference/platform-api/index.md diff --git a/staging_docs/reference/platform-api/tasking.md b/docs/reference/platform-api/tasking.md similarity index 100% rename from staging_docs/reference/platform-api/tasking.md rename to docs/reference/platform-api/tasking.md diff --git a/docs/release_process.rst b/docs/release_process.rst deleted file mode 100644 index f85aeae7e0..0000000000 --- a/docs/release_process.rst +++ /dev/null @@ -1,59 +0,0 @@ -.. _pulpcore_release_process: - -Pulpcore Release Process -======================== - -Here are the steps to take to release a Pulpcore version. New Y-releases of Pulpcore must take all of them. -**A new Z-release need only execute steps 2, 3, and 4.** - - * **"I am releasing a new Y-branch of Pulpcore (e.g., 3.23)"**: - - 1. Via the Github Actions, trigger a `"Create new release branch" `_ job. - - * **"I am releasing a new Z-release of Pulpcore (e.g., 3.23.0, 3.22.12)"**: - - 2. Via the Github Actions, trigger a `"Release pipeline" `_ job by specifying the release branch (X.Y) and the tag (X.Y.Z) of the release. - - 3. Once the release is available, make an announcement on Pulp discourse, in the "Announcements" category. See `example `_. - - 4. The CI automation will create PRs with the Changelog update and Versions bump that will need to be merged. - - * **"I have released a new Y-release of Pulpcore, followup actions"**: - - 5. 
Arrange for a new oci-image for that release by following the `"oci-images Release Instructions" `_. - - 6. Update the ``ci_branches`` stanza in `pulpcore's template.config.yml `_. This stanza should always (and only) contain: - - * The most-current (i.e., newly-released) branch. - - * All branches in use by supported downstream products (see below). These are branches we will consider backporting selected bugfixes to. - - 7. Monitor `pulpcore pull-requests `_ for creation of a PR such as `"Update supported versions" `_. Such PRs are created by `this job `_. The job may have been disabled if there hasn't been any release-activity in the repository for at least 60 days. You will need to re-enable it in this case. - -Some possible failures of **Step 2**, above, include: - - * If the release-tag is new but not based on current-dev, the workflow will complain and fail. - - * If the release-tag is for an existing release (by accident), the workflow won't fail until the docs-pub step. Cleaning this up can be exciting. - -Active branches as of 2023-05-16: ---------------------------------- - * pulpcore - - * 3.23 (galaxyNG/4.7) - - * 3.22 (katello/4.9) - - * 3.21 (katello/4.7, galaxyNG/4.6, RHUI/4.3) - - * 3.18 (katello/4.5) - - * 3.16 (katello/4.3) - - * pulp_file - - * 1.12 (katello/4.9) - - * 1.11 (katello/4.7) - - * 1.10 (katello/4.3, 4.5) diff --git a/docs/rest_api.rst b/docs/rest_api.rst deleted file mode 100644 index 7cb720c2c7..0000000000 --- a/docs/rest_api.rst +++ /dev/null @@ -1,11 +0,0 @@ -REST API -======== - -.. note:: - - The REST API documentation is `here `_. - -The documentation is auto-generated based on the OpenAPI schema for the REST API. The hosted -documentation is broken up between ``pulpcore`` and each plugin's documentation site. -Users can view the REST API documentation for their instance of Pulp by pointing their browser at -``http://:24817/pulp/api/v3/docs/``.
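Because the hosted documentation is generated from the OpenAPI schema, the same schema can be pulled straight from a running instance. A minimal sketch, assuming a local instance on the default API port and that the schema is exposed as ``api.json`` under the docs path::

    # Fetch the OpenAPI schema that the REST API docs are built from
    http GET :24817/pulp/api/v3/docs/api.json > api.json

    # Or open the browsable documentation for this instance
    xdg-open http://localhost:24817/pulp/api/v3/docs/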
diff --git a/docs/static/task-status-v1.yaml b/docs/static/task-status-v1.yaml deleted file mode 100644 index da21c48aa4..0000000000 --- a/docs/static/task-status-v1.yaml +++ /dev/null @@ -1,55 +0,0 @@ -$schema: http://json-schema.org/draft-07/hyper-schema -$id: https://github.com/pulp/pulpcore/blob/main/docs/static/task-status-v1.yaml -type: object -properties: - pulp_href: - description: URI for the task in the pulp API - type: string - examples: - - /pulp/api/v3/tasks/018f973c-ad7b-7f03-96d0-b38a42c18100/ - pulp_created: - description: Created timestamp for the task - type: string - format: date-time - examples: - - 2024-05-20T18:21:27.292394Z - pulp_last_updated: - description: Last updated timestamp for the task - type: string - format: date-time - examples: - - 2024-05-20T18:21:27.292405Z - name: - description: Name of the task - type: string - examples: - - pulp_file.app.tasks.synchronizing.synchronize - state: - description: State of the task - type: string - enum: - - waiting - - skipped - - running - - completed - - failed - - canceled - - canceling - unblocked_at: - description: The time the task became unblocked - type: string - format: date-time - examples: - - 2024-05-20T18:21:27.317792Z - started_at: - description: The time the task started executing - type: string - format: date-time - examples: - - 2024-05-20T18:21:27.349481Z - finished_at: - description: The time the task finished executing - type: string - format: date-time - examples: - - 2024-05-20T18:21:28.074560Z diff --git a/docs/tech_preview.rst b/docs/tech_preview.rst deleted file mode 100644 index ab925a2c3e..0000000000 --- a/docs/tech_preview.rst +++ /dev/null @@ -1,8 +0,0 @@ -Tech Previews -============= - -The following features are currently being released as part of tech preview: - -* Support for Open Telemetry -* Upstream replicas -* Domains - Multi-Tenancy diff --git a/docs/template_gitref b/docs/template_gitref deleted file mode 100644 index 3d9674dcd0..0000000000 --- a/docs/template_gitref +++ /dev/null @@ -1 +0,0 @@ -2021.08.26-361-gcd6f9f0 diff --git a/docs/troubleshooting.rst b/docs/troubleshooting.rst deleted file mode 100644 index bca7e63e79..0000000000 --- a/docs/troubleshooting.rst +++ /dev/null @@ -1,40 +0,0 @@ -Troubleshooting -=============== - - -.. _enabling-debug-logging: - -Enabling Debug Logging ----------------------- - -By default Pulp logs at INFO level, but enabling DEBUG logging can be a helpful thing to get more -insight when things don't go as expected. This can be enabled with dynaconf using the examples -below. - -Designating a Python-based settings file, and putting the DEBUG logging configuration there:: - - export PULP_SETTINGS=/etc/pulp/settings.py - echo "LOGGING = {'dynaconf_merge': True, 'loggers': {'': {'handlers': ['console'], 'level': 'DEBUG'}}}" >> /etc/pulp/settings.py - -Or via environment variable:: - - PULP_LOGGING='@json {"dynaconf_merge": true, "loggers": {"": {"handlers": ["console"], "level": "DEBUG"}}}' - -.. tip:: - - As a workaround, you could specify the entire config with the `PULP_LOGGING` environment variable - and avoid using the "merge" feature from dynaconf. In that case you would specify - ``'level': 'DEBUG'`` in addition to your current config shown with ``dynaconf list``. - -Then when starting Pulp you should see a lot more information logged. - -To ensure you've enabled the settings correctly, view them with the ``dynaconf list`` command (for -more information, see :ref:`viewing-settings`). 
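To narrow that check down to just the logging configuration, the dynaconf CLI's key filter can help; a small sketch, assuming the installed ``dynaconf`` version supports the ``-k`` option on ``list``::

    # Print only the effective LOGGING setting
    dynaconf list -k LOGGING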
If configured correctly you should see:: - - $ dynaconf list - - LOGGING {'disable_existing_loggers': False, - 'loggers': {'': {'filters': ['correlation_id'], - 'handlers': ['console'], - 'level': 'DEBUG'}, # <--- the DEBUG level - diff --git a/staging_docs/user/guides/create-client-bindings.md b/docs/user/guides/create-client-bindings.md similarity index 100% rename from staging_docs/user/guides/create-client-bindings.md rename to docs/user/guides/create-client-bindings.md diff --git a/staging_docs/user/guides/create-domains.md b/docs/user/guides/create-domains.md similarity index 100% rename from staging_docs/user/guides/create-domains.md rename to docs/user/guides/create-domains.md diff --git a/staging_docs/user/guides/manage-labels.md b/docs/user/guides/manage-labels.md similarity index 100% rename from staging_docs/user/guides/manage-labels.md rename to docs/user/guides/manage-labels.md diff --git a/staging_docs/user/guides/reclaim-disk-space.md b/docs/user/guides/reclaim-disk-space.md similarity index 100% rename from staging_docs/user/guides/reclaim-disk-space.md rename to docs/user/guides/reclaim-disk-space.md diff --git a/staging_docs/user/guides/repair-pulp.md b/docs/user/guides/repair-pulp.md similarity index 100% rename from staging_docs/user/guides/repair-pulp.md rename to docs/user/guides/repair-pulp.md diff --git a/staging_docs/user/guides/troubleshoot-tasks.md b/docs/user/guides/troubleshoot-tasks.md similarity index 100% rename from staging_docs/user/guides/troubleshoot-tasks.md rename to docs/user/guides/troubleshoot-tasks.md diff --git a/staging_docs/user/guides/update-repo-retention.md b/docs/user/guides/update-repo-retention.md similarity index 100% rename from staging_docs/user/guides/update-repo-retention.md rename to docs/user/guides/update-repo-retention.md diff --git a/staging_docs/user/guides/upload-publish.md b/docs/user/guides/upload-publish.md similarity index 100% rename from staging_docs/user/guides/upload-publish.md rename to docs/user/guides/upload-publish.md diff --git a/staging_docs/user/guides/use-complex-filters.md b/docs/user/guides/use-complex-filters.md similarity index 100% rename from staging_docs/user/guides/use-complex-filters.md rename to docs/user/guides/use-complex-filters.md diff --git a/staging_docs/user/learn/concepts.md b/docs/user/learn/concepts.md similarity index 100% rename from staging_docs/user/learn/concepts.md rename to docs/user/learn/concepts.md diff --git a/staging_docs/user/learn/from-pulp-2.md b/docs/user/learn/from-pulp-2.md similarity index 100% rename from staging_docs/user/learn/from-pulp-2.md rename to docs/user/learn/from-pulp-2.md diff --git a/staging_docs/user/learn/lifecycle-promotion.md b/docs/user/learn/lifecycle-promotion.md similarity index 100% rename from staging_docs/user/learn/lifecycle-promotion.md rename to docs/user/learn/lifecycle-promotion.md diff --git a/staging_docs/user/learn/on-demand-downloading.md b/docs/user/learn/on-demand-downloading.md similarity index 100% rename from staging_docs/user/learn/on-demand-downloading.md rename to docs/user/learn/on-demand-downloading.md diff --git a/staging_docs/user/learn/pulp-versioning.md b/docs/user/learn/pulp-versioning.md similarity index 100% rename from staging_docs/user/learn/pulp-versioning.md rename to docs/user/learn/pulp-versioning.md diff --git a/staging_docs/user/tutorials/index.md b/docs/user/tutorials/index.md similarity index 100% rename from staging_docs/user/tutorials/index.md rename to docs/user/tutorials/index.md diff --git 
a/docs/versioning.rst b/docs/versioning.rst deleted file mode 100644 index 7f94cc93ec..0000000000 --- a/docs/versioning.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. _versioning: - -Versioning -========== - -Pulp uses a version scheme ``x.y.z``, which is based on `Semantic Versioning -`_. Briefly, ``x.y.z`` releases may only contain bugfixes (no features), -``x.y`` releases may only contain backwards compatible changes (new features, bugfixes), and ``x`` -releases may break backwards compatibility. - -Plugin API ---------- - -The plugin API is provided by the pulpcore package and is not versioned independently. ``x.y.z`` -releases of pulpcore should provide backwards compatible releases of the Plugin API, but ``x.y`` -releases might bring backwards incompatible changes to the plugin API. - -We expect the plugin API to eventually be Semantically Versioned so that only ``x`` releases of -pulpcore will bring backwards incompatible changes to the plugin API. Until then, we recommend -plugins pin to ``x.y`` releases of pulpcore. diff --git a/docs/workflows/alternate-content-sources.rst b/docs/workflows/alternate-content-sources.rst deleted file mode 100644 index 6a3729a44e..0000000000 --- a/docs/workflows/alternate-content-sources.rst +++ /dev/null @@ -1,64 +0,0 @@ -Alternate Content Sources -========================= - -.. warning:: This feature requires plugin support to work correctly. - -Overview --------- - -Pulp supports the concept of Alternate Content Sources (ACS), which sync content using a remote. -Each content source is a potential alternate provider of files that are associated with content -units in Pulp. - -An ACS is useful when dealing with an unreliable or slow internet connection to remote -repositories. Also, when some parts of the repositories are already present on the local -filesystem, configuring a remote pointing to, e.g., ``file://path/to/the/repo`` enables the -related ACS to fetch that content faster. Similarly, if a mirror of a CDN exists that is known -to be geographically closer to clients, an ACS pointing at that mirror can help as well. - -Setting up an ACS tells Pulp to first check the alternative sources of content when pulling -remote content. ACS have a global scope. Thus, alternative sources will be used in all -future synchronization tasks regardless of the remote specified at sync time, as long as the -checksums of the remote artifacts match. - -Creating ACS ------------- - -To create an ACS, you'll need a Remote with the "on_demand" policy. You can have an ACS point to -multiple Repositories by specifying the ``paths`` parameter. Each path will be appended to the -Remote's URL. - -.. code-block:: - - pulp acs create --name --remote --path --path - -.. note:: - - The ``path`` option is optional and can be specified multiple times. If a path is not provided, - the URL of your remote is used to search for content. - -Updating ACS ------------- - -To update an ACS, use a similar call but with the ``update`` command: - -.. code-block:: - - pulp acs update --name --remote - -To add or remove paths, use the ``path`` subcommand: - -.. code-block:: - - pulp acs path add --name --path - pulp acs path remove --name --path - -Refreshing ACS --------------- - -To make an ACS available the next time you sync, you will need to call the ``refresh`` command. It -will go through your paths and catalog content from your content source. - -..
code-block:: - - pulp acs refresh --name diff --git a/docs/workflows/complex-filtering.rst b/docs/workflows/complex-filtering.rst deleted file mode 100644 index 909723eebe..0000000000 --- a/docs/workflows/complex-filtering.rst +++ /dev/null @@ -1,53 +0,0 @@ -Complex Filtering -================= - -In addition to the usual querystring filters, Pulp provides a special ``q`` filter, that allows you -to combine other filters with `NOT`, `AND` and `OR` operations. - -.. warning:: - - In order to prevent arbitrarily complex queries, the maximum complexity of the expressions - explained below is limited to 8. See the examples below for how complexity is calculated. - -For a given list endpoint, all the other existing (non ordering) filters can be used in these -expressions. - -The grammar, given sufficient whitespace to tokenize, is basically:: - - EXPRESSION = FILTER_EXPR | NOT_EXPR | AND_EXPR | OR_EXPR | "(" EXPRESSION ")" - NOT_EXPR = "NOT" EXPRESSION - AND_EXPRESSION = EXPRESSION "AND" EXPRESSION - OR_EXPRESSION = EXPRESSION "OR" EXPRESSION - FILTER_EXPRESSION = FILTERNAME "=" STRING - STRING = SIMPLE_STRING | QUOTED_STRING - -Some example ``q`` expressions and their complexity are:: - - pulp_type__in='core.rbac' - # complexity: 1 = 1 (filter expression) - - NOT pulp_type="core.rbac" - # complexity: 2 = 1 (NOT) + 1 (filter expression) - - pulp_type__in=core.rbac,core.content_redirect - # complexity: 1 = 1 (filter expression) - - pulp_type="core.rbac" OR pulp_type="core.content_redirect" - pulp_type="core.rbac" AND name__contains=GGGG - pulp_type="core.rbac" AND name__iexact=gGgG - pulp_type="core.rbac" AND name__contains="naïve" - # complexity: 3 = 1 (AND/OR) + 2 (filter expression) - - pulp_type="core.rbac" AND name__icontains=gg AND NOT name__contains=HH - # complexity: 5 = 1 (AND/OR) + 1 (NOT) + 3 (filter expression) - - NOT (pulp_type="core.rbac" AND name__icontains=gGgG) - pulp_type="core.rbac" AND NOT name__contains="naïve" - # complexity: 4 = 1 (AND/OR) + 1 (NOT) + 2 (filter expression) - - pulp_type="core.rbac" AND( name__icontains=gh OR name__contains="naïve") - # complexity: 5 = 2 (AND/OR) + 3 (filter expression) - - pulp_type="core.rbac" OR name__icontains=gh OR name__contains="naïve" - # complexity: 4 = 1 (AND/OR) + 3 (filter expression) - diff --git a/docs/workflows/domains-multi-tenancy.rst b/docs/workflows/domains-multi-tenancy.rst deleted file mode 100644 index cb875f02c5..0000000000 --- a/docs/workflows/domains-multi-tenancy.rst +++ /dev/null @@ -1,150 +0,0 @@ -.. _domains: - -Domains - Multi-Tenancy in Pulp -=============================== - -.. warning:: This feature requires plugin support to work correctly. - -.. warning:: This feature is provided as a tech preview and could change in backwards incompatible - ways in the future. - -Overview --------- - -Domains are an optional feature that enables true multi-tenancy in Pulp. Enabling domains allows -multiple tenants to use the same Pulp instance in a safe and isolated manner without worry that -other users can modify or interfere with your workflows. Each domain acts as a unique namespace for -that user and their Pulp objects, completely isolated from the other domains. To this end each -domain can be customized to use their own storage backend to store their artifacts and content, -ensuring each user has complete control over the content they manage in Pulp. - -Domains are meant for Pulp admins that need more multi-tenancy abilities than are provided through -current RBAC features. 
Domains help greatly when multiple, distinctly separate tenants share the same Pulp instance, -without having to resort to creating multiple Pulp instances. See -:ref:`RBAC Overview` for Pulp's RBAC capabilities if you are unsure what Pulp's RBAC can -currently do. - -Enabling Domains ----------------- - -Domains are off by default and can be enabled by setting ``DOMAIN_ENABLED = True`` in your settings -file and restarting Pulp. In order to enable domains, each plugin needs to be domain compatible. If -a plugin is not compatible, Pulp will fail to start. See the list below for the plugins that are -currently domain compatible. Once domains are enabled, all existing objects in Pulp will be found under the -``default`` domain. - -.. _domain-plugins: - -Current Domain Compatible Plugins: - -- pulp_file>=1.13.0 -- pulp_certguard>=1.6.0 - -.. warning:: - - Before turning on domains, you should let all currently running tasks finish. - -Creating Domains ----------------- - -Domains have three important fields: a unique ``name``, the backend ``storage class`` and the -``storage settings`` for the storage backend. See the :ref:`Storage` documentation for the -available storage backends and settings. The domain name must be unique and is used in the URL path -after the :ref:`API_ROOT`, e.g. ``/pulp//api/v3/``. You can also customize -the content app behavior for your domain through the fields ``redirect_to_object_storage`` and -``hide_guarded_distributions``. See :ref:`settings` for more details on these settings. - -.. code-block:: - - pulp domains create \ - --name \ - --storage-class \ - --storage-settings - - Specific example: - - pulp domains create \ - --name foo \ - --description foo \ - --storage-class pulpcore.app.models.storage.FileSystem \ - --storage-settings "{\"MEDIA_ROOT\": \"/var/lib/pulp/media/\"}" - -.. note:: - - ``default``, ``content``, and ``api`` are reserved names that can not be used during creation - or update. The ``default`` domain can not be updated or deleted. - -.. note:: - - To delete a domain, all objects within that domain must be deleted first, including artifacts and - orphaned content. - -.. warning:: Changing the ``storage-class`` or ``storage-settings`` of an in-use domain is - dangerous and can result in a broken domain. - -.. _using-domains: - -Using Domains -------------- - -Once domains are enabled, all URLs in Pulp will require the domain name in the path after the -:ref:`API_ROOT` for the Pulp API, e.g. ``/pulp//api/v3/``, or after the -:ref:`CONTENT_PATH_PREFIX` for the Content App, e.g. -``/pulp/content//``. Objects present in Pulp before enabling domains will now be -present under the ``default`` domain. To work in a domain you must specify the domain's name in the -URL. - -.. code-block:: - - # List repositories in 'test' domain - pulp --domain test repository list - - # Create a File Repository 'foo' in 'foo' domain - pulp --domain foo file repository create --name foo - - # Create a File Repository 'foo' in 'boo' domain (Possible because of separate domains) - pulp --domain boo file repository create --name foo - - # See Exposed Distributions in 'default' domain - pulp distribution list - -Domains are isolated from each other and perform their own deduplication and uniqueness checks -within their domain. Therefore multiple domains can each have their own repository named 'foo'; a -capability not available without domains, as repository names were unique within a Pulp system but -are now unique within a domain.
This also means that content and artifact deduplication is no longer -system wide, but instead performed at the domain level. Since domains can each have their own unique -storage backend, duplicate content across domains could be stored in multiple locations, and it would -be inappropriate to try to deduplicate across domains. - -Almost all Pulp objects and operations work the same within their domain. Uploading, syncing, -publishing, and distributing workflows are all supported with domains. Objects are scoped to their -domain and will not appear in other domains even if you have permissions on those domains. Plugins -that support RBAC will now also have access to a new permission level on the domain. Assigning a -role at the domain-level will allow users to operate with those permissions only within that domain. -Current global(model)-level roles should be converted to domain-level if you do not want the user -to have permissions across all domains. - -.. code-block:: - - # Delete the global-level role - pulp user role-assignment remove --username --role --object "" - - # Assign the role at the domain-level - pulp user role-assignment add --username --role --domain - -.. note:: - - Operations on resources across separate domains are not allowed, e.g. you cannot add content - from one domain to the repository of another domain even if you own both domains. - -.. warning:: - - Pulp Export and Import are currently not supported with domains enabled. - -There are notable objects in Pulp, ``AccessPolicies``, ``Roles``, ``Users``, and ``Groups``, that -are not a part of domains and remain global across the system. These objects are closely intertwined -with the RBAC system and currently do not make sense to be unique at the domain level. Objects -that are not a part of a domain are readable from any domain (with the correct permissions), but can -only be operated on within the ``default`` domain, i.e. ``Roles`` can be read from any domain but can -only be created from the ``default`` domain. - diff --git a/docs/workflows/exposing-content.rst b/docs/workflows/exposing-content.rst deleted file mode 100644 index 4bb61744b8..0000000000 --- a/docs/workflows/exposing-content.rst +++ /dev/null @@ -1,139 +0,0 @@ -Exposing Content -================ - -Overview --------- - -Content, e.g. rpms or docker/oci containers, loaded into Pulp is only served by Pulp if made available -through a :term:`Distribution`. There are three options available to plugin writers. - -* Auto-distribution of a Repository -* Manual distribution of a RepositoryVersion -* Manual distribution of a Publication - -The three workflows cannot be used together. Typically, a plugin and that plugin's users will use -either the ``repository`` and ``repository_version`` options or ``publication``, but not both. It -comes down to whether a plugin uses a :term:`Publication` or not. If it does, it will use the -``publication`` attribute. If not, it can use the ``repository`` or ``repository_version`` -attributes. - -Distributions have a ``base_path`` which is the portion of the URL a given :term:`Distribution` will -be rooted at. There is also a Pulp configured setting called :ref:`CONTENT_PATH_PREFIX ` -which defaults to ``/pulp/content/``. With this default, a Distribution with a ``base_path`` of -``someexample`` or ``a/nested/example`` can be expected at, respectively:: - - /pulp/content/someexample/ - /pulp/content/a/nested/example/ - - -.. note:: - - The ``base_path`` must have no overlapping components.
So if a :term:`Distribution` with - ``base_path`` of ``a/path/foo`` existed, you could not make a second :term:`Distribution` with a - ``base_path`` of ``a/path`` or ``a`` because both overlap with ``a/path/foo``. Pulp will - stop you from doing this, which is why :term:`Distribution` creation or updates to ``base_path`` - are run serially by the tasking system. - - -Auto-Distribution of a Repository ---------------------------------- - -In this workflow, you pair a :term:`Repository` and a :term:`Distribution` such that the Distribution -will serve the latest RepositoryVersion associated with that Repository. - -First, let's make a Repository named ``foo`` and save its URL as ``REPO_HREF``:: - - http POST http://localhost:24817/pulp/api/v3/repositories/container/container/ name=foo - export REPO_HREF=$(http :24817/pulp/api/v3/repositories/container/container/ | jq -r '.results[] | select(.name == "foo") | .pulp_href') - -Then let's make a :term:`Distribution` that will distribute ``foo`` at base_path ``mypath``:: - - http POST :24817/pulp/api/v3/distributions/container/container/ name='baz' base_path='mypath' repository=$REPO_HREF - -As soon as this is created, any :term:`RepositoryVersion` created will be immediately available at -base_path ``mypath``. With the default :ref:`CONTENT_PATH_PREFIX ` that would -be ``/pulp/content/mypath/`` - -.. note:: - - This is only available for plugins that do not require a :term:`Publication`. A - :term:`Publication` is required for content types that have "metadata". See your plugin - documentation for details on whether it uses a :term:`Publication` or not. - - -Manual Distribution of a RepositoryVersion ------------------------------------------- - -In this workflow, you already have a :term:`RepositoryVersion` created. You then want to distribute -its content at the base_path ``mypath`` using a :term:`Distribution`. In this case you manually -associate the :term:`Distribution` with the :term:`RepositoryVersion` using the -``repository_version`` option of the :term:`Distribution`. - -First create a :term:`RepositoryVersion` with some `pulp_ansible `_ content in it:: - - # Create a Repository - http POST :24817/pulp/api/v3/repositories/ansible/ansible/ name=foo - export REPO_HREF=$(http :24817/pulp/api/v3/repositories/ansible/ansible/ | jq -r '.results[] | select(.name == "foo") | .pulp_href') - - # Create an AnsibleRemote to sync roles from galaxy.ansible.com - http POST :24817/pulp/api/v3/remotes/ansible/ansible/ name=bar url='https://galaxy.ansible.com/api/v1/roles/?namespace__name=elastic' - - export REMOTE_HREF=$(http :24817/pulp/api/v3/remotes/ansible/ansible/ | jq -r '.results[] | select(.name == "bar") | .pulp_href') - - # Sync the repo with the remote - http POST ':24817'$REPO_HREF'sync/' remote=$REMOTE_HREF - sleep 3 # wait for the sync to happen - export REPO_VERSION_HREF=$(http GET ':24817'$REPO_HREF'versions/1/' | jq -r '.pulp_href') - -Now with your :term:`RepositoryVersion` saved as ``REPO_VERSION_HREF`` you can have the -:term:`Distribution` serve it at base_path ``dev``:: - - http POST :24817/pulp/api/v3/distributions/file/file/ name='baz' base_path='dev' repository_version=$REPO_VERSION_HREF - -As soon as this is created, the :term:`RepositoryVersion` will be immediately available at base_path -``dev``. With the default :ref:`CONTENT_PATH_PREFIX ` that would be -``/pulp/content/dev/`` - -.. note:: - - This is only available for plugins that do not require a :term:`Publication`.
A - :term:`Publication` is required for content types that have "metadata". See your plugin - documentation for details on whether it uses a :term:`Publication` or not. - - -Manual Distribution of a Publication ------------------------------------- - -In this workflow, you already have a :term:`Publication` created. You then want to distribute its -content at the base_path ``mypath`` using a :term:`Distribution`. In this case you manually -associate the :term:`Distribution` with the :term:`Publication` using the ``publication`` option of -the :term:`Distribution`. - -First create a :term:`Publication` with some `pulp_file `_ -content in it:: - - # Create a Repository - http POST :24817/pulp/api/v3/repositories/file/file/ name=foo - export REPO_HREF=$(http :24817/pulp/api/v3/repositories/file/file/ | jq -r '.results[] | select(.name == "foo") | .pulp_href') - - # Create a FileRemote to sync files from the fixtures server - http POST :24817/pulp/api/v3/remotes/file/file/ name='bar' url='https://fixtures.pulpproject.org/file/PULP_MANIFEST' - export REMOTE_HREF=$(http :24817/pulp/api/v3/remotes/file/file/ | jq -r '.results[] | select(.name == "bar") | .pulp_href') - - # Sync the repo with the remote - http POST ':24817'$REPO_HREF'sync/' remote=$REMOTE_HREF - sleep 3 # wait for the sync to happen - - # Create a Publication - http POST :24817/pulp/api/v3/publications/file/file/ repository=$REPO_HREF - export PUBLICATION_HREF=$(http :24817/pulp/api/v3/publications/file/file/ | jq -r '.results[0] | .pulp_href') - -Now with your :term:`Publication` saved as ``PUBLICATION_HREF`` you can have the -:term:`Distribution` serve it at base_path ``bar``:: - - http POST :24817/pulp/api/v3/distributions/file/file/ name='baz' base_path='bar' publication=$PUBLICATION_HREF - -As soon as this is created, the :term:`Publication` will be immediately available at base_path -``bar``. With the default :ref:`CONTENT_PATH_PREFIX ` that would be -``/pulp/content/bar/`` diff --git a/docs/workflows/import-export.rst b/docs/workflows/import-export.rst deleted file mode 100644 index 99eb08761e..0000000000 --- a/docs/workflows/import-export.rst +++ /dev/null @@ -1,430 +0,0 @@ -Pulp Import and Export -====================== - -Overview -^^^^^^^^ - -There is a use-case for extracting the content and :term:`Artifacts` for a set of -:term:`RepositoryVersions` out of a running instance of Pulp and into -a file that can then be transferred to another Pulp instance and imported. This is not -the Pulp-to-Pulp sync case; the assumption is that the receiving Pulp instance is -network-isolated. - -The high-level workflow for this use case is: - -1. On the Upstream Pulp instance, an Exporter is defined for the set of -:term:`Repositories` that are to be exported to some Downstream Pulp instance. - -2. That Exporter is requested to produce and execute an Export for the current -:term:`RepositoryVersions` of the specified -:term:`Repositories`. - -3. The resulting ``.tar`` Export is transferred to the appropriate Downstream. - -4. On the Downstream Pulp instance, an Importer is defined that maps the incoming -Upstream :term:`Repositories` to matching Downstream -:term:`Repositories`. - -5. That Importer is requested to produce and execute an Import, pointing to the provided -export file from the Upstream. - -In order to minimize space utilization, import/export operates on sets of -:term:`Repositories`. This allows the Export operation to export shared -:term:`Artifacts` only once per export, rather than once for each -:term:`Repository` being exported.
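Condensed to its essentials, the round trip looks roughly like the sketch below. The names (``REPO_HREF``, ``EXPORTER_HREF``, ``IMPORTER_HREF``) and the export filename are placeholders; the detailed sections that follow show each step with real commands::

    # Upstream: define an Exporter for the repositories to ship, then run it
    http POST :24817/pulp/api/v3/exporters/core/pulp/ name=test-exporter repositories:=[\"${REPO_HREF}\"] path=/tmp/exports/
    http POST :24817${EXPORTER_HREF}exports/

    # ...transfer the resulting .tar (and -toc.json) files to the Downstream...

    # Downstream: define an Importer, then import the transferred file
    http POST :24817/pulp/api/v3/importers/core/pulp/ name=test-importer
    http POST :24817${IMPORTER_HREF}imports/ path=/tmp/imports/EXPORT_FILE.tar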
- -Definitions -^^^^^^^^^^^ -Upstream - Pulp instance whose :term:`RepositoryVersions` we want to export -Downstream - Pulp instance that will be importing those :term:`RepositoryVersions` -ModelResource - entity that understands how to map the metadata for a specific Model - owned/controlled by a plugin to an exportable file-format - (see `django-import-export `_) -Exporter - resource that exports content from Pulp for a variety of different use cases -PulpExporter - a kind of Exporter that is specifically used to export data from an Upstream - for consumption by a Downstream -PulpExport - specific instantiation/run of a PulpExporter -Export file - compressed tarfile containing database content and :term:`Artifacts` for - :term:`RepositoryVersions`, generated during execution of an Export -PulpImporter - resource that accepts an Upstream PulpExporter export file, and manages - the process of importing the content and :term:`Artifacts` included -PulpImport - specific instantiation/run of a PulpImporter -Repository-mapping - configuration file that provides the ability to map an Upstream :term:`Repository` - to a Downstream :term:`Repository`, into which the Upstream's :term:`RepositoryVersion` - should be imported by a PulpImporter -Import order - for complicated repository-types, managing relationships requires that - ModelResources be imported in order. Plugins are responsible for specifying the - import-order of the ModelResources they own - -Assumptions -^^^^^^^^^^^ - -The import/export workflow operates on a set of assumptions. Violating them will result -in error-messages as described below. - -On-Demand content not supported -------------------------------- - -Export will not operate on :term:`RepositoryVersions` that have -been synchronized using ``policy=on_demand`` or ``policy=streamed``. :term:`Artifacts` -must actually exist in order to be exported - this is, after -all, the only way for the Downstream Pulp instance to gain access to them! - -If a repository is specified for export that utilized on-demand/streamed syncing, the -export will fail with a RuntimeError: - - ``Remote artifacts cannot be exported.`` - -Export/Import Directories must be explicitly allowed ----------------------------------------------------- - -Import and Export strictly control which directories may be read from/written to via -the settings options ``ALLOWED_IMPORT_PATHS`` and ``ALLOWED_EXPORT_PATHS``. -These default to empty - if they are not explicitly set, attempts to import or export will fail -with a validation error like - - ``"Path '/tmp/exports/' is not an allowed export path"`` - -Installed plugins must match ----------------------------- - -A Downstream must support the complete set of plugins present in a given export. If the -export includes plugins that are not installed in the Downstream, an import attempt will -fail with a validation error like - - ``Export uses pulp_rpm which is not installed.`` - -Version-compatibility required ------------------------------- - -The export-to-import workflow is built on the assumption that the Upstream and -Downstream instances are running "compatible" versions of pulpcore and plugins. In this -context, "compatible" is defined as **"share the same X.Y version"**. If this is not the -case, an import attempt will fail with a validation error like - - ``Export version 3.14.15 of pulpcore incompatible with installed version 3.16.3.`` - - -Exporting -^^^^^^^^^ - -..
note:: - - The following examples assume a Pulp instance that includes the ``pulp_file`` and - ``pulp_rpm`` plugins. They also assume that the ``http`` and ``jq`` packages are - installed. - -These workflows are executed on an Upstream Pulp instance. - -Creating an Exporter --------------------- - -In this workflow, you define an Exporter for a set of :term:`Repositories`. -This Exporter can be invoked repeatedly to regularly export the current -:term:`RepositoryVersion` of each of the specified :term:`Repositories`. - -First, let's make a pair of :term:`Repositories` named ``zoo`` and ``isofile``, -and save their UUIDs as ``ZOO_UUID`` and ``ISOFILE_UUID`` - -Set up 'zoo' repository":: - - # Create the repository - export ZOO_HREF=$(http POST :/pulp/api/v3/repositories/rpm/rpm/ name=zoo | jq -r '.pulp_href') - # - # add a remote - http POST :/pulp/api/v3/remotes/rpm/rpm/ name=zoo url=https://fixtures.pulpproject.org/rpm-signed/ policy='immediate' - # - # find remote's href - export REMOTE_HREF=$(http :/pulp/api/v3/remotes/rpm/rpm/ | jq -r ".results[] | select(.name == \"zoo\") | .pulp_href") - # - # sync the repository to give us some content - http POST :$ZOO_HREF'sync/' remote=$REMOTE_HREF - -Set up 'isofile' repository:: - - # create the repository - ISOFILE_HREF=$(http POST :/pulp/api/v3/repositories/file/file/ name=isofile | jq -r '.pulp_href') - # - # add remote - http POST :/pulp/api/v3/remotes/file/file/ name=isofile url=https://fixtures.pulpproject.org/file/PULP_MANIFEST - # - # find remote's href - REMOTE_HREF=$(http :/pulp/api/v3/remotes/file/file/ | jq -r ".results[] | select(.name == \"isofile\") | .pulp_href") - # - # sync the repository to give us some content - http POST :$ISOFILE_HREF'sync/' remote=$REMOTE_HREF - -Now that we have :term:`Repositories` with content, let's define an Exporter named ``test-exporter`` -that will export these :term:`Repositories` to the directory ``/tmp/exports/``:: - - export EXPORTER_HREF=$(http POST :/pulp/api/v3/exporters/core/pulp/ \ - name=test-exporter \ - repositories:=[\"${ISOFILE_HREF}\",\"${ZOO_HREF}\"] \ - path=/tmp/exports/ | jq -r '.pulp_href') - http GET :${EXPORTER_HREF} - -Exporting Content ------------------ - -Once we have an Exporter defined, we invoke it to generate an export-file in the directory -specified by that Exporter's ``path`` attribute:: - - http POST :${EXPORTER_HREF}exports/ - -The resulting Export writes to a ``.tar`` file, in the directory pointed to by the -Exporter's path, with a name that follows the convention ``export--YYYYmmdd_HHMM.tar``. - -It will also produce a "table of contents" file describing the file (or files, see -`Exporting Chunked Files`_ below) for later use verifying and importing the results of the export:: - - ls /tmp/exports - export-32fd25c7-18b2-42de-b2f8-16f6d90358c3-20200416_2000.tar - export-32fd25c7-18b2-42de-b2f8-16f6d90358c3-20200416_2000-toc.json - python -m json.tool /tmp/exports/export-32fd25c7-18b2-42de-b2f8-16f6d90358c3-20200416_2000-toc.json - { - "meta": { - "chunk_size": 0, # chunk_size in bytes, or 0 if an export did not use the chunk_size parameter - "file": "export-32fd25c7-18b2-42de-b2f8-16f6d90358c3-20200416_2000.tar", - "global_hash": "eaef962943915ecf6b5e45877b162364284bd9c4f367d9c96d18c408012ef424" - }, - "files": { - "export-32fd25c7-18b2-42de-b2f8-16f6d90358c3-20200416_2000.tar": "eaef962943915ecf6b5e45877b162364284bd9c4f367d9c96d18c408012ef424" - } - } - -These export files can now be transferred to a Downstream Pulp instance, and imported. - -.. 
note:: - - In the event of any failure during an export, the process will clean up any partial - export-files that may have been generated. Export-files can be very large; this will - preserve available space in the export-directory. - -Exporting Specific Versions ---------------------------- - -By default, the latest-versions of the :term:`Repositories` specified in the Exporter are exported. However, you -can export specific :term:`RepositoryVersions` of those :term:`Repositories` -if you wish using the ``versions=`` parameter on the ``/exports/`` invocation. - -Following the above example - let's assume we want to export the "zero'th" :term:`RepositoryVersion` of the -repositories in our Exporter.:: - - http POST :${EXPORTER_HREF}exports/ \ - versions:=[\"${ISO_HREF}versions/0/\",\"${ZOO_HREF}versions/0/\"] - -Note that the "zero'th" :term:`RepositoryVersion` of a :term:`Repository` is created when the :term:`Repository` is created, and is empty. If you unpack the resulting Export ``tar`` you will find, for example, that there is no ``artifacts/`` directory and an empty ``ArtifactResource.json`` file:: - - cd /tmp/exports - tar xvf export-930ea60c-97b7-4e00-a737-70f773ebbb14-20200511_2005.tar - versions.json - pulpcore.app.modelresource.ArtifactResource.json - pulpcore.app.modelresource.RepositoryResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulpcore.app.modelresource.ContentResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulpcore.app.modelresource.ContentArtifactResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.PackageResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.ModulemdResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.ModulemdDefaultsResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.PackageGroupResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.PackageCategoryResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.PackageEnvironmentResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.PackageLangpacksResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.UpdateRecordResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.DistributionTreeResource.json - repository-3c1ec06a-b0d6-4d04-9f99-32bfc0a499a9_0/pulp_rpm.app.modelresource.RepoMetadataFileResource.json - repository-958ae747-c19d-4820-828c-87452f1a5b8d_0/pulpcore.app.modelresource.ContentResource.json - repository-958ae747-c19d-4820-828c-87452f1a5b8d_0/pulpcore.app.modelresource.ContentArtifactResource.json - repository-958ae747-c19d-4820-828c-87452f1a5b8d_0/pulp_file.app.modelresource.FileContentResource.json - python -m json.tool pulpcore.app.modelresource.ArtifactResource.json - [] - -Exporting Incrementally ------------------------ - -By default, PulpExport exports all of the content and artifacts of the -:term:`RepositoryVersions` being exported. A common use-case is to do -regular transfers of content from an Upstream to a Downstream Pulp instance. While you -**can** export everything every time, it is an inefficient use of time and disk storage to -do so; exporting only the "entities that have changed" is a better choice. 
You can -accomplish this by setting the ``full`` parameter on the ``/exports/`` invocation to -``False``:: - - http POST :${EXPORTER_HREF}exports/ full=False - -This results in an export of all content-entities, but only :term:`Artifacts` -that have been **added** since the `last_export` of the same Exporter. - -You can override the use of `last_export` as the starting point of an incremental export by use of the ``start_versions=`` -parameter. Building on our example Exporter, if we want to do an incremental export of everything that's happened since the -**second** :term:`RepositoryVersion` of each :term:`Repository`, regardless of what happened in our last export, -we would issue a command such as the following:: - - http POST :${EXPORTER_HREF}exports/ \ - full=False \ - start_versions:=[\"${ISO_HREF}versions/1/\",\"${ZOO_HREF}versions/1/\"] - -This would produce an incremental export of everything that had been added to our :term:`Repositories` -between :term:`RepositoryVersion` '1' and the ``current_version`` :term:`RepositoryVersions` -of our :term:`Repositories`. - -Finally, if we need complete control over incremental exporting, we can combine the use of ``start_versions=`` and ``versions=`` -to produce an incremental export of everything that happened after ``start_versions=`` up to and including ``versions=``:: - - http POST :${EXPORTER_HREF}exports/ \ - full=False \ - start_versions:=[\"${ISO_HREF}versions/1/\",\"${ZOO_HREF}versions/1/\"] \ - versions:=[\"${ISO_HREF}versions/3/\",\"${ZOO_HREF}versions/3/\"] - -.. note:: - - **Note** that specifying ``start_versions=`` without specifying ``full=False`` (i.e., asking for an incremental export) - is an error, since it makes no sense to specify a 'starting version' for a full export. - -Exporting Chunked Files ------------------------ - -By default, PulpExport streams data into a single ``.tar`` file. Since :term:`Repositories` -can contain a lot of artifacts and content, that can result in a file too large to be -copied to transport media. In this case, you can specify a maximum-file-size, and the -export process will chunk the tar into a series of files no larger than this. - -You accomplish this by setting the ``chunk_size`` parameter to the desired maximum number of bytes. This -parameter takes an integer, or size-units of KB, MB, or GB. 
Files appear in the Exporter.path -directory, with a four-digit sequence number suffix:: - - http POST :/pulp/api/v3/exporters/core/pulp/1ddbe6bf-a6c3-4a88-8614-ad9511d21b94/exports/ chunk_size="10KB" - { - "task": "/pulp/api/v3/tasks/da3350f7-0102-4dd5-81e0-81becf3ffdc7/" - } - ls -l /tmp/exports/ - 10K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0000 - 10K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0001 - 10K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0002 - 10K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0003 - 10K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0004 - 10K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0005 - 2.3K export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0006 - 1168 export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325-toc.json - -The "table of contents" lists all the resulting files and their checksums:: - - python -m json.tool /tmp/exports/export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325-toc.json - { - "meta": { - "chunk_size": 10240, - "file": "export-8c1891a3-ffb5-41a7-b141-51daa0e38a18-20200717_1947.tar", - "global_hash": "eaef962943915ecf6b5e45877b162364284bd9c4f367d9c96d18c408012ef424" - }, - "files": { - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0000": "8156874798802f773bcbaf994def6523888922bde7a939bc8ac795a5cbb25b85", - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0001": "e52fac34b0b7b1d8602f5c116bf9d3eb5363d2cae82f7cc00cc4bd5653ded852", - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0002": "df4a2ea551ff41e9fb046e03aa36459f216d4bcb07c23276b78a96b98ae2b517", - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0003": "27a6ecba3cc51965fdda9ec400f5610ff2aa04a6834c01d0c91776ac21a0e9bb", - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0004": "f35c5a96fccfe411c074463c0eb0a77b39fa072ba160903d421c08313aba58f8", - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0005": "13458b10465b01134bde49319d6b5cba9948016448da9d35cb447265a25e3caa", - "export-780822a4-d280-4ed0-a53c-382a887576a6-20200522_2325.tar.0006": "a1986a0590943c9bb573c7d7170c428457ce54efe75f55997259ea032c585a35" - } - } - -Updating an Exporter --------------------- - -You can update an Exporter to modify a subset of its fields:: - - http PATCH :${EXPORTER_HREF} path=/tmp/newpath - -Importing -^^^^^^^^^ - -Creating the importer ---------------------- - -The first step to importing a Pulp export archive is to create an importer:: - - http :/pulp/api/v3/importers/core/pulp/ name="test" - - -By default, Pulp will map :term:`Repositories` in the export to :term:`Repositories` -in Pulp by name. This can be overriden by supplying a repo mapping that maps names from the Pulp export -to the names of repos in Pulp. For example, suppose the name of the repo in the Pulp export achive was -'source' and the repo in Pulp was 'dest'. The following command would set up this mapping:: - - http :/pulp/api/v3/importers/core/pulp/ name="test" repo_mapping:="{\"source\": \"dest\"}" - -After the importer is created, a POST request to create an import will trigger the import process. - -.. note:: - By default, the Pulp import machinery expects destination repositories to be present at the time - of the import. This can be overridden by passing the ``create_repositories=True`` field via the - POST request that will lead Pulp to create missing repositories on the fly. - -.. 
warning:: - The options ``repo_mapping`` and ``create_repositories`` are not compatible with each other. The - existence of a repository specified in the ``repo_mapping`` option is tested before the importer - is initialized. Thus, the repository has to be already created in advance. - -You can import an exported ``.tar`` directly using the ``path`` parameter:: - - http POST :/pulp/api/v3/importers/core/pulp/f8acba87-0250-4640-b56b-c92597d344b7/imports/ \ - path="/data/export-113c8950-072b-432a-9da6-24da1f4d0a02-20200408_2015.tar" - -Or you can point the importer at the "table of contents" file that was produced by an export. -If the TOC file is in the same directory as the export-files it points to, the import process -will: - - * verify the checksum(s) of all export-files, - * reassemble a chunked-export into a single ``.tar`` - * remove chunks as they are used (in order to conserve disk space) - * verify the checksum of the resulting reassembled ``.tar`` - -and then import the result:: - - http POST :/pulp/api/v3/importers/core/pulp/f8acba87-0250-4640-b56b-c92597d344b7/imports/ \ - toc="/data/export-113c8950-072b-432a-9da6-24da1f4d0a02-20200408_2015-toc.json" - -.. note:: - - The directory containing the file pointed to by ``path`` or ``toc`` must be defined in the - ``ALLOWED_IMPORT_PATHS`` setting or the import will fail. - -The command to create an import will return a task that can be used to monitor the import. You can -also see a history of past imports:: - - http :/pulp/api/v3/importers/core/pulp/f8acba87-0250-4640-b56b-c92597d344b7/imports/ - -Pre-validating import parameters --------------------------------- - -There are a number of things that can keep an import from being successful, ranging from a specified -export-file not being available to bad JSON specified for ``repo_mapping``. You can pre-validate your -proposed import using the ``import-check`` command:: - - http POST :/pulp/api/v3/importers/core/pulp/import-check/ \ - path=/tmp/export-file-path toc=/tmp/export-toc-path repo_mapping:="{\"source\": \"dest\"}" - -``import-check`` will validate that: - - * paths are in ``ALLOWED_IMPORT_PATHS`` - * containing directory exists - * containing directory is readable - * path/toc file(s) exist and are readable - * for TOC, containing directory is writeable - * repo_mapping is valid JSON - -``import-check`` is a low-overhead synchronous call. It does not attempt to do validations that -require database access or long-running tasks such as verifying checksums. All parameters are optional. - -.. note:: - - For ``path`` and ``toc``, if the ALLOWED_IMPORT_PATHS check fails, no further information will be given. diff --git a/docs/workflows/index.rst b/docs/workflows/index.rst deleted file mode 100644 index d34ba6f99d..0000000000 --- a/docs/workflows/index.rst +++ /dev/null @@ -1,28 +0,0 @@ -Workflows and Use Cases -======================= - -Best practices for content management are discussed here. The goal of this document is to provide a -framework for users to design their own workflows with any content type. For specific examples, -users should refer to `plugin documentation `_. This page -assumes that the reader is familiar with the fundamentals discussed in the :doc:`/concepts`. - - -.. 
toctree:: - :maxdepth: 2 - - alternate-content-sources - upload-publish - repo-versioning - on-demand-downloading - promotion - scheduling-tasks - exposing-content - signed-metadata - repairing-pulp - reclaim-disk-space - import-export - labels - plugin-removal - troubleshooting - domains-multi-tenancy - complex-filtering diff --git a/docs/workflows/labels.rst b/docs/workflows/labels.rst deleted file mode 100644 index 23d43496b4..0000000000 --- a/docs/workflows/labels.rst +++ /dev/null @@ -1,134 +0,0 @@ -Labels -====== - -Pulp provides a way to add key/value data to many resources (e.g. repositories, remotes, -distributions) in the form of labels. Labels are also useful for categorizing and filtering -resources. In the API, labels appear as a dictionary field that maps keys (strings) to values (also -strings). - -Managing labels ---------------- - -Creating labels -^^^^^^^^^^^^^^^ - -To create labels:: - - # create a new repository - pulp file repository create --name myrepo - - # set some labels - pulp file repository label set --name myrepo --key environment --value production - pulp file repository label set --name myrepo --key reviewed --value true - - # call show to view the repo's labels - pulp file repository show --name myrepo - -On show, you should see the new labels that have been created:: - - { - "pulp_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/", - "pulp_created": "2021-01-29T17:54:17.084105Z", - "versions_href": - "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/", - "pulp_labels": { - "environment": "production", - "reviewed": "true" - }, - "latest_version_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/0/", - "name": "myrepo", - "description": null, - "remote": null - } - -Updating labels -^^^^^^^^^^^^^^^ - -To update an existing label, call set again:: - - # update the label - pulp file repository label set --name myrepo --key reviewed --value false - - # call show to view the repo's labels - pulp file repository show --name myrepo - -On show, you should now see:: - - { - "pulp_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/", - "pulp_created": "2021-01-29T17:54:17.084105Z", - "versions_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/", - "pulp_labels": { - "environment": "production", - "reviewed": "false" - }, - "latest_version_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/0/", - "name": "myrepo", - "description": null, - "remote": null - } - -Unsetting labels -^^^^^^^^^^^^^^^^ - -To remove a label from a resource, call the unset command:: - - # update the label - pulp file repository label unset --name myrepo --key reviewed - - # call show to view the repo's labels - pulp file repository show --name myrepo - -On show, you should now see:: - - { - "pulp_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/", - "pulp_created": "2021-01-29T17:54:17.084105Z", - "versions_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/", - "pulp_labels": { - "environment": "production" - }, - "latest_version_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/0/", - "name": "myrepo", - "description": null, - "remote": null - } - -Filtering ---------- - -Pulp provides a ``pulp_label_select`` field for filtering resources by label. 
The value for this -field must be url-encoded. The following operations are supported: - -- ``environment=production`` - label has key 'environment' with value 'production' -- ``environment!=production`` - label has key 'environment' without value 'production' -- ``environment~prod`` - label has key 'environment' with value that contains 'prod' (case insensitive) -- ``enviroment`` - label has key of environment -- ``!environment`` - label without a key of environment - -Multiple terms can be combined with ``,``: - -- ``environment=production,reviewed=true`` - returns resources with labels where environment is - production and reviewed is true -- ``environment,reviewed=false`` - returns resources with an environment label and where reviewed is - false - -To filter using the CLI use ``--label-select``:: - - pulp file repository list --label-select="environment~prod,reviewed" - -This would return a list of repositories such as:: - - { - "pulp_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/", - "pulp_created": "2021-01-29T17:54:17.084105Z", - "versions_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/", - "pulp_labels": { - "environment": "production", - "reviewed": "true" - }, - "latest_version_href": "/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/versions/0/", - "name": "myrepo", - "description": null, - "remote": null - } diff --git a/docs/workflows/on-demand-downloading.rst b/docs/workflows/on-demand-downloading.rst deleted file mode 100644 index 943f826b17..0000000000 --- a/docs/workflows/on-demand-downloading.rst +++ /dev/null @@ -1,68 +0,0 @@ -On-Demand Downloading -===================== - -Overview --------- - -Pulp can sync content in a few modes: 'immediate', 'on_demand', and 'streamed'. Each provides a -different behavior on how and when Pulp acquires content. These are set as the `policy` attribute -of the :term:`Remote` performing the sync. Policy is an optional parameter and defaults to -`immediate`. - -immediate - When performing the sync, download all :term:`Artifacts` now. Also download all metadata - now to create the content units in Pulp, associated with the - :term:`repository version` created by the sync. `immediate` is the default, and - any plugin providing a sync is expected to implement the `immediate` mode. - -on_demand - When performing the sync, do not download any :term:`Artifacts` now. Download all - metadata now to create the content units in Pulp, associated with the - :term:`repository version` created by the sync. Clients requesting content - trigger the downloading of :term:`Artifacts`, which are saved into Pulp to be served to - future clients. - - This mode is ideal for saving disk space because Pulp never downloads and stores - :term:`Artifacts` that clients don't need. Units created from this mode are - :term:`on-demand content units`. - -streamed - When performing the sync, do not download any :term:`Artifacts` now. Download all - metadata now to create the content units in Pulp, associated with the - :term:`repository version` created by the sync. Clients requesting content - trigger the downloading of :term:`Artifacts`, which are *not* saved into Pulp. This - content will be re-downloaded with each client request. - - This mode is ideal for content that you especially don't want Pulp to store over time. For - instance, syncing from a nightly repo would cause Pulp to store every nightly ever produced which - is likely not valuable. 
Units created from this mode are - :term:`on-demand content units`. - - -Does Plugin X Support 'on_demand' or 'streamed'? ------------------------------------------------- - -Unless a plugin has enabled the 'on_demand' or 'streamed' values for the `policy` attribute, -you will receive an error when trying to use them. Check that plugin's documentation as well. - -.. note:: - - Want to add on-demand support to your plugin? See the `Pulp Plugin API <../plugins/ - nightly/>`_ documentation for more details on how to add on-demand support to a plugin. - - -Associating On-Demand Content with Additional Repository Versions ------------------------------------------------------------------ - -An :term:`on-demand content unit` can be associated and unassociated from a -:term:`repository version` just like a normal unit. Note that the original -:term:`Remote` will be used to download content should a client request it, even as that content is -made available in multiple places. - - -.. warning:: - - Deleting a :term:`Remote` that was used in a sync with either the `on_demand` or `streamed` - options can break published data. Specifically, clients who want to fetch content that a - :term:`Remote` was providing access to would begin to 404. Recreating a :term:`Remote` and - re-triggering a sync will cause these broken units to recover. diff --git a/docs/workflows/plugin-removal.rst b/docs/workflows/plugin-removal.rst deleted file mode 100644 index 4435317d96..0000000000 --- a/docs/workflows/plugin-removal.rst +++ /dev/null @@ -1,32 +0,0 @@ -.. _plugin-removal: - -Plugin removal --------------- - -You may no longer need a certain Pulp plugin, or a plugin may no longer be supported and having it -installed may prevent you from upgrading to the latest releases of -pulpcore and other Pulp plugins. - -Pulp provides the ability to remove its plugins. This might be needed for the following reasons: - * a plugin is no longer needed - * a plugin is no longer supported (it can block further pulpcore upgrades if maintainers no - longer update the plugin to be compatible with the latest pulpcore) - -Plugins can be removed one at a time using the `pulpcore-manager` command `remove-plugin`. In this -example, the File plugin is removed: - - -.. code-block:: - - $ pulpcore-manager remove-plugin file - - -As a result, all the data related to the plugin will be removed from the Pulp database. -It is possible to reinstall the removed plugin later if desired, provided it is compatible with the -pulpcore version being used. - - -.. note:: - - After the `remove-plugin` command has succeeded, the plugin needs to be uninstalled manually. - Steps to uninstall depend on how it was originally installed. diff --git a/docs/workflows/promotion.rst b/docs/workflows/promotion.rst deleted file mode 100644 index f6e16a003c..0000000000 --- a/docs/workflows/promotion.rst +++ /dev/null @@ -1,38 +0,0 @@ -Promotion -========= - -A typical workflow for many Pulp users is managing separate sets of content for different -lifecycle environments (e.g. Dev, Staging, Production) and promoting content from one -environment to another. Pulp provides some features that can help facilitate this. - -Distributions ------------- - -:term:`Distributions` are a resource in Pulp that are useful for supporting different -environments. In most cases, you'll want to create one Distribution for each :term:`Repository` and -environment.
If, for example, you have a CentOS Repository that you want to serve to your Dev -servers, you can create a distribution called "Dev CentOS" that points to your CentOS Publication. - -One way to promote content is to use Distributions. Going back to the Dev CentOS example, one way you -could promote this Publication to a Staging environment would be to create a Staging CentOS -distribution and point it to the same publication as the Dev CentOS distribution. Any time you want -to promote content from Dev to Staging, you can simply repeat this action. Also, to roll back, you can -simply point the Staging CentOS distribution back to the Publication it previously pointed at. - -Repositories ------------ - -Another way to promote content is to create separate Repositories for each environment. This gives -you greater control over which content is available to each environment. In the case where you want -to make a CentOS Repository available to Dev and Production environments, you'd create two -Repositories: a Dev CentOS and a Production CentOS Repository. You'd also create one Distribution -for each repository and you'd sync down all content from a Remote into the Dev CentOS Repository and -only that Repository. - -When you want to promote content from Dev to Production, one option is to call the ``modify`` -endpoint on the Production CentOS Repository and supply a Dev CentOS RepositoryVersion as the -``base_version`` parameter. This will copy all content from the Dev CentOS RepositoryVersion of your -choosing into the Production CentOS repository. - -This method of managing environment content is particularly useful for plugins without Publications, -where Distributions can point directly to a Repository. diff --git a/docs/workflows/reclaim-disk-space.rst b/docs/workflows/reclaim-disk-space.rst deleted file mode 100644 index 99392d4835..0000000000 --- a/docs/workflows/reclaim-disk-space.rst +++ /dev/null @@ -1,67 +0,0 @@ -.. _reclaim-disk-space: - -Reclaim disk space for a list of repositories --------------------------------------------- - -Pulp provides the ability to reclaim disk space for: - - * content that one no longer needs to serve but would like to keep in the repo for historical - reasons. - * repos that were synced with the on_demand download policy, where one wants to clear out the - downloaded files. A repository that was synced with the on_demand - download policy will store artifacts locally after they have been requested by a client, but - without this feature there isn't a way to have Pulp delete the locally stored files and free disk - space if these packages are unlikely to be used again. - -To start a reclaim task, send a POST request to `/pulp/api/v3/repositories/reclaim_space/`. - -.. 
code-block:: - - $ RECLAIM_TASK=$(http POST :24817/pulp/api/v3/repositories/reclaim_space/ repo_hrefs:=[\"/pulp/api/v3/repositories/rpm/rpm/b3a6674d-181c-4e72-9412-7cbc747480ad/\"] | jq -r '.task') - $ http --body :24817$RECLAIM_TASK - - { - "child_tasks": [], - "created_resources": [], - "error": null, - "finished_at": "2021-07-16T15:36:20.650573Z", - "logging_cid": "50d1721d205c40f69defb773e32a98ff", - "name": "pulpcore.app.tasks.reclaim_space.reclaim_space", - "parent_task": null, - "progress_reports": [ - { - "code": "reclaim-space.artifact", - "done": 35, - "message": "Reclaim disk space", - "state": "completed", - "suffix": null, - "total": 35 - } - ], - "pulp_created": "2021-07-16T15:36:20.306845Z", - "pulp_href": "/pulp/api/v3/tasks/20ee50bd-9392-4ebf-8f1d-d2f15474ebd6/", - "reserved_resources_record": [ - "/pulp/api/v3/repositories/rpm/rpm/b3a6674d-181c-4e72-9412-7cbc747480ad/" - ], - "started_at": "2021-07-16T15:36:20.370245Z", - "state": "completed", - "task_group": null, - "worker": "/pulp/api/v3/workers/ccc132c4-0445-4f55-b370-32c3662dce3c/" - } - -As a result of this request, disk space will be freed up for artifacts that are exclusive to the -list of provided repos. The content of the repository versions will not change and no repository -versions will be created or removed. - -An optional ``repo_versions_keeplist`` parameter can be specified; it contains a list of repository -version hrefs whose artifacts will be excluded from the removal. - -The task will remove artifacts only from content that was synced from a remote source. It will not -touch the content that was uploaded directly into Pulp. - -.. note:: - - The task will clean up artifacts regardless of the download policy. The content app will be able - to stream an artifact if it is locally available; otherwise it will attempt to redownload it from - the known upstream URLs. In case the upstream stopped serving the corresponding file, Pulp won't be - able to download and serve it. diff --git a/docs/workflows/repairing-pulp.rst b/docs/workflows/repairing-pulp.rst deleted file mode 100644 index a66510bf8a..0000000000 --- a/docs/workflows/repairing-pulp.rst +++ /dev/null @@ -1,109 +0,0 @@ -.. _repairing-pulp: - -Repairing Pulp -------------- - -Pulp provides some features for self-repair in cases where artifacts in the storage went missing or -got corrupted in some way (e.g. bit rot). - -.. warning:: - - This repair feature looks for missing or corrupted files that are supposed to be in - the storage. It attempts a redownload of these files from known upstream URLs. - In case upstream stopped serving the corresponding files, or these files were uploaded - directly into Pulp or were created by Pulp itself (e.g. generated metadata), the Pulp - repair feature is unable to remedy the situation. - -To start a repair task for all of Pulp (i.e. checking all content), send a POST request to -`/pulp/api/v3/repair/`. - -.. 
code-block:: - - $ REPAIR_TASK=$(http POST :24817/pulp/api/v3/repair/ | jq -r '.task') - $ http --body :24817$REPAIR_TASK - - { - "child_tasks": [], - "created_resources": [], - "error": null, - "finished_at": "2020-04-07T08:36:52.373633Z", - "name": "pulpcore.app.tasks.repository.repair_all_artifacts", - "parent_task": null, - "progress_reports": [ - { - "code": "repair.repaired", - "done": 2, - "message": "Repair corrupted units", - "state": "completed", - "suffix": null, - "total": null - }, - { - "code": "repair.corrupted", - "done": 2, - "message": "Identify corrupted units", - "state": "completed", - "suffix": null, - "total": null - } - ], - "pulp_created": "2020-04-07T08:36:52.274985Z", - "pulp_href": "/pulp/api/v3/tasks/530302b4-8674-4db3-8a13-99febef80830/", - "reserved_resources_record": [], - "started_at": "2020-04-07T08:36:52.348381Z", - "state": "completed", - "task_group": null, - "worker": "/pulp/api/v3/workers/f2fe2811-74a1-463f-93d2-53c7b302115c/" - } - -To start a repair task on a specific repository version, send a POST request to its `repair` -endpoint: - -.. code-block:: - - $ REPAIR_TASK=$(http POST :24817${REPOSITORY_VERSION}repair/ | jq -r '.task') - $ http --body :24817$REPAIR_TASK - - { - "child_tasks": [], - "created_resources": [], - "error": null, - "finished_at": "2020-04-07T08:36:52.373633Z", - "name": "pulpcore.app.tasks.repository.repair_version", - "parent_task": null, - "progress_reports": [ - { - "code": "repair.repaired", - "done": 2, - "message": "Repair corrupted units", - "state": "completed", - "suffix": null, - "total": null - }, - { - "code": "repair.corrupted", - "done": 2, - "message": "Identify corrupted units", - "state": "completed", - "suffix": null, - "total": null - } - ], - "pulp_created": "2020-04-07T08:36:52.274985Z", - "pulp_href": "/pulp/api/v3/tasks/530302b4-8674-4db3-8a13-99febef80830/", - "reserved_resources_record": [ - "/pulp/api/v3/repositories/file/file/47a3f651-aaa6-4026-b649-130c45ab38ea/" - ], - "started_at": "2020-04-07T08:36:52.348381Z", - "state": "completed", - "task_group": null, - "worker": "/pulp/api/v3/workers/f2fe2811-74a1-463f-93d2-53c7b302115c/" - } - -The result of this task can be read in the `progress_reports` section. -If the number of `done` differs between the reports, Pulp was unable to repair all artifacts. - -For both endpoints, there is a POST parameter named ``verify_checksums``, which defaults to -True. Specifying False when calling one of the repair endpoints will skip the checksum -verification and only check for files which are missing, which is substantially faster and -less resource-intensive. However, this won't detect corrupted files. diff --git a/docs/workflows/repo-versioning.rst b/docs/workflows/repo-versioning.rst deleted file mode 100644 index f7ebe33dcc..0000000000 --- a/docs/workflows/repo-versioning.rst +++ /dev/null @@ -1,30 +0,0 @@ -Repository Versioning -===================== - -Repositories in Pulp 3 are versioned and anytime a change is made to the content of a Repository, a -new version is created. These RepositoryVersions are immutable: they can only be created and -deleted, not updated or changed. - -Version Retention ----------------- - -By default, retain_repo_versions is null, which means that Pulp will store all versions of a -Repository. This behavior can be changed by setting the retain_repo_versions field on the -Repository. A Repository must have at least one RepositoryVersion, so retain_repo_versions must be -greater than or equal to 1.
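As an illustrative, hedged sketch (not part of the original page), the field can also be changed directly over the REST API. The port, the placeholder href, and the use of HTTPie below are assumptions carried over from other examples in these docs; an equivalent ``pulp`` CLI call appears a little further below in the original text.

.. code-block:: bash

   # Assumed placeholder: substitute the pulp_href of your own repository.
   REPO_HREF=/pulp/api/v3/repositories/file/file/13477a92-b811-4436-a76a-d2469a17a62e/

   # Keep only the latest repository version; versions beyond the limit are cleaned up
   # (except versions still served by a distribution or publication, as noted below).
   # The update may be processed asynchronously, in which case a task href is returned.
   http PATCH :24817${REPO_HREF} retain_repo_versions:=1

   # Verify the new setting.
   http :24817${REPO_HREF}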
- -Setting retain_repo_versions to 1 effectively disables repository versioning since Pulp will only -store the latest version. - -Cleanup will ignore any repo versions that are being served directly via a distribution or via a -publication. - -To update this field for a file Repository called myrepo, simply call:: - - pulp file repository update --name myrepo --retained-versions 1 - -Note that updating this field will automatically update the versions for the Repository, so setting -the number to a smaller value will cause Pulp to delete any versions that exceed the number of -retained versions. diff --git a/docs/workflows/scheduling-tasks.rst b/docs/workflows/scheduling-tasks.rst deleted file mode 100644 index 7c7dbfe671..0000000000 --- a/docs/workflows/scheduling-tasks.rst +++ /dev/null @@ -1,9 +0,0 @@ -Scheduling Tasks -================ - -While Pulp 2 supported scheduling tasks natively, this is no longer a core feature in Pulp 3. Scheduling Pulp tasks -in Pulp 3 can be accomplished using any of the following external tools (and possibly others not on this list). - - * `cron `_ - * `rundeck `_ - * `distributed cron `_ diff --git a/docs/workflows/signed-metadata.rst b/docs/workflows/signed-metadata.rst deleted file mode 100644 index 25008e6b31..0000000000 --- a/docs/workflows/signed-metadata.rst +++ /dev/null @@ -1,101 +0,0 @@ -.. _configuring-signing: - -Metadata Signing ---------------- - -Administrators can add signing services to Pulp using the command-line tools. Users -may then associate the signing services with repositories that support content signing. -The example below demonstrates how a signing service can be created using ``gpg``: - -1. Make sure the service user ``pulp`` has access to ``gpg`` and that the keypair is - installed in its keyrings. The private key might alternatively be provided by a - hardware cryptographic device. - -2. Create a signing script that accepts a file name as the only argument. The script - needs to generate an ASCII-armored detached GPG signature for that file, using the key - specified via the ``PULP_SIGNING_KEY_FINGERPRINT`` environment variable. There is also - a ``CORRELATION_ID`` environment variable available for logging purposes. The script - should then print out a JSON structure with the following format. All the file names - are relative paths inside the current working directory:: - - {"file": "filename", "signature": "filename.asc"} - - The detached signature must keep the original file name, with only the suffix appended, as shown. - - .. note:: - - Plugins may provide other signing service classes that may need their JSON output to - contain different information. - - Below is an example of a signing script that produces signatures for content: - - .. code-block:: bash - - #!/usr/bin/env bash - - FILE_PATH=$1 - SIGNATURE_PATH="$1.asc" - - ADMIN_ID="$PULP_SIGNING_KEY_FINGERPRINT" - PASSWORD="password" - - # Create a detached signature - gpg --quiet --batch --pinentry-mode loopback --yes --passphrase \ - $PASSWORD --homedir ~/.gnupg/ --detach-sign --default-key $ADMIN_ID \ - --armor --output $SIGNATURE_PATH $FILE_PATH - - # Check the exit status - STATUS=$? - if [ $STATUS -eq 0 ]; then - echo {\"file\": \"$FILE_PATH\", \"signature\": \"$SIGNATURE_PATH\"} - else - exit $STATUS - fi - - .. note:: - As the creator of the signing script, you can expect PULP_SIGNING_KEY_FINGERPRINT - and potentially other environment variables, depending on the content plugin calling the signing service.
- Make sure the script contains a proper shebang and that Pulp has valid permissions - to execute it. - -3. Create a signing service consisting of an absolute path to the script, a meaningful - name describing the script's purpose, and the identity of the key used for signing. The - script must be executable. Here is an example showing how to create one instance of a signing - service: - - .. code-block:: bash - - pulpcore-manager add-signing-service ${SERVICE_NAME} ${SCRIPT_ABS_FILENAME} ${KEYID} - - .. note:: - - The public key must be available on the caller's keyring or on a keyring provided via the - ``--gpghome`` or ``--keyring`` parameters. - - .. warning:: - - It is possible to insert a new signing service into the database by using the - ``pulpcore-manager shell_plus`` interactive Python shell. However, this is not recommended. - -4. Retrieve and check the saved signing service via the REST API:: - - $ http :24817/pulp/api/v3/signing-services/ - - { - "count": 1, - "next": null, - "previous": null, - "results": [ - { - "name": "sign-metadata", - "pubkey_fingerprint": "19CD52BD1CA9A00DF10A842D74B14E3590C2231F", - "public_key": "-----BEGIN PGP PUBLIC KEY BLOCK-----\n\n [...] \n-----END PGP PUBLIC KEY BLOCK-----\n", - "pulp_created": "2020-11-06T15:42:20.645197Z", - "pulp_href": "/pulp/api/v3/signing-services/ffb9e987-952f-47e3-a274-ffe69a80ded7/", - "script": "/var/lib/pulp/sign-metadata.sh" - } - ] - } - -Plugin writers are then able to sign selected content with the provided script. To learn more -about signing from a plugin's perspective, see the section :ref:`metadata-signing`. diff --git a/docs/workflows/troubleshooting.rst b/docs/workflows/troubleshooting.rst deleted file mode 100644 index 64dd039fb6..0000000000 --- a/docs/workflows/troubleshooting.rst +++ /dev/null @@ -1,53 +0,0 @@ -Troubleshooting -=============== - -.. _debugging_tasks: - -Debugging Tasks --------------- - -If your system gets stuck processing Pulp tasks, you might want to debug the tasking system. - -Please always consider that your system might be in the process of dealing with long-running tasks, and that other tasks are rightfully waiting on their completion. - -Query tasks with the CLI ------------------------ - -How many tasks are waiting? - -.. code-block:: bash - - pulp task list --state=waiting | jq 'length' - -Is anybody running? - -.. code-block:: bash - - pulp task list --state=running | jq 'length' - -How many have failed? - -.. code-block:: bash - - pulp task list --state=failed | jq 'length' - -Retrieve the HREFs of running tasks: - -.. code-block:: bash - - pulp task list --state=running | jq 'map({.name, .pulp_href})' - # Save the HREF of the 3rd task (counting starts at zero) - TASK_HREF=$(pulp task list --state=running | jq -r 'map(.pulp_href)[3]') - -Show the state of a particular task: - -.. code-block:: bash - - pulp task show --href "$TASK_HREF" - -Cancel a running task: - -.. code-block:: bash - - # warning: canceling tasks may break higher-level workflows - pulp task cancel --href "$TASK_HREF" diff --git a/docs/workflows/upload-publish.rst b/docs/workflows/upload-publish.rst deleted file mode 100644 index 17f0be9ea8..0000000000 --- a/docs/workflows/upload-publish.rst +++ /dev/null @@ -1,62 +0,0 @@ -Upload and Publish -================== - -Uploading Content ----------------- - -Content can be uploaded by POSTing the file to ``/pulp/api/v3/content/<plugin>/<type>/``.
-Some plugins can create the content unit completely from that file, but others require some -additional attributes to be specified with the upload. See your plugin's documentation for more info -on its upload features. - -File data can be uploaded in parallel, and then the call to -``/pulp/api/v3/content/<plugin>/<type>/`` can reference the already existing Artifact to create -the content from. - - -Associating with a Repository on Upload --------------------------------------- - -You can automatically associate newly uploaded content with a Repository when using the -``/pulp/api/v3/content/<plugin>/<type>/`` API by passing the ``repository`` reference. - - -Chunked Uploads --------------- - -For large file uploads, Pulp provides an `Uploads API <../../restapi.html#tag/uploads>`_. To begin -uploading a file in chunks, an initial POST request must be sent to the ``/pulp/api/v3/uploads/`` -endpoint with the total size of the file:: - - http POST :24817/pulp/api/v3/uploads/ size=10485760 - -This returns an upload href (e.g. ``/pulp/api/v3/uploads/a8b5a7f7-2f22-460d-ab20-d5616cb71cdd/``) that can -be used for uploading chunks. Chunks can be uploaded in any order or in parallel:: - - http --form PUT :24817/pulp/api/v3/uploads/a8b5a7f7-2f22-460d-ab20-d5616cb71cdd/ file@./chunk2 'Content-Range:bytes 6291456-10485759/*' - http --form PUT :24817/pulp/api/v3/uploads/a8b5a7f7-2f22-460d-ab20-d5616cb71cdd/ file@./chunk1 'Content-Range:bytes 0-6291455/*' - -Note: You can send an optional sha256 argument:: - - http --form PUT :24817/pulp/api/v3/uploads/a8b5a7f7-2f22-460d-ab20-d5616cb71cdd/ file@./chunk1 'Content-Range:bytes 0-6291455/*' sha256=7ffc86295de63e96006ce5ab379050628aa5d51f816267946c71906594e13870 - -Once all chunks have been uploaded, a final POST request with the file sha256 can be sent to -complete the upload:: - - http POST :24817/pulp/api/v3/uploads/a8b5a7f7-2f22-460d-ab20-d5616cb71cdd/commit/ sha256=abc123... - -This queues a task that creates an artifact, and the upload gets deleted and cannot be reused. - -Putting this all together, here is an example that uploads a 1.iso file in two chunks:: - - curl -O https://fixtures.pulpproject.org/file-large/1.iso - split --bytes=6M 1.iso chunk - export UPLOAD=$(http POST :24817/pulp/api/v3/uploads/ size=`ls -l 1.iso | cut -d ' ' -f5` | jq -r '.pulp_href') - http --form PUT :24817$UPLOAD file@./chunkab 'Content-Range:bytes 6291456-10485759/*' - http --form PUT :24817$UPLOAD file@./chunkaa 'Content-Range:bytes 0-6291455/*' - http POST :24817${UPLOAD}commit/ sha256=`sha256sum 1.iso | cut -d ' ' -f1` - -.. note:: - - Each uploaded chunk is stored as a separate file in the default storage. When an upload is - committed, uploaded chunks are removed automatically and a new artifact is created, as usual.
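To round out the chunked-upload walkthrough above, here is a hedged sketch of what typically happens next: waiting for the commit task to finish and creating a content unit from the resulting artifact. The polling loop, the File plugin endpoint ``/pulp/api/v3/content/file/files/``, and the ``relative_path`` field are assumptions added for illustration, not something stated in the original text.

.. code-block:: bash

   # Capture the task href from the commit call (this replaces the last line of the
   # example above; ${UPLOAD} still holds the upload href).
   TASK=$(http POST :24817${UPLOAD}commit/ sha256=`sha256sum 1.iso | cut -d ' ' -f1` | jq -r '.task')

   # Naive polling loop (no timeout or failure handling) until the task completes.
   until http :24817$TASK | jq -e '.state == "completed"' > /dev/null; do sleep 1; done

   # The commit task records the new artifact href in created_resources.
   ARTIFACT=$(http :24817$TASK | jq -r '.created_resources[0]')

   # Create a file content unit that references the existing artifact
   # (assumed File plugin endpoint; this call is itself processed as a task).
   http POST :24817/pulp/api/v3/content/file/files/ artifact=$ARTIFACT relative_path=1.iso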
diff --git a/staging_docs/assets/diagrams_src/concept-content.dot b/staging_docs/assets/diagrams_src/concept-content.dot deleted file mode 100644 index 946c9ba50b..0000000000 --- a/staging_docs/assets/diagrams_src/concept-content.dot +++ /dev/null @@ -1,21 +0,0 @@ -@startuml -rectangle Files { - (foo.rpm) - usecase Playbook as "Ansible Playbook - --- - contains: - vars/ - templates/ - files/ - README" -} - -rectangle "Content Unit RPM" { - (foo.rpm) --> (Artifact) -} - -rectangle "Content Unit Ansible" { - (Artifact) as Artifact1 - (Playbook) --> (Artifact1) -} -@enduml \ No newline at end of file diff --git a/staging_docs/assets/diagrams_src/concept-publish.dot b/staging_docs/assets/diagrams_src/concept-publish.dot deleted file mode 100644 index c858256175..0000000000 --- a/staging_docs/assets/diagrams_src/concept-publish.dot +++ /dev/null @@ -1,8 +0,0 @@ -@startuml -(Repository Version) -right-> (Publication) -(Publication) --> (Distribution) - -note left of Distribution - How and where to publish -end note -@enduml \ No newline at end of file diff --git a/staging_docs/assets/diagrams_src/concept-remote.dot b/staging_docs/assets/diagrams_src/concept-remote.dot deleted file mode 100644 index a2fb1341c1..0000000000 --- a/staging_docs/assets/diagrams_src/concept-remote.dot +++ /dev/null @@ -1,27 +0,0 @@ -@startuml -rectangle "External sources" { - - rectangle "Repository" { - usecase RV2 as "Repository Version 2 - --- - contains: - Content Unit 1 - Content Unit 2" - usecase RV3 as "Repository Version 3 - --- - contains: - Content Unit 1 - Content Unit 2 - Content Units Remote" - note "Adding new content units\nfrom remote source" as N - RV2 -right-> N - N -right-> RV3 - } - - rectangle "Remote Source" { - (Content Units) - } - - (Content Units) --> N -} -@enduml \ No newline at end of file diff --git a/staging_docs/assets/diagrams_src/concept-repository.dot b/staging_docs/assets/diagrams_src/concept-repository.dot deleted file mode 100644 index 17c41d7bf3..0000000000 --- a/staging_docs/assets/diagrams_src/concept-repository.dot +++ /dev/null @@ -1,11 +0,0 @@ -@startuml -rectangle "Create repository" { - (Content Unit 1) - (Content Unit 2) - rectangle "Repository" { - (Repository Version 1) - } - (Content Unit 1) --> (Repository Version 1) - (Content Unit 2) --> (Repository Version 1) -} -@enduml \ No newline at end of file diff --git a/staging_docs/assets/images/.gitkeep b/staging_docs/assets/images/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/staging_docs/assets/images/architecture.png b/staging_docs/assets/images/architecture.png deleted file mode 100644 index 26ca1478dc..0000000000 Binary files a/staging_docs/assets/images/architecture.png and /dev/null differ diff --git a/staging_docs/assets/images/deferred_download_sequence.png b/staging_docs/assets/images/deferred_download_sequence.png deleted file mode 100644 index ee0a0eddb2..0000000000 Binary files a/staging_docs/assets/images/deferred_download_sequence.png and /dev/null differ diff --git a/staging_docs/assets/images/lazy_component.png b/staging_docs/assets/images/lazy_component.png deleted file mode 100644 index 08fbfc5453..0000000000 Binary files a/staging_docs/assets/images/lazy_component.png and /dev/null differ diff --git a/staging_docs/assets/images/node-anatomy.png b/staging_docs/assets/images/node-anatomy.png deleted file mode 100644 index dd6e6f6c83..0000000000 Binary files a/staging_docs/assets/images/node-anatomy.png and /dev/null differ diff --git 
a/staging_docs/assets/images/node-topology.png b/staging_docs/assets/images/node-topology.png deleted file mode 100644 index c7d264d810..0000000000 Binary files a/staging_docs/assets/images/node-topology.png and /dev/null differ diff --git a/staging_docs/assets/images/pulp-exp1.png b/staging_docs/assets/images/pulp-exp1.png deleted file mode 100644 index 01d867a32d..0000000000 Binary files a/staging_docs/assets/images/pulp-exp1.png and /dev/null differ diff --git a/staging_docs/assets/images/rbac_architecture.png b/staging_docs/assets/images/rbac_architecture.png deleted file mode 100644 index 83dbf7a45d..0000000000 Binary files a/staging_docs/assets/images/rbac_architecture.png and /dev/null differ