diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2d3e69fe8..0b7c7b289 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,9 @@ source/guides/github-actions-ci-cd-sample/* @webknjaz source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst @webknjaz + +# Sphinx extension +pug_sphinx_extensions/ @FFY00 + +# build-details.json +source/specifications/build-details/ @FFY00 +source/specifications/specs/build-details-*.json @FFY00 diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..fbc581cd6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: "Community Guidelines" + url: "https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md" + about: "Please make sure to follow the PSF Code of Conduct when participating in this repository." diff --git a/.github/ISSUE_TEMPLATE/general.yml b/.github/ISSUE_TEMPLATE/general.yml new file mode 100644 index 000000000..d41731613 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/general.yml @@ -0,0 +1,37 @@ +name: General Issue +description: Please fill out the form below to submit an issue. +labels: [] +assignees: [] + +body: + - type: markdown + attributes: + value: | + **Thanks for taking a minute to file an issue!** + + Read the [PSF Code of Conduct][CoC] first. + + ⚠ + Verify first that your issue is not [already reported on + GitHub][issue search]. + + _Please fill out the form below with as many precise + details as possible._ + + [CoC]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + [issue search]: ../search?q=is%3Aissue&type=issues + + - type: textarea + attributes: + label: Issue Description + description: Please provide a detailed description of your issue. + placeholder: Describe your issue here... + validations: + required: true + + - type: checkboxes + attributes: + label: Code of Conduct + options: + - label: I am aware that participants in this repository must follow the PSF Code of Conduct. 
+ required: true diff --git a/.github/workflows/pr-preview-links.yml b/.github/workflows/pr-preview-links.yml index 90ea9cc73..291ec3ad2 100644 --- a/.github/workflows/pr-preview-links.yml +++ b/.github/workflows/pr-preview-links.yml @@ -17,6 +17,6 @@ jobs: documentation-links: runs-on: ubuntu-latest steps: - - uses: readthedocs/actions/preview@v1 + - uses: readthedocs/actions/preview@b8bba1484329bda1a3abe986df7ebc80a8950333 # v1.5 with: project-slug: "python-packaging-user-guide" diff --git a/.github/workflows/test-translations.yml b/.github/workflows/test-translations.yml index 45dc60aa3..537a8df72 100644 --- a/.github/workflows/test-translations.yml +++ b/.github/workflows/test-translations.yml @@ -31,9 +31,10 @@ jobs: steps: - name: Grab the repo src - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ env.I18N_BRANCH }} + persist-credentials: false - name: List languages id: languages @@ -53,12 +54,13 @@ jobs: steps: - name: Grab the repo src - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ env.I18N_BRANCH }} + persist-credentials: false - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: >- 3.10 @@ -67,10 +69,12 @@ jobs: run: python -m pip install --upgrade nox virtualenv sphinx-lint - name: Set Sphinx problem matcher - uses: sphinx-doc/github-problem-matcher@v1.0 + uses: sphinx-doc/github-problem-matcher@1f74d6599f4a5e89a20d3c99aab4e6a70f7bda0f # v1.1 - name: Build translated docs in ${{ matrix.language }} - run: nox -s build -- -q -D language=${{ matrix.language }} + run: nox -s build -- -q -D language=${LANGUAGE} + env: + LANGUAGE: ${{ matrix.language }} - name: Set Sphinx Lint problem matcher if: always() @@ -78,4 +82,6 @@ jobs: - name: Lint translation file if: always() - run: sphinx-lint locales/${{ matrix.language }}/LC_MESSAGES/messages.po + run: sphinx-lint locales/${LANGUAGE}/LC_MESSAGES/messages.po + env: + LANGUAGE: ${{ matrix.language }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8503ca720..1f67bad8e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,12 +6,19 @@ on: branches-ignore: - gh-readonly-queue/** # Temporary merge queue-related GH-made branches pull_request: + types: + - opened # default + - synchronize # default + - reopened # default + - ready_for_review # used in PRs created from GitHub Actions workflows workflow_call: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true +permissions: {} + jobs: build: name: ${{ matrix.noxenv }} @@ -24,10 +31,12 @@ jobs: - linkcheck steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.11" cache: 'pip' @@ -55,6 +64,6 @@ jobs: steps: - name: Decide whether the needed jobs succeeded or failed - uses: re-actors/alls-green@release/v1 + uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 with: jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/translation.yml b/.github/workflows/translation.yml index 7cfae2991..67fcb5edf 100644 --- a/.github/workflows/translation.yml +++ 
b/.github/workflows/translation.yml @@ -17,16 +17,20 @@ jobs: runs-on: ubuntu-latest if: github.repository_owner == 'pypa' + permissions: + contents: write # to push to I18N_BRANCH + steps: - name: Grab the repo src - uses: actions/checkout@v3 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 0 # To reach the common commit + persist-credentials: true # For `git push` - name: Set up git user as [bot] # Refs: # * https://github.community/t/github-actions-bot-email-address/17204/6 # * https://github.com/actions/checkout/issues/13#issuecomment-724415212 - uses: fregante/setup-git-user@v1.1.0 + uses: fregante/setup-git-user@024bc0b8e177d7e77203b48dab6fb45666854b35 # v2.0.2 - name: Switch to the translation source branch run: | @@ -48,10 +52,12 @@ jobs: run: | sh -x - git merge '${{ github.event.repository.default_branch }}' + git merge "${DEFAULT_BRANCH}" + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: >- 3.10 diff --git a/.github/workflows/update-uv-build-version.yml b/.github/workflows/update-uv-build-version.yml new file mode 100644 index 000000000..d204bd391 --- /dev/null +++ b/.github/workflows/update-uv-build-version.yml @@ -0,0 +1,43 @@ +--- + +name: Update uv build version + +on: + schedule: + - cron: "0 6 * * 1" # mondays at 6am + workflow_dispatch: + +jobs: + update-uv-build-version: + name: Update uv_build version + if: github.repository_owner == 'pypa' # suppress noise in forks + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + - name: Set up uv + uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # v7.1.0 + - name: Update uv_build version + id: update_script + run: uv run scripts/update_uv_build_version.py + - # If there are no changes, no pull request will be created and the action exits silently. + name: Create Pull Request + uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 + with: + token: ${{ secrets.GITHUB_TOKEN }} + commit-message: Update uv_build version to ${{ steps.update_script.outputs.version }} + title: Update uv_build version to ${{ steps.update_script.outputs.version }} + draft: true # Trigger CI by un-drafting the PR, otherwise `GITHUB_TOKEN` PRs don't trigger CI. + body: | + Automated update of uv_build version bounds for uv ${{ steps.update_script.outputs.version }}. + + This PR was created automatically by the cron workflow, ping `@konstin` for problems. + branch: bot/update-uv-build-version + delete-branch: true + +... 
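The ``Create Pull Request`` step above reads ``steps.update_script.outputs.version``, which only works because the update script appends a ``version=...`` line to the file named by the ``GITHUB_OUTPUT`` environment variable. A minimal sketch of that step-output convention, for orientation only (the real logic lives in ``scripts/update_uv_build_version.py``, added later in this diff):

.. code-block:: python

   # Sketch of the GITHUB_OUTPUT step-output convention relied on above;
   # the actual implementation is in scripts/update_uv_build_version.py.
   import os


   def emit_step_output(name: str, value: str) -> None:
       """Make a value readable in the workflow as steps.<step id>.outputs.<name>."""
       output_file = os.environ.get("GITHUB_OUTPUT")
       if not output_file:
           # Not running under GitHub Actions; just show what would be written.
           print(f"{name}={value}")
           return
       with open(output_file, "a") as f:
           f.write(f"{name}={value}\n")


   emit_step_output("version", "0.9.0")  # "0.9.0" is a placeholder version number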
diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml new file mode 100644 index 000000000..6c8c62f7d --- /dev/null +++ b/.github/workflows/zizmor.yml @@ -0,0 +1,38 @@ +# From https://woodruffw.github.io/zizmor/usage/#use-in-github-actions + +name: GitHub Actions Security Analysis with zizmor 🌈 + +on: + push: + branches: ["main"] + pull_request: + branches: ["**"] + +jobs: + zizmor: + name: zizmor latest via PyPI + runs-on: ubuntu-latest + permissions: + security-events: write + # required for workflows in private repositories + contents: read + actions: read + steps: + - name: Checkout repository + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # v7.1.0 + + - name: Run zizmor 🌈 + run: uvx zizmor --format sarif source/guides/github-actions-ci-cd-sample/* > results.sarif + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8 + with: + sarif_file: results.sarif + category: zizmor diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c6372ec30..47b864808 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,9 @@ +ci: + autoupdate_schedule: quarterly + repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v6.0.0 hooks: - id: check-added-large-files - id: check-case-conflict @@ -12,7 +15,7 @@ repos: - id: trailing-whitespace - repo: https://github.com/codespell-project/codespell - rev: v2.3.0 + rev: v2.4.1 hooks: - id: codespell args: ["-L", "ned,ist,oder", "--skip", "*.po"] @@ -34,7 +37,7 @@ repos: - id: rst-inline-touching-normal - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.10 + rev: v0.14.10 hooks: - id: ruff - id: ruff-format diff --git a/extra/specifications/schemas/build-details-v1.0.schema.json b/extra/specifications/schemas/build-details-v1.0.schema.json new file mode 100644 index 000000000..9954ddab7 --- /dev/null +++ b/extra/specifications/schemas/build-details-v1.0.schema.json @@ -0,0 +1,261 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://packaging.python.org/en/latest/specifications/schemas/build-details-v1.0.schema.json", + "type": "object", + "title": "build-details.json — a static description file with build details of Python installations", + "required": [ + "schema_version", + "base_prefix", + "platform", + "language", + "implementation" + ], + "additionalProperties": false, + "properties": { + "schema_version": { + "type": "string", + "description": "Schema version.\n\nThis is a string following the format ``<MAJOR>.<MINOR>``, where ``<MAJOR>`` and ``<MINOR>`` are unpadded numbers and represent the **major** and **minor** components of the version. Versions may be arithmetically compared by interpreting the version string as a decimal number.\n\nFor this specification version, this value is constant and **MUST** be ``1.0``.\n\nFuture versions of this schema **MUST** use a higher version number.
Future versions of this schema **MUST NOT** use the same **major** version component as other schema versions unless its specification is deemed backwards-compatible with them — it can't change, or extend, any parts of the current specification in such a way that the semantics of the interpreted data differ, or that data valid under the new specification is invalid under the older specification, with the exception of additional properties (errors caused by ``additionalProperties``).", + "const": "1.0" + }, + "base_prefix": { + "type": "string", + "description": "Base prefix of the Python installation.\n\nEither an absolute path, or a path relative to the directory where this file is contained.", + "examples": [ + "/usr", + "../.." + ] + }, + "base_interpreter": { + "type": "string", + "description": "The path to the Python interpreter of the base installation.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the installation provides an interpreter executable.", + "examples": [ + "/usr/bin/python", + "bin/python" + ] + }, + "platform": { + "type": "string", + "description": "System platform string.\n\nThis field **SHOULD** be equivalent to ``sysconfig.get_platform()``.", + "examples": [ + "linux-x86_64" + ] + }, + "language": { + "type": "object", + "description": "Object containing details related to the Python language specification.", + "required": [ + "version" + ], + "additionalProperties": false, + "properties": { + "version": { + "type": "string", + "description": "String representation of the Python language version — a version string consisting only of the *major* and *minor* components.\n\nThis field **SHOULD** be equivalent to ``sysconfig.get_python_version()``.", + "examples": ["3.14"] + }, + "version_info": { + "type": "object", + "description": "Object in the format of :py:data:`sys.version_info`.\n\nThis section **SHOULD** be equivalent to :py:data:`sys.version_info`.", + "required": ["major", "minor", "micro", "releaselevel", "serial"], + "additionalProperties": false, + "examples": [ + { + "major": 3, + "minor": 14, + "micro": 1, + "releaselevel": "final", + "serial": 0 + } + ], + "properties": { + "major": { + "type": "number" + }, + "minor": { + "type": "number" + }, + "micro": { + "type": "number" + }, + "releaselevel": { + "type": "string", + "enum": ["alpha", "beta", "candidate", "final"] + }, + "serial": { + "type": "number" + } + } + } + } + }, + "implementation": { + "type": "object", + "description": "Object containing details related to the Python implementation.\n\nThis section **SHOULD** be equivalent to :py:data:`sys.implementation`.
It follows the specification defined in PEP 421, meaning that on top of the required keys, implementation-specific keys can also exist, but must be prefixed with an underscore.", + "required": [ + "name", + "version", + "hexversion", + "cache_tag" + ], + "additionalProperties": true, + "properties": { + "name": { + "type": "string", + "description": "Lower-case name of the Python implementation.", + "examples": ["cpython", "pypy"] + }, + "version": { + "type": "object", + "description": "Object in the format of :py:data:`sys.version_info`, containing the implementation version.", + "required": ["major", "minor", "micro", "releaselevel", "serial"], + "additionalProperties": false, + "examples": [ + { + "major": 3, + "minor": 14, + "micro": 1, + "releaselevel": "final", + "serial": 0 + }, + { + "major": 7, + "minor": 3, + "micro": 16, + "releaselevel": "final", + "serial": 0 + } + ], + "properties": { + "major": { + "type": "number" + }, + "minor": { + "type": "number" + }, + "micro": { + "type": "number" + }, + "releaselevel": { + "type": "string", + "enum": ["alpha", "beta", "candidate", "final"] + }, + "serial": { + "type": "number" + } + } + } + } + }, + "abi": { + "type": "object", + "description": "Object containing details related to ABI.", + "required": [ + "flags" + ], + "additionalProperties": false, + "properties": { + "flags": { + "type": "array", + "description": "Build configuration flags, used to calculate the extension suffix.\n\nThe flags **MUST** be defined in the order they appear on the extension suffix.", + "additionalProperties": true, + "examples": [ + ["t", "d"] + ] + }, + "extension_suffix": { + "type": "string", + "description": "Suffix used for extensions built against the current implementation version.\n\nThis field **MUST** be present if the Python implementation supports extensions, otherwise this entry will be missing.", + "examples": [ + ".cpython-314-x86_64-linux-gnu.so" + ] + }, + "stable_abi_suffix": { + "type": "string", + "description": "Suffix used for extensions built against the stable ABI.\n\nThis field **MUST** be present if the Python implementation has a stable ABI extension suffix, otherwise this entry will be missing.", + "examples": [ + ".abi3.so" + ] + } + } + }, + "suffixes": { + "type": "object", + "description": "Valid module suffixes grouped by type.\n\nThis section **MUST** be present if the Python installation supports importing external files, and it **SHOULD** be equivalent to the ``importlib.machinery.*_SUFFIXES`` attributes.\n\nAdditionally, if a Python implementation provides extension kinds other than the ones listed on ``importlib.machinery`` module, they **MAY** add a sub-section for them.", + "examples": [ + { + "source": [".py"], + "bytecode": [".pyc"], + "optimized_bytecode": [".pyc"], + "debug_bytecode": [".pyc"], + "extensions": [".cpython-313-x86_64-linux-gnu.so", ".abi3.so", ".so"] + } + ] + }, + "libpython": { + "type": "object", + "description": "Object containing details related to the ``libpython`` library.\n\nThis section **MUST** be present if the Python installation provides a ``libpython`` library, otherwise this section will be missing.", + "additionalProperties": false, + "properties": { + "dynamic": { + "type": "string", + "description": "The path to the dynamic ``libpython`` library.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python installation provides a dynamic ``libpython`` library, otherwise this entry will be missing.", + "examples": [ +
"/usr/lib/libpython3.14.so.1.0", + "lib/libpython3.14.so.1.0" + ] + }, + "dynamic_stableabi": { + "type": "string", + "description": "The path to the dynamic ``libpython`` library for the stable ABI.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python installation provides a dynamic ``libpython`` library targeting the Stable ABI, otherwise this entry will be missing.\n\nIf this key is present ``dynamic`` **MUST** also be set.", + "examples": [ + "/usr/lib/libpython3.so", + "lib/libpython3.so" + ] + }, + "static": { + "type": "string", + "description": "The path to the static ``libpython`` library.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python installation provides a static ``libpython`` library, otherwise this entry will be missing.", + "examples": [ + "/usr/lib/python3.14/config-3.14-x86_64-linux-gnu/libpython3.14.a", + "lib/python3.14/config-3.14-x86_64-linux-gnu/libpython3.14.a" + ] + }, + "link_extensions": { + "type": "boolean", + "description": "Should extensions built against a dynamic ``libpython`` link to it?\n\nThis field **MUST** be present if the Python installation provides a dynamic ``libpython`` library, otherwise this entry will be missing." + } + } + }, + "c_api": { + "type": "object", + "description": "Object containing details related to the Python C API.\n\nThis section **MUST** be present if the Python implementation provides a C API, otherwise this section will be missing.", + "required": [ + "headers" + ], + "additionalProperties": false, + "properties": { + "headers": { + "type": "string", + "description": "The path to the C API headers.\n\nEither an absolute path, or a path relative to ``base_prefix``.", + "examples": [ + "/usr/include/python3.14", + "include/python3.14" + ] + }, + "pkgconfig_path": { + "type": "string", + "description": "The path to the pkg-config definition files.\n\nEither an absolute path, or a path relative to ``base_prefix``.\n\nThis field **MUST** be present if the Python implementation provides pkg-config definition files, otherwise this section will be missing.", + "examples": [ + "/usr/lib/pkgconfig", + "lib/pkgconfig" + ] + } + } + }, + "arbitrary_data": { + "type": "object", + "description": "Object containing extra arbitrary data.\n\nThis is meant to be used as an escape-hatch, to include any relevant data that is not covered by this specification. 
Implementations may choose what data to provide in this section.", + "additionalProperties": true + } + } +} diff --git a/extra/specifications/schemas/direct-url.schema.json b/extra/specifications/schemas/direct-url.schema.json new file mode 100644 index 000000000..d1f4c860a --- /dev/null +++ b/extra/specifications/schemas/direct-url.schema.json @@ -0,0 +1,99 @@ +{ + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "https://packaging.python.org/en/latest/specifications/schemas/direct-url.schema.json", + "title": "Direct URL Data", + "description": "Data structure that can represent URLs to python projects and distribution artifacts such as VCS source trees, local source trees, source distributions and wheels.", + "definitions": { + "url": { + "type": "string", + "format": "uri" + }, + "DirInfo": { + "type": "object", + "properties": { + "editable": { + "type": ["boolean", "null"] + } + } + }, + "VCSInfo": { + "type": "object", + "properties": { + "vcs": { + "type": "string", + "enum": ["git", "hg", "bzr", "svn"] + }, + "requested_revision": { + "type": "string" + }, + "commit_id": { + "type": "string" + }, + "resolved_revision": { + "type": "string" + } + }, + "required": ["vcs", "commit_id"] + }, + "ArchiveInfo": { + "type": "object", + "properties": { + "hash": { + "type": "string", + "pattern": "^\\w+=[a-f0-9]+$", + "deprecated": true + }, + "hashes": { + "type": "object", + "patternProperties": { + "^[a-f0-9]+$": { + "type": "string" + } + } + } + } + } + }, + "allOf": [ + { + "type": "object", + "properties": { + "url": { + "$ref": "#/definitions/url" + } + }, + "required": ["url"] + }, + { + "anyOf": [ + { + "type": "object", + "properties": { + "dir_info": { + "$ref": "#/definitions/DirInfo" + } + }, + "required": ["dir_info"] + }, + { + "type": "object", + "properties": { + "vcs_info": { + "$ref": "#/definitions/VCSInfo" + } + }, + "required": ["vcs_info"] + }, + { + "type": "object", + "properties": { + "archive_info": { + "$ref": "#/definitions/ArchiveInfo" + } + }, + "required": ["archive_info"] + } + ] + } + ] +} diff --git a/extra/specifications/schemas/pylock.schema.json b/extra/specifications/schemas/pylock.schema.json new file mode 100644 index 000000000..90404e33d --- /dev/null +++ b/extra/specifications/schemas/pylock.schema.json @@ -0,0 +1,345 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://packaging.python.org/en/latest/specifications/schemas/pylock.schema.json", + "additionalProperties": false, + "definitions": { + "tool": { + "type": "object", + "markdownDescription": "Similar usage as that of the `[tool]` table from the [pyproject.toml specification](https://packaging.python.org/en/latest/specifications/pyproject-toml/#pyproject-toml-spec), but at the package version level instead of at the lock file level (which is also available via `[tool]`).", + "additionalProperties": { + "type": "object", + "additionalProperties": true + } + }, + "url": { + "type": "string", + "markdownDescription": "The URL to the source tree." + }, + "path": { + "type": "string", + "markdownDescription": "The path to the local directory of the source tree." + }, + "upload-time": { + "markdownDescription": "The time the file was uploaded (UTC). Must be specified as a datetime literal." + }, + "size": { + "type": "integer", + "markdownDescription": "The size of the archive file." 
+ }, + "hashes": { + "type": "object", + "description": "Known hash values of the file where the key is the hash algorithm and the value is the hash value.", + "additionalProperties": { + "type": "string" + } + }, + "subdirectory": { + "type": "string", + "markdownDescription": "The subdirectory within the [source tree](https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-format-source-tree) where the project root of the project is (e.g. the location of the `pyproject.toml` file)." + }, + "vcs": { + "type": "object", + "markdownDescription": "Record the version control system details for the [source tree](https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-format-source-tree) it contains.", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "markdownDescription": "The type of version control system used." + }, + "url": { + "$ref": "#/definitions/url" + }, + "path": { + "$ref": "#/definitions/path" + }, + "requested-revision": { + "type": "string", + "markdownDescription": "The branch/tag/ref/commit/revision/etc. that the user requested." + }, + "commit-id": { + "type": "string", + "markdownDescription": "The exact commit/revision number that is to be installed." + }, + "subdirectory": { + "$ref": "#/definitions/subdirectory" + } + } + }, + "directory": { + "type": "object", + "markdownDescription": "Record the local directory details for the [source tree](https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-format-source-tree) it contains.", + "additionalProperties": false, + "properties": { + "path": { + "type": "string", + "markdownDescription": "The local directory where the source tree is." + }, + "editable": { + "type": "boolean", + "default": false, + "markdownDescription": "A flag representing whether the source tree was an editable install at lock time." + }, + "subdirectory": { + "$ref": "#/definitions/subdirectory" + } + } + }, + "archive": { + "type": "object", + "additionalProperties": false, + "markdownDescription": "A direct reference to an archive file to install from (this can include wheels and sdists, as well as other archive formats containing a source tree).", + "properties": { + "url": { + "$ref": "#/definitions/url" + }, + "path": { + "$ref": "#/definitions/path" + }, + "size": { + "$ref": "#/definitions/size" + }, + "upload-time": { + "$ref": "#/definitions/upload-time" + }, + "hashes": { + "$ref": "#/definitions/hashes" + }, + "subdirectory": { + "$ref": "#/definitions/subdirectory" + } + } + }, + "sdist": { + "type": "object", + "additionalProperties": false, + "markdownDescription": "Details of a [source distribution file name](https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-format-sdist) for the package.", + "properties": { + "name": { + "type": "string", + "markdownDescription": "The file name of the [source distribution file name](https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-format-sdist) file." 
+ }, + "upload-time": { + "$ref": "#/definitions/upload-time" + }, + "url": { + "$ref": "#/definitions/url" + }, + "path": { + "$ref": "#/definitions/path" + }, + "size": { + "$ref": "#/definitions/size" + }, + "hashes": { + "$ref": "#/definitions/hashes" + } + } + }, + "wheels": { + "type": "array", + "markdownDescription": "For recording the wheel files as specified by [Binary distribution format](https://packaging.python.org/en/latest/specifications/binary-distribution-format/#binary-distribution-format) for the package.", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "markdownDescription": "The file name of the [Binary distribution format](https://packaging.python.org/en/latest/specifications/binary-distribution-format/#binary-distribution-format) file." + }, + "upload-time": { + "$ref": "#/definitions/upload-time" + }, + "url": { + "$ref": "#/definitions/url" + }, + "path": { + "$ref": "#/definitions/path" + }, + "size": { + "$ref": "#/definitions/size" + }, + "hashes": { + "$ref": "#/definitions/hashes" + } + } + } + }, + "1.0": { + "required": ["lock-version", "created-by", "packages"], + "properties": { + "lock-version": { + "type": "string", + "enum": ["1.0"], + "description": "Record the file format version that the file adheres to." + }, + "environments": { + "type": "array", + "markdownDescription": "A list of [environment markers](https://packaging.python.org/en/latest/specifications/dependency-specifiers/#dependency-specifiers-environment-markers) for which the lock file is considered compatible with.", + "items": { + "type": "string", + "description": "Environment marker" + } + }, + "requires-python": { + "type": "string", + "markdownDescription": "Specifies the [Requires-Python](https://packaging.python.org/en/latest/specifications/core-metadata/#core-metadata-requires-python) for the minimum Python version compatible for any environment supported by the lock file (i.e. the minimum viable Python version for the lock file)." + }, + "extras": { + "type": "array", + "markdownDescription": "The list of [extras](https://packaging.python.org/en/latest/specifications/core-metadata/#core-metadata-provides-extra) supported by this lock file.", + "default": [], + "items": { + "type": "string", + "description": "Extra name" + } + }, + "dependency-groups": { + "type": "array", + "markdownDescription": "The list of [dependency groups](https://packaging.python.org/en/latest/specifications/dependency-groups/#dependency-groups) publicly supported by this lock file (i.e. dependency groups users are expected to be able to specify via a tool’s UI).", + "default": [], + "items": { + "type": "string", + "description": "Dependency group name" + } + }, + "default-groups": { + "type": "array", + "markdownDescription": "The name of synthetic dependency groups to represent what should be installed by default (e.g. what `project.dependencies` implicitly represents).", + "default": [], + "items": { + "type": "string", + "description": "Dependency group name" + } + }, + "created-by": { + "type": "string", + "markdownDescription": "Records the name of the tool used to create the lock file." 
+ }, + "packages": { + "type": "array", + "markdownDescription": "An array containing all packages that may be installed.", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["name"], + "allOf": [ + { + "if": { + "required": ["vcs"] + }, + "then": { + "not": { + "required": ["directory", "archive", "sdist", "wheels"] + } + } + }, + { + "if": { + "required": ["directory"] + }, + "then": { + "not": { + "required": ["vcs", "archive", "sdist", "wheels"] + } + } + }, + { + "if": { + "required": ["sdist"] + }, + "then": { + "not": { + "required": ["vcs", "directory", "archive"] + } + } + }, + { + "if": { + "required": ["wheels"] + }, + "then": { + "not": { + "required": ["vcs", "directory", "archive"] + } + } + } + ], + "properties": { + "name": { + "type": "string", + "markdownDescription": "The name of the package, [normalized](https://packaging.python.org/en/latest/specifications/name-normalization/#name-normalization)." + }, + "version": { + "type": "string", + "description": "The version of the package." + }, + "marker": { + "type": "string", + "markdownDescription": "The [environment marker](https://packaging.python.org/en/latest/specifications/dependency-specifiers/#dependency-specifiers-environment-markers) which specify when the package should be installed." + }, + "requires-python": { + "type": "string", + "markdownDescription": "Holds the [version specifiers](https://packaging.python.org/en/latest/specifications/version-specifiers/#version-specifiers) for Python version compatibility for the package." + }, + "dependencies": { + "type": "array", + "markdownDescription": "Records the other entries in `[[packages]]` which are direct dependencies of this package.", + "items": { + "type": "object", + "markdownDescription": "A table which contains the minimum information required to tell which other package entry it corresponds to where doing a key-by-key comparison would find the appropriate package with no ambiguity (e.g. if there are two entries for the `spam` package, then you can include the version number like `{name = \"spam\", version = \"1.0.0\"}`, or by source like `{name = \"spam\", vcs = { url = \"...\"}`).", + "additionalProperties": true + } + }, + "vcs": { + "$ref": "#/definitions/vcs" + }, + "directory": { + "$ref": "#/definitions/directory" + }, + "archive": { + "$ref": "#/definitions/archive" + }, + "index": { + "type": "string", + "markdownDescription": "The base URL for the package index from [simple repository API](https://packaging.python.org/en/latest/specifications/simple-repository-api/#simple-repository-api) where the sdist and/or wheels were found (e.g. `https://pypi.org/simple/`)." + }, + "sdist": { + "$ref": "#/definitions/sdist" + }, + "wheels": { + "$ref": "#/definitions/wheels" + }, + "attestation-identities": { + "type": "array", + "markdownDescription": "A recording of the attestations for any file recorded for this package.", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["kind"], + "properties": { + "kind": { + "type": "string", + "markdownDescription": "The unique identity of the Trusted Publisher." 
+ } + } + } + }, + "tool": { + "$ref": "#/definitions/tool" + } + } + } + }, + "tool": { + "$ref": "#/definitions/tool" + } + } + } + }, + "oneOf": [ + { + "$ref": "#/definitions/1.0" + } + ], + "type": "object" +} diff --git a/noxfile.py b/noxfile.py index 698e82f9d..484a8d39a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -89,6 +89,7 @@ def linkcheck(session): "--keep-going", # be strict "source", # where the rst files are located "build", # where to put the check output + *session.posargs, ) diff --git a/pug_sphinx_extensions/__init__.py b/pug_sphinx_extensions/__init__.py new file mode 100644 index 000000000..00d91da3c --- /dev/null +++ b/pug_sphinx_extensions/__init__.py @@ -0,0 +1,86 @@ +import os +import pathlib +import urllib + +import sphinx.application +import sphinx.util.logging + + +DOMAIN = "packaging.python.org" + + +logger = sphinx.util.logging.getLogger(__name__) + + +def resolve_local_html_link(app: sphinx.application.Sphinx, url_path: str) -> str: + """Takes path of a link pointing an HTML render of the current project, + and returns local path of the referenced document. + + Support links to renders from both the `html` and `dirhtml` builders. + + Example: + + .. code-block:: python + + >>> resolve_local_html_link('https://packaging.python.org/en/latest/flow/') + '{srcdir}/flow.rst' + >>> resolve_local_html_link('https://packaging.python.org/en/latest/flow.html') + '{srcdir}/flow.rst' + >>> resolve_local_html_link('https://packaging.python.org/en/latest/specifications/schemas/') + '{srcdir}/specifications/schemas/index.rst' + >>> resolve_local_html_link('https://packaging.python.org/en/latest/specifications/schemas/build-details-v1.0.schema.json') + '{html_extra_path0}/specifications/schemas/build-details-v1.0.schema.json' + + """ + # Search for document in html_extra_path + for entry in app.config.html_extra_path: + candidate = (app.confdir / entry / url_path).resolve() + if candidate.is_dir(): + candidate = candidate / "index.html" + if candidate.exists(): + return os.fspath(candidate) + # Convert html path to source path + url_path = url_path.removesuffix("/") # Normalize + if url_path.endswith(".html"): + document = url_path.removesuffix(".html") + elif (candidate := f"{url_path}/index") in app.project.docnames: + document = candidate + else: + document = url_path + return app.env.doc2path(document) + + +def rewrite_local_uri(app: sphinx.application.Sphinx, uri: str) -> str: + """Replace remote URIs targeting https://packaging.python.org/en/latest/... + with local ones, so that local changes are taken into account by linkcheck. + + Additionally, resolve local relative links to html_extra_path. + """ + local_uri = uri + parsed = urllib.parse.urlparse(uri) + # Links to https://packaging.python.org/en/latest/... 
+ if parsed.hostname == DOMAIN and parsed.path.startswith("/en/latest/"): + document = parsed.path.removeprefix("/en/latest/") + local_uri = resolve_local_html_link(app, document) + logger.verbose( + f"{uri!s} is a remote URL that points to local sources, " + "replacing it with a local URL in linkcheck to take new changes " + "into account (pass -vv for more info)" + ) + logger.debug(f"Replacing linkcheck URL {uri!r} with {local_uri!r}") + # Local relative links + if not parsed.scheme and not parsed.netloc and parsed.path: + full_path = pathlib.Path(app.env.docname).parent / parsed.path + local_uri = resolve_local_html_link(app, os.fspath(full_path)) + if local_uri != uri: + logger.verbose(f"Local linkcheck URL {uri!r} resolved as {local_uri!r}") + return local_uri + + +def setup(app: sphinx.application.Sphinx) -> dict[str, bool]: + app.connect("linkcheck-process-uri", rewrite_local_uri) + + return { + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/requirements.txt b/requirements.txt index 077adb580..5c710c86b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ -furo==2023.9.10 +furo==2024.8.6 sphinx==7.2.6 sphinx-autobuild==2021.3.14 sphinx-inline-tabs==2023.4.21 sphinx-copybutton==0.5.2 sphinx-toolbox==3.5.0 +sphinx-jsonschema==1.19.1 diff --git a/scripts/update_uv_build_version.py b/scripts/update_uv_build_version.py new file mode 100644 index 000000000..69fefba27 --- /dev/null +++ b/scripts/update_uv_build_version.py @@ -0,0 +1,64 @@ +# /// script +# requires-python = ">= 3.12" +# dependencies = [ +# "httpx>=0.28.1,<0.29", +# "packaging>=25.0", +# ] +# /// +import os +import re +from pathlib import Path + +import httpx +from packaging.utils import parse_wheel_filename +from packaging.version import Version + + +def main(): + response = httpx.get( + "https://pypi.org/simple/uv-build/", + headers={"Accept": "application/vnd.pypi.simple.v1+json"}, + ) + response.raise_for_status() + data = response.json() + current_release = None + for file in data["files"]: + if not file["filename"].endswith(".whl"): + continue + _name, version, _build, _tags = parse_wheel_filename(file["filename"]) + if version.is_prerelease: + continue + if current_release is None or version > current_release: + current_release = version + + [major, minor, _patch] = current_release.release + if major != 0: + raise NotImplementedError("The script needs to be updated for uv 1.x") + upper_bound = Version(f"{major}.{minor + 1}.0") + + repository_root = Path(__file__).parent.parent + existing = repository_root.joinpath( + "source/shared/build-backend-tabs.rst" + ).read_text() + replacement = f'requires = ["uv_build >= {current_release}, <{upper_bound}"]' + searcher = re.compile(re.escape('requires = ["uv_build') + ".*" + re.escape('"]')) + if not searcher.search(existing): + raise RuntimeError("Could not `uv-build` entry") + updated = searcher.sub(replacement, existing) + + if existing != updated: + print("Updating source/shared/build-backend-tabs.rst") + Path("source/shared/build-backend-tabs.rst").write_text(updated) + if github_output := os.environ.get("GITHUB_OUTPUT"): + with open(github_output, "a") as f: + f.write(f"version={current_release}\n") + f.write("updated=true\n") + else: + print("Already up-to-date source/shared/build-backend-tabs.rst") + if github_output := os.environ.get("GITHUB_OUTPUT"): + with open(github_output, "a") as f: + f.write("updated=false\n") + + +if __name__ == "__main__": + main() diff --git a/source/conf.py b/source/conf.py index 
cd459ffb2..ccb828b6e 100644 --- a/source/conf.py +++ b/source/conf.py @@ -2,6 +2,11 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information import os +import pathlib +import sys + +_ROOT = pathlib.Path(__file__).resolve().parent.parent +sys.path.append(os.fspath(_ROOT)) # Some options are only enabled for the main packaging.python.org deployment builds RTD_BUILD = bool(os.getenv("READTHEDOCS")) @@ -22,12 +27,14 @@ root_doc = "index" extensions = [ + "pug_sphinx_extensions", "sphinx.ext.extlinks", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx_inline_tabs", "sphinx_copybutton", "sphinx_toolbox.collapse", + "sphinx-jsonschema", ] nitpicky = True @@ -62,14 +69,19 @@ html_title = "Python Packaging User Guide" html_theme = "furo" +html_theme_options = { + "source_edit_link": "https://github.com/pypa/packaging.python.org/edit/main/source/{filename}", + "source_view_link": "https://github.com/pypa/packaging.python.org/blob/main/source/{filename}?plain=true", +} + html_favicon = "assets/py.png" html_last_updated_fmt = "" _metrics_js_files = [ ( - "https://plausible.io/js/script.js", + "https://analytics.python.org/js/script.outbound-links.js", {"data-domain": "packaging.python.org", "defer": "defer"}, - ) + ), ] html_js_files = [] if RTD_CANONICAL_BUILD: @@ -77,6 +89,10 @@ # https://plausible.io/packaging.python.org html_js_files.extend(_metrics_js_files) +html_extra_path = [ + "../extra", +] + # -- Options for HTML help output ------------------------------------------------------ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-help-output @@ -122,23 +138,34 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-the-linkcheck-builder linkcheck_ignore = [ - "http://localhost:\\d+", - "https://test.pypi.org/project/example-package-YOUR-USERNAME-HERE", - "https://pypi.org/manage/*", - "https://test.pypi.org/manage/*", + r"http://localhost:\d+", + r"https://test\.pypi\.org/project/example-package-YOUR-USERNAME-HERE", + r"https://pypi\.org/manage/.*", + r"https://test\.pypi\.org/manage/.*", # Temporarily ignored. Ref: # https://github.com/pypa/packaging.python.org/pull/1308#issuecomment-1775347690 - "https://www.breezy-vcs.org/*", + r"https://www\.breezy-vcs\.org/.*", # Ignore while StackOverflow is blocking GitHub CI. Ref: # https://github.com/pypa/packaging.python.org/pull/1474 - "https://stackoverflow.com/*", + r"https://stackoverflow\.com/.*", + r"https://pyscaffold\.org/.*", + r"https://anaconda\.org", + r"https://www\.cisa\.gov/sbom", + r"https://developers\.redhat\.com/products/softwarecollections/overview", + r"https://math-atlas\.sourceforge\.net/?", + r"https://click\.palletsprojects\.com/.*", + r"https://typer\.tiangolo\.com/.*", + r"https://www.npmjs.com/.*", ] linkcheck_retries = 5 -# Ignore anchors for links to GitHub project pages -- GitHub adds anchors from -# README.md headings through JavaScript, so Sphinx's linkcheck can't find them -# in the HTML. 
+# Ignore anchors for common targets when we know they likely won't be found linkcheck_anchors_ignore_for_url = [ + # GitHub synthesises anchors in JavaScript, so Sphinx can't find them in the HTML r"https://github\.com/", + r"https://docs\.github\.com/", + # While PyPI has its botscraping defenses active, Sphinx can't resolve the anchors + # https://github.com/pypa/packaging.python.org/issues/1744 + r"https://pypi\.org/", ] # -- Options for extlinks ---------------------------------------------------------- diff --git a/source/contribute.rst b/source/contribute.rst index a246751e4..f512dd30d 100644 --- a/source/contribute.rst +++ b/source/contribute.rst @@ -11,7 +11,7 @@ including: * Reviewing new contributions * Revising existing content * Writing new content -* Translate the guide +* Translating the guide Most of the work on the |PyPUG| takes place on the `project's GitHub repository`__. To get started, check out the list of @@ -45,7 +45,7 @@ Tutorials are focused on teaching the reader new concepts by accomplishing a goal. They are opinionated step-by-step guides. They do not include extraneous warnings or information. `example tutorial-style document`_. -.. _example tutorial-style document: https://docs.djangoproject.com/en/1.11/intro/ +.. _example tutorial-style document: https://docs.djangoproject.com/en/dev/intro/ Guides ------ @@ -103,7 +103,6 @@ If you are not familiar with reStructuredText (RST) syntax, please read `this gu before translating on Weblate. **Do not translate the text in reference directly** - When translating the text in reference, please do not translate them directly. | Wrong: Translate the following text directly: diff --git a/source/discussions/deploying-python-applications.rst b/source/discussions/deploying-python-applications.rst index 19ecd398e..e10f36f9c 100644 --- a/source/discussions/deploying-python-applications.rst +++ b/source/discussions/deploying-python-applications.rst @@ -23,7 +23,7 @@ Supporting multiple hardware platforms For Python-only distributions, it *should* be straightforward to deploy on all platforms where Python can run. - For distributions with binary extensions, deployment is major headache. Not only + For distributions with binary extensions, deployment is a major headache. Not only must the extensions be built on all the combinations of operating system and hardware platform, but they must also be tested, preferably on continuous integration platforms. The issues are similar to the "multiple Python diff --git a/source/discussions/downstream-packaging.rst b/source/discussions/downstream-packaging.rst new file mode 100644 index 000000000..3f4795fa8 --- /dev/null +++ b/source/discussions/downstream-packaging.rst @@ -0,0 +1,481 @@ +.. _downstream-packaging: + +=============================== +Supporting downstream packaging +=============================== + +:Page Status: Draft +:Last Reviewed: 2025-? + +While PyPI and the Python packaging tools such as :ref:`pip` are the primary +means of distributing Python packages, they are also often made available as part +of other packaging ecosystems. These repackaging efforts are collectively called +*downstream* packaging (your own efforts are called *upstream* packaging), +and include such projects as Linux distributions, Conda, Homebrew and MacPorts. 
+They generally aim to provide improved support for use cases that cannot be handled +via Python packaging tools alone, such as native integration with a specific operating +system, or assured compatibility with specific versions of non-Python software. + +This discussion attempts to explain how downstream packaging is usually done, +and what additional challenges downstream packagers typically face. It aims +to provide some optional guidelines that project maintainers may choose to +follow which help make downstream packaging *significantly* easier +(without imposing any major maintenance hassles on the upstream project). +Note that this is not an all-or-nothing proposal β€” anything that upstream +maintainers can do is useful, even if it's only a small part. Downstream +maintainers are also willing to prepare patches to resolve these issues. +Having these patches merged can be very helpful, since it removes the need +for different downstreams to carry and keep rebasing the same patches, +and the risk of applying inconsistent solutions to the same problem. + +Establishing a good relationship between software maintainers and downstream +packagers can bring mutual benefits. Downstreams are often willing to share +their experience, time and hardware to improve your package. They are +sometimes in a better position to see how your package is used in practice, +and to provide information about its relationships with other packages that +would otherwise require significant effort to obtain. +Packagers can often find bugs before your users hit them in production, +provide bug reports of good quality, and supply patches whenever they can. +For example, they are regularly active in ensuring the packages they redistribute +are updated for any compatibility issues that arise when a new Python version +is released. + +Please note that downstream builds include not only binary redistribution, +but also source builds done on user systems (in source-first distributions +such as Gentoo Linux, for example). + + +.. _provide-complete-source-distributions: + +Provide complete source distributions +------------------------------------- + +Why? +~~~~ + +The vast majority of downstream packagers prefer to build packages from source, +rather than use the upstream-provided binary packages. In some cases, using +sources is actually required for the package to be included in the distribution. +This is also true of pure Python packages that provide universal wheels. +The reasons for using source distributions may include: + +- Being able to audit the source code of all packages. + +- Being able to run the test suite and build documentation. + +- Being able to easily apply patches, including backporting commits + from the project's repository and sending patches back to the project. + +- Being able to build on a specific platform that is not covered + by upstream builds. + +- Being able to build against specific versions of system libraries. + +- Having a consistent build process across all Python packages. + +While it is usually possible to build packages from a Git repository, there are +a few important reasons to provide a static archive file instead: + +- Fetching a single file is often more efficient, more reliable and better + supported than e.g. using a Git clone. This can help users with poor + Internet connectivity. + +- Downstreams often use hashes to verify the authenticity of source files + on subsequent builds, which require that they remain bitwise identical over + time. 
For example, automatically generated Git archives do not guarantee + this, as the compressed data may change if gzip is upgraded on the server. + +- Archive files can be mirrored, reducing both upstream and downstream + bandwidth use. The actual builds can afterwards be performed in firewalled + or offline environments that can only access source files provided + by the local mirror or redistributed earlier. + +- Explicitly publishing archive files can ensure that any dependencies on version control + system metadata are resolved when creating the source archive. For example, automatically + generated Git archives omit all of the commit tag information, potentially resulting in + incorrect version details in the resulting builds. + +How? +~~~~ + +Ideally, **a source distribution archive published on PyPI should include all the files +from the package's Git repository** that are necessary to build the package +itself, run its test suite, build and install its documentation, and any other +files that may be useful to end users, such as shell completions, editor +support files, and so on. + +This point applies only to the files belonging to the package itself. +The downstream packaging process, much like Python package managers, will +provision the necessary Python dependencies, system tools and external +libraries that are needed by your package and its build scripts. However, +the files listing these dependencies (for example, ``requirements*.txt`` files) +should also be included, to help downstreams determine the needed dependencies, +and check for changes in them. + +Some projects have concerns related to Python package managers using source +distributions from PyPI. They do not wish to increase their size with files +that are not used by these tools, or they do not wish to publish source +distributions at all, as they enable a problematic or outright nonfunctional +fallback to building the particular project from source. In these cases, a good +compromise may be to publish a separate source archive for downstream use +elsewhere, for example by attaching it to a GitHub release. Alternatively, +large files, such as test data, can be split into separate archives. + +On the other hand, some projects (NumPy_, for instance) decide to include tests +in their installed packages. This has the added advantage of permitting users to +run tests after installation, for example to check for regressions +after upgrading a dependency. Yet another approach is to split tests or test +data into a separate Python package. Such an approach was taken by +the cryptography_ project, with the large test vectors being split +to the cryptography-vectors_ package. + +A good idea is to use your source distribution in the release workflow. +For example, the :ref:`build` tool does exactly that — it first builds a source +distribution, and then uses it to build a wheel. This ensures that the source +distribution actually works, and that it won't accidentally install fewer files +than the official wheels. + +Ideally, also use the source distribution to run tests, build documentation, +and so on, or add specific tests to make sure that all necessary files were +actually included. Understandably, this requires more effort, so it's fine +not to do that — downstream packagers will report any missing files promptly. + + +.. _no-internet-access-in-builds: + +Do not use the Internet during the build process +------------------------------------------------ + +Why?
+~~~~ + +Downstream builds are frequently done in sandboxed environments that cannot +access the Internet. The package sources are unpacked into this environment, +and all the necessary dependencies are installed. + +Even if this is not the case, and assuming that you took sufficient care to +properly authenticate downloads, using the Internet is discouraged for a number +of reasons: + +- The Internet connection may be unstable (e.g. due to poor reception) + or suffer from temporary problems that could cause the process to fail + or hang. + +- The remote resources may become temporarily or even permanently + unavailable, making the build no longer possible. This is especially + problematic when someone needs to build an old package version. + +- The remote resources may change, making the build not reproducible. + +- Accessing remote servers poses a privacy issue and a potential + security issue, as it exposes information about the system building + the package. + +- The user may be using a service with a limited data plan, in which + uncontrolled Internet access may result in additional charges or other + inconveniences. + +How? +~~~~ + +If the package is implementing any custom build *backend* actions that use +the Internet, for example by automatically downloading vendored dependencies +or fetching Git submodules, its source distribution should either include all +of these files or allow provisioning them externally, and the Internet must not +be used if the files are already present. + +Note that this point does not apply to Python dependencies that are specified +in the package metadata, and are fetched during the build and installation +process by *frontends* (such as :ref:`build` or :ref:`pip`). Downstreams use +frontends that use local provisioning for Python dependencies. + +Ideally, custom build scripts should not even attempt to access the Internet +at all, unless explicitly requested to. If any resources are missing and need +to be fetched, they should ask the user for permission first. If that is not +feasible, the next best thing is to provide an opt-out switch to disable +all Internet access. This could be done e.g. by checking whether +a ``NO_NETWORK`` environment variable is set to a non-empty value. + +Since downstreams frequently also run tests and build documentation, the above +should ideally extend to these processes as well. + +Please also remember that if you are fetching remote resources, you absolutely +must *verify their authenticity* (usually against a hash), to protect against +the file being substituted by a malicious party. + + +.. _support-system-dependencies-in-builds: + +Support building against system dependencies +-------------------------------------------- + +Why? +~~~~ + +Some Python projects have non-Python dependencies, such as libraries written +in C or C++. Trying to use the system versions of these dependencies +in upstream packaging may cause a number of problems for end users: + +- The published wheels require a binary-compatible version of the used + library to be present on the user's system. If the library is missing + or an incompatible version is installed, the Python package may fail with errors + that are not clear to inexperienced users, or even misbehave at runtime. + +- Building from a source distribution requires a source-compatible version + of the dependency to be present, along with its development headers + and other auxiliary files that some systems package separately + from the library itself. 
+ +- Even for an experienced user, installing a compatible dependency version + may be very hard. For example, the used Linux distribution may not provide + the required version, or some other package may require an incompatible + version. + +- The linkage between the Python package and its system dependency is not + recorded by the packaging system. The next system update may upgrade + the library to a newer version that breaks binary compatibility with + the Python package, and requires user intervention to fix. + +For these reasons, you may reasonably decide to either statically link +your dependencies, or to provide local copies in the installed package. +You may also vendor the dependency in your source distribution. Sometimes +these dependencies are also repackaged on PyPI, and can be declared as +project dependencies like any other Python package. + +However, none of these issues apply to downstream packaging, and downstreams +have good reasons to prefer dynamically linking to system dependencies. +In particular: + +- In many cases, reliably sharing dynamic dependencies between components is a large part + of the *purpose* of a downstream packaging ecosystem. Helping to support that makes it + easier for users of those systems to access upstream projects in their preferred format. + +- Static linking and vendoring obscures the use of external dependencies, + making source auditing harder. + +- Dynamic linking makes it possible to quickly and systematically replace the used + libraries across an entire downstream packaging ecosystem, which can be particularly + important when they turn out to contain a security vulnerability or critical bug. + +- Using system dependencies makes the package benefit from downstream + customization that can improve the user experience on a particular platform, + without the downstream maintainers having to consistently patch + the dependencies vendored in different packages. This can include + compatibility improvements and security hardening. + +- Static linking and vendoring can result in multiple different versions of the + same library being loaded in the same process (for example, attempting to + import two Python packages that link to different versions of the same library). + This sometimes works without incident, but it can also lead to anything from library + loading errors, to subtle runtime bugs, to catastrophic failures (like suddenly + crashing and losing data). + +- Last but not least, static linking and vendoring results in duplication, + and may increase the use of both disk space and memory. + +How? +~~~~ + +A good compromise between the needs of both parties is to provide a switch +between using vendored and system dependencies. Ideally, if the package has +multiple vendored dependencies, it should provide both individual switches +for each dependency, and a general switch to control the default for them, +e.g. via a ``USE_SYSTEM_DEPS`` environment variable. + +If the user requests using system dependencies, and a particular dependency +is either missing or incompatible, the build should fail with an explanatory +message rather than fall back to a vendored version. This gives the packager +the opportunity to notice their mistake and a chance to consciously decide +how to solve it. + +It is reasonable for upstream projects to leave *testing* of building with +system dependencies to their downstream repackagers. 
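As a rough sketch of the switch described above (``USE_SYSTEM_DEPS`` and ``libfoo`` are placeholder names used only for illustration, and the exact mechanism will differ per build system), a custom build script might resolve its compiler arguments like this, failing loudly rather than falling back to the vendored copy:

.. code-block:: python

   # Illustrative only: "USE_SYSTEM_DEPS" and "libfoo" are placeholder names.
   import os
   import shutil
   import subprocess
   import sys


   def want_system_deps() -> bool:
       # Treat any non-empty value as a request to build against system libraries.
       return bool(os.environ.get("USE_SYSTEM_DEPS"))


   def libfoo_build_args() -> list[str]:
       if not want_system_deps():
           # Default: build the copy vendored in the source distribution.
           return ["-Ivendor/libfoo/include", "vendor/libfoo/libfoo.a"]
       if shutil.which("pkg-config") is None:
           sys.exit("USE_SYSTEM_DEPS is set, but pkg-config is not available")
       probe = subprocess.run(
           ["pkg-config", "--cflags", "--libs", "libfoo"],
           capture_output=True,
           text=True,
       )
       if probe.returncode != 0:
           # Fail with a clear message instead of silently using the vendored copy.
           sys.exit("USE_SYSTEM_DEPS is set, but a compatible system libfoo was not found")
       return probe.stdout.split()

Whether such a check lives in ``setup.py``, a CMake file, or a build-backend hook is a project-specific choice; the important part is the explicit failure when a requested system dependency cannot be used.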
The goal of these guidelines +is to facilitate more effective collaboration between upstream projects and downstream +repackagers, not to suggest upstream projects take on tasks that downstream repackagers +are better equipped to handle. + +.. _support-downstream-testing: + +Support downstream testing +-------------------------- + +Why? +~~~~ + +A variety of downstream projects run some degree of testing on the packaged +Python projects. Depending on the particular case, this can range from minimal +smoke testing to comprehensive runs of the complete test suite. There can +be various reasons for doing this, for example: + +- Verifying that the downstream packaging did not introduce any bugs. + +- Testing on additional platforms that are not covered by upstream testing. + +- Finding subtle bugs that can only be reproduced with particular hardware, + system package versions, and so on. + +- Testing the released package against newer (or older) dependency versions than + the ones present during upstream release testing. + +- Testing the package in an environment closely resembling the production + setup. This can detect issues caused by non-trivial interactions between + different installed packages, including packages that are not dependencies + of your package, but nevertheless can cause issues. + +- Testing the released package against newer Python versions (including + newer point releases), or less tested Python implementations such as PyPy. + +Admittedly, sometimes downstream testing may yield false positives or bug +reports about scenarios the upstream project is not interested in supporting. +However, perhaps even more often it does provide early notice of problems, +or find non-trivial bugs that would otherwise cause issues for the upstream +project's users. While mistakes do happen, the majority of downstream packagers +are doing their best to double-check their results, and help upstream +maintainers triage and fix the bugs that they reported. + +How? +~~~~ + +There are a number of things that upstream projects can do to help downstream +repackagers test their packages efficiently and effectively, including some of the suggestions +already mentioned above. These are typically improvements that make the test suite more +reliable and easier to use for everyone, not just downstream packagers. +Some specific suggestions are: + +- Include the test files and fixtures in the source distribution, or make it + possible to easily download them separately. + +- Do not write to the package directories during testing. Downstream test + setups sometimes run tests on top of the installed package, and modifications + performed during testing and temporary test files may end up being part + of the installed package! + +- Make the test suite work offline. Mock network interactions, using + packages such as responses_ or vcrpy_. If that is not possible, make it + possible to easily disable the tests using Internet access, e.g. via a pytest_ + marker. Use pytest-socket_ to verify that your tests work offline. This + often makes your own test workflows faster and more reliable as well. + +- Make your tests work without a specialized setup, or perform the necessary + setup as part of test fixtures. Do not ever assume that you can connect + to system services such as databases β€” in an extreme case, you could crash + a production service! + +- If your package has optional dependencies, make their tests optional as + well. 
Either skip them if the needed packages are not installed, or add + markers to make deselecting easy. + +- More generally, add markers to tests with special requirements. These can + include e.g. significant space usage, significant memory usage, long runtime, + incompatibility with parallel testing. + +- Do not assume that the test suite will be run with ``-Werror``. Downstreams + often need to disable that, as it causes false positives, e.g. due to newer + dependency versions. Assert for warnings using ``pytest.warns()`` rather + than ``pytest.raises()``! + +- Aim to make your test suite reliable and reproducible. Avoid flaky tests. + Avoid depending on specific platform details, don't rely on exact results + of floating-point computation, or timing of operations, and so on. Fuzzing + has its advantages, but you want to have static test cases for completeness + as well. + +- Split tests by their purpose, and make it easy to skip categories that are + irrelevant or problematic. Since the primary purpose of downstream testing + is to ensure that the package itself works, downstreams are not generally interested + in tasks such as checking code coverage, code formatting, typechecking or running + benchmarks. These tests can fail as dependencies are upgraded or the system + is under load, without actually affecting the package itself. + +- If your test suite takes significant time to run, support testing + in parallel. Downstreams often maintain a large number of packages, + and testing them all takes a lot of time. Using pytest-xdist_ can help them + avoid bottlenecks. + +- Ideally, support running your test suite via ``pytest``. pytest_ has many + command-line arguments that are truly helpful to downstreams, such as + the ability to conveniently deselect tests, rerun flaky tests + (via pytest-rerunfailures_), add a timeout to prevent tests from hanging + (via pytest-timeout_) or run tests in parallel (via pytest-xdist_). + Note that test suites don't need to be *written* with ``pytest`` to be + *executed* with ``pytest``: ``pytest`` is able to find and execute almost + all test cases that are compatible with the standard library's ``unittest`` + test discovery. + + +.. _aim-for-stable-releases: + +Aim for stable releases +----------------------- + +Why? +~~~~ + +Many downstreams provide stable release channels in addition to the main +package streams. The goal of these channels is to provide more conservative +upgrades to users with higher stability needs. These users often prefer +to trade having the newest features available for lower risk of issues. + +While the exact policies differ, an important criterion for including a new +package version in a stable release channel is for it to be available in testing +for some time already, and have no known major regressions. For example, +in Gentoo Linux a package is usually marked stable after being available +in testing for a month, and being tested against the versions of its +dependencies that are marked stable at the time. + +However, there are circumstances which demand more prompt action. For example, +if a security vulnerability or a major bug is found in the version that is +currently available in the stable channel, the downstream is facing a need +to resolve it. In this case, they need to consider various options, such as: + +- putting a new version in the stable channel early, + +- adding patches to the version currently published, + +- or even downgrading the stable channel to an earlier release. 
+
+Each of these options involves certain risks and a certain amount of work,
+and packagers need to weigh them to determine the course of action.
+
+How?
+~~~~
+
+There are some things that upstreams can do to tailor their workflow to stable
+release channels. These actions are often beneficial to the package's users
+as well. Some specific suggestions are:
+
+- Adjust the release frequency to the rate of code changes. Packages that
+  are released rarely often bring significant changes with every release,
+  and a higher risk of accidental regressions.
+
+- Avoid mixing bug fixes and new features, if possible. In particular, if there
+  are known bug fixes merged already, consider making a new release before
+  merging feature branches.
+
+- Consider making prereleases after major changes, to provide more testing
+  opportunities for users and downstreams willing to opt in.
+
+- If your project is subject to very intense development, consider splitting
+  off one or more branches that include a more conservative subset of commits,
+  and are released separately. For example, Django_ currently maintains three
+  release branches in addition to main.
+
+- Even if you don't wish to maintain additional branches permanently, consider
+  making additional patch releases with minimal changes to the previous
+  version, especially when a security vulnerability is discovered.
+
+- Split your changes into focused commits that address one problem at a time,
+  to make it easier to cherry-pick changes to earlier releases when necessary.
+
+
+.. _responses: https://pypi.org/project/responses/
+.. _vcrpy: https://pypi.org/project/vcrpy/
+.. _pytest-socket: https://pypi.org/project/pytest-socket/
+.. _pytest-xdist: https://pypi.org/project/pytest-xdist/
+.. _pytest: https://pytest.org/
+.. _pytest-rerunfailures: https://pypi.org/project/pytest-rerunfailures/
+.. _pytest-timeout: https://pypi.org/project/pytest-timeout/
+.. _Django: https://www.djangoproject.com/
+.. _NumPy: https://numpy.org/
+.. _cryptography: https://pypi.org/project/cryptography/
+.. _cryptography-vectors: https://pypi.org/project/cryptography-vectors/
diff --git a/source/discussions/index.rst b/source/discussions/index.rst
index d262bcff2..b1b84f97a 100644
--- a/source/discussions/index.rst
+++ b/source/discussions/index.rst
@@ -16,3 +16,5 @@ specific topic. If you're just trying to get stuff done, see
    package-formats
    src-layout-vs-flat-layout
    setup-py-deprecated
+   single-source-version
+   downstream-packaging
diff --git a/source/discussions/pip-vs-easy-install.rst b/source/discussions/pip-vs-easy-install.rst
index 4fa590cf3..2bb75d3be 100644
--- a/source/discussions/pip-vs-easy-install.rst
+++ b/source/discussions/pip-vs-easy-install.rst
@@ -11,7 +11,7 @@ It was notable at the time for installing :term:`packages
 :term:`PyPI ` using requirement specifiers, and automatically installing dependencies.
 
-:ref:`pip` came later in 2008, as alternative to :ref:`easy_install `, although still
+:ref:`pip` came later in 2008, as an alternative to :ref:`easy_install `, although still
 largely built on top of :ref:`setuptools` components.
It was notable at the time for *not* installing packages as :term:`Eggs ` or from :term:`Eggs ` (but rather simply as 'flat' packages from :term:`sdists `__ by Paul Ganssle * :doc:`setuptools:deprecated/commands` diff --git a/source/discussions/single-source-version.rst b/source/discussions/single-source-version.rst new file mode 100644 index 000000000..c7dc8d1e1 --- /dev/null +++ b/source/discussions/single-source-version.rst @@ -0,0 +1,62 @@ +.. _single-source-version: + +=================================== +Single-sourcing the Project Version +=================================== + +:Page Status: Complete +:Last Reviewed: 2024-10-07 + +Many Python :term:`distribution packages ` publish a single +Python :term:`import package ` where it is desired that the runtime +``__version__`` attribute on the import package report the same version specifier +as :func:`importlib.metadata.version` reports for the distribution package +(as described in :ref:`runtime-version-access`). + +It is also frequently desired that this version information be derived from a version +control system *tag* (such as ``v1.2.3``) rather than being manually updated in the +source code. + +Some projects may choose to simply live with the data entry duplication, and rely +on automated testing to ensure the different values do not diverge. + +Alternatively, a project's chosen build system may offer a way to define a single +source of truth for the version number. + +In general, the options are: + +1) If the code is in a version control system (VCS), such as Git, then the version can be extracted from the VCS. + +2) The version can be hard-coded into the :file:`pyproject.toml` file -- and the build system can copy it + into other locations it may be required. + +3) The version string can be hard-coded into the source code -- either in a special purpose file, + such as :file:`_version.txt` (which must then be shipped as part of the project's source distribution + package), or as an attribute in a particular module, such as :file:`__init__.py`. The build + system can then extract it from the runtime location at build time. + +Consult your build system's documentation for their recommended method. + +When the intention is that a distribution package and its associated import package +share the same version, it is recommended that the project include an automated test +case that ensures ``import_name.__version__`` and ``importlib.metadata.version("dist-name")`` +report the same value (note: for many projects, ``import_name`` and ``dist-name`` will +be the same name). + + +.. _Build system version handling: + +Build System Version Handling +----------------------------- + +The following are links to some build system's documentation for handling version strings. + +* `Flit `_ + +* `Hatchling `_ + +* `PDM `_ + +* `Setuptools `_ + + - `setuptools_scm `_ diff --git a/source/discussions/src-layout-vs-flat-layout.rst b/source/discussions/src-layout-vs-flat-layout.rst index bfa405729..c38968345 100644 --- a/source/discussions/src-layout-vs-flat-layout.rst +++ b/source/discussions/src-layout-vs-flat-layout.rst @@ -79,3 +79,27 @@ layout and the flat layout: ``tox.ini``) and packaging/tooling configuration files (eg: ``setup.py``, ``noxfile.py``) on the import path. This would make certain imports work in editable installations but not regular installations. + +.. 
_running-cli-from-source-src-layout: + +Running a command-line interface from source with src-layout +============================================================ + +Due to the firstly mentioned specialty of the src layout, a command-line +interface can not be run directly from the :term:`source tree `, +but requires installation of the package in +:doc:`Development Mode ` +for testing purposes. Since this can be unpractical in some situations, +a workaround could be to prepend the package folder to Python's +:py:data:`sys.path` when called via its :file:`__main__.py` file: + +.. code-block:: python + + import os + import sys + + if not __package__: + # Make CLI runnable from source tree with + # python src/package + package_source_path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, package_source_path) diff --git a/source/discussions/versioning.rst b/source/discussions/versioning.rst index 49fbbf0de..eeea3578c 100644 --- a/source/discussions/versioning.rst +++ b/source/discussions/versioning.rst @@ -75,7 +75,7 @@ semantic versioning and calendar versioning. number. The maintainers may consciously choose to break the assumption that the last version segment only contains backwards-compatible changes. - One such case is when security vulnerability needs to be + One such case is when a security vulnerability needs to be addressed. Security releases often come in patch versions but contain breaking changes inevitably. @@ -124,7 +124,7 @@ Calendar versioning ------------------- Semantic versioning is not a suitable choice for all projects, such as those -with a regular time based release cadence and a deprecation process that +with a regular time-based release cadence and a deprecation process that provides warnings for a number of releases prior to removal of a feature. A key advantage of date-based versioning, or `calendar versioning `_ @@ -148,12 +148,11 @@ user, as serial version numbers convey little or no information regarding API backwards compatibility. Combinations of the above schemes are possible. For example, a project may -combine date based versioning with serial versioning to create a *year.serial* +combine date-based versioning with serial versioning to create a *year.serial* numbering scheme that readily conveys the approximate age of a release, but doesn't otherwise commit to a particular release cadence within the year. - Local version identifiers ========================= @@ -172,6 +171,54 @@ since the latest release, setuptools-scm generates a version like "0.5.dev1+gd00980f", or if the repository has untracked changes, like "0.5.dev1+gd00980f.d20231217". +.. _runtime-version-access: + +Accessing version information at runtime +======================================== + +Version information for all :term:`distribution packages ` +that are locally available in the current environment can be obtained at runtime +using the standard library's :func:`importlib.metadata.version` function:: + + >>> importlib.metadata.version("cryptography") + '41.0.7' + +Many projects also choose to version their top level +:term:`import packages ` by providing a package level +``__version__`` attribute:: + + >>> import cryptography + >>> cryptography.__version__ + '41.0.7' + +This technique can be particularly valuable for CLI applications which want +to ensure that version query invocations (such as ``pip -V``) run as quickly +as possible. 
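+
+For instance, a command-line tool might wire that attribute into its argument
+parser so that version queries avoid the installed-metadata lookup entirely
+(a minimal sketch, assuming a hypothetical ``mypkg`` import package that
+defines ``__version__``)::
+
+    import argparse
+
+    import mypkg  # hypothetical package exposing __version__
+
+    def main() -> None:
+        parser = argparse.ArgumentParser(prog="mypkg")
+        # argparse's built-in "version" action prints the string and exits.
+        parser.add_argument(
+            "--version",
+            action="version",
+            version=f"%(prog)s {mypkg.__version__}",
+        )
+        parser.parse_args()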
+ +Package publishers wishing to ensure their reported distribution package and +import package versions are consistent with each other can review the +:ref:`single-source-version` discussion for potential approaches to doing so. + +As import packages and modules are not *required* to publish runtime +version information in this way (see the withdrawn proposal in +:pep:`PEP 396 <396>`), the ``__version__`` attribute should either only be +queried with interfaces that are known to provide it (such as a project +querying its own version or the version of one of its direct dependencies), +or else the querying code should be designed to handle the case where the +attribute is missing [#fallback-to-dist-version]_. + +Some projects may need to publish version information for external APIs +that aren't the version of the module itself. Such projects should +define their own project-specific ways of obtaining the relevant information +at runtime. For example, the standard library's :mod:`ssl` module offers +multiple ways to access the underlying OpenSSL library version:: + + >>> ssl.OPENSSL_VERSION + 'OpenSSL 3.2.2 4 Jun 2024' + >>> ssl.OPENSSL_VERSION_INFO + (3, 2, 0, 2, 0) + >>> hex(ssl.OPENSSL_VERSION_NUMBER) + '0x30200020' -------------------------------------------------------------------------------- @@ -184,6 +231,15 @@ since the latest release, setuptools-scm generates a version like Brett Cannon `_. For a humoristic take, read about ZeroVer_. +.. [#fallback-to-dist-version] A full list mapping the top level names available + for import to the distribution packages that provide those import packages and + modules may be obtained through the standard library's + :func:`importlib.metadata.packages_distributions` function. This means that + even code that is attempting to infer a version to report for all importable + top-level names has a means to fall back to reporting the distribution + version information if no ``__version__`` attribute is defined. Only standard + library modules, and modules added via means other than Python package + installation would fail to have version information reported in that case. .. _zerover: https://0ver.org diff --git a/source/glossary.rst b/source/glossary.rst index 00d798e39..40c041f4c 100644 --- a/source/glossary.rst +++ b/source/glossary.rst @@ -119,15 +119,6 @@ Glossary extensions. - Known Good Set (KGS) - - A set of distributions at specified versions which are compatible with - each other. Typically a test suite will be run which passes all tests - before a specific set of packages is declared a known good set. This - term is commonly used by frameworks and toolkits which are comprised of - multiple individual distributions. - - Import Package A Python module which can contain other modules or recursively, other @@ -144,7 +135,50 @@ Glossary A :term:`Project` that is installed for use with a Python interpreter or :term:`Virtual Environment`, - as described in the specicifcation :ref:`recording-installed-packages`. + as described in the specification :ref:`recording-installed-packages`. + + + Known Good Set (KGS) + + A set of distributions at specified versions which are compatible with + each other. Typically a test suite will be run which passes all tests + before a specific set of packages is declared a known good set. This + term is commonly used by frameworks and toolkits which are comprised of + multiple individual distributions. 
+ + + License Classifier + + A PyPI Trove classifier + (as :ref:`described ` + in the :term:`Core Metadata` specification) + which begins with ``License ::``. + + + License Expression + SPDX Expression + + A string with valid SPDX license expression syntax, + including one or more SPDX :term:`License Identifier`\(s), + which describes a :term:`Distribution Archive`'s license(s) + and how they inter-relate. + Examples: + ``GPL-3.0-or-later``, + ``MIT AND (Apache-2.0 OR BSD-2-Clause)`` + + + License Identifier + SPDX Identifier + + A valid SPDX short-form license identifier, + originally specified in :pep:`639`. + This includes all valid SPDX identifiers and + the custom ``LicenseRef-[idstring]`` strings conforming to the + SPDX specification. + Examples: + ``MIT``, + ``GPL-3.0-only``, + ``LicenseRef-My-Custom-License`` Module @@ -253,8 +287,7 @@ Glossary PyPA is a working group that maintains many of the relevant projects in Python packaging. They maintain a site at :doc:`pypa.io `, host projects on `GitHub - `_ and `Bitbucket - `_, and discuss issues on the + `_, and discuss issues on the `distutils-sig mailing list `_ and `the Python Discourse forum `__. @@ -313,6 +346,23 @@ Glossary docs on :ref:`pip:Requirements Files`. + Root License Directory + License Directory + + The directory under which license files are stored in a + :term:`Project Source Tree`, :term:`Distribution Archive` + or :term:`Installed Project`. + For a :term:`Project Source Tree` or + :term:`Source Distribution (or "sdist")`, this is the + :term:`Project Root Directory`. + For a :term:`Built Distribution` or :term:`Installed Project`, + this is the :file:`.dist-info/licenses/` directory of + the wheel archive or project folder respectively. + Also, the root directory that paths + recorded in the ``License-File`` + :term:`Core Metadata Field` are relative to. + + setup.py setup.cfg diff --git a/source/guides/analyzing-pypi-package-downloads.rst b/source/guides/analyzing-pypi-package-downloads.rst index 62efea7ab..2ad02fed5 100644 --- a/source/guides/analyzing-pypi-package-downloads.rst +++ b/source/guides/analyzing-pypi-package-downloads.rst @@ -1,3 +1,5 @@ +.. _analyzing-pypi-package-downloads: + ================================ Analyzing PyPI package downloads ================================ @@ -101,7 +103,7 @@ Counting package downloads The following query counts the total number of downloads for the project "pytest". -:: +.. code-block:: sql #standardSQL SELECT COUNT(*) AS num_downloads @@ -121,7 +123,7 @@ The following query counts the total number of downloads for the project To count downloads from pip only, filter on the ``details.installer.name`` column. -:: +.. code-block:: sql #standardSQL SELECT COUNT(*) AS num_downloads @@ -145,7 +147,7 @@ Package downloads over time To group by monthly downloads, use the ``TIMESTAMP_TRUNC`` function. Also filtering by this column reduces corresponding costs. -:: +.. code-block:: sql #standardSQL SELECT @@ -183,7 +185,7 @@ Python versions over time Extract the Python version from the ``details.python`` column. Warning: This query processes over 500 GB of data. -:: +.. code-block:: sql #standardSQL SELECT @@ -226,7 +228,7 @@ column, which includes the hash and artifact filename. .. note:: The URL generated here is not guaranteed to be stable, but currently aligns with the URL where PyPI artifacts are hosted. -:: +.. 
code-block:: sql SELECT CONCAT('https://files.pythonhosted.org/packages', path) as url diff --git a/source/guides/creating-command-line-tools.rst b/source/guides/creating-command-line-tools.rst new file mode 100644 index 000000000..cbe8b3bb0 --- /dev/null +++ b/source/guides/creating-command-line-tools.rst @@ -0,0 +1,183 @@ +.. _creating-command-line-tools: + +========================================= +Creating and packaging command-line tools +========================================= + +This guide will walk you through creating and packaging a standalone command-line application +that can be installed with :ref:`pipx`, a tool for creating and managing :term:`Python Virtual Environments ` +and exposing the executable scripts of packages (and available manual pages) for use on the command-line. + +Creating the package +==================== + +First of all, create a source tree for the :term:`project `. For the sake of an example, we'll +build a simple tool outputting a greeting (a string) for a person based on arguments given on the command-line. + +.. todo:: Advise on the optimal structure of a Python package in another guide or discussion and link to it here. + +This project will adhere to :ref:`src-layout ` and in the end be alike this file tree, +with the top-level folder and package name ``greetings``: + +:: + + . + β”œβ”€β”€ pyproject.toml + └── src + └── greetings + β”œβ”€β”€ cli.py + β”œβ”€β”€ greet.py + β”œβ”€β”€ __init__.py + └── __main__.py + +The actual code responsible for the tool's functionality will be stored in the file :file:`greet.py`, +named after the main module: + +.. code-block:: python + + import typer + from typing_extensions import Annotated + + + def greet( + name: Annotated[str, typer.Argument(help="The (last, if --title is given) name of the person to greet")] = "", + title: Annotated[str, typer.Option(help="The preferred title of the person to greet")] = "", + doctor: Annotated[bool, typer.Option(help="Whether the person is a doctor (MD or PhD)")] = False, + count: Annotated[int, typer.Option(help="Number of times to greet the person")] = 1 + ): + greeting = "Greetings, " + if doctor and not title: + title = "Dr." + if not name: + if title: + name = title.lower().rstrip(".") + else: + name = "friend" + if title: + greeting += f"{title} " + greeting += f"{name}!" + for i in range(0, count): + print(greeting) + +The above function receives several keyword arguments that determine how the greeting to output is constructed. +Now, construct the command-line interface to provision it with the same, which is done +in :file:`cli.py`: + +.. code-block:: python + + import typer + + from .greet import greet + + + app = typer.Typer() + app.command()(greet) + + + if __name__ == "__main__": + app() + +The command-line interface is built with typer_, an easy-to-use CLI parser based on Python type hints. It provides +auto-completion and nicely styled command-line help out of the box. Another option would be :py:mod:`argparse`, +a command-line parser which is included in Python's standard library. It is sufficient for most needs, but requires +a lot of code, usually in ``cli.py``, to function properly. Alternatively, docopt_ makes it possible to create CLI +interfaces based solely on docstrings; advanced users are encouraged to make use of click_ (on which ``typer`` is based). + +Now, add an empty :file:`__init__.py` file, to define the project as a regular :term:`import package `. 
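+
+.. note::
+
+   For comparison, the :py:mod:`argparse` option mentioned above would require
+   roughly the following :file:`cli.py` instead. This is only an illustrative
+   sketch; the rest of this guide keeps using ``typer``:
+
+   .. code-block:: python
+
+      import argparse
+
+      from .greet import greet
+
+
+      def app() -> None:
+          parser = argparse.ArgumentParser(description="Greet a person.")
+          parser.add_argument(
+              "name", nargs="?", default="",
+              help="The (last, if --title is given) name of the person to greet")
+          parser.add_argument(
+              "--title", default="",
+              help="The preferred title of the person to greet")
+          parser.add_argument(
+              "--doctor", action="store_true",
+              help="Whether the person is a doctor (MD or PhD)")
+          parser.add_argument(
+              "--count", type=int, default=1,
+              help="Number of times to greet the person")
+          args = parser.parse_args()
+          # Hand the parsed values over to the same greeting logic.
+          greet(args.name, title=args.title, doctor=args.doctor, count=args.count)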
+ +The file :file:`__main__.py` marks the main entry point for the application when running it via :mod:`runpy` +(i.e. ``python -m greetings``, which works immediately with flat layout, but requires installation of the package with src layout), +so initialize the command-line interface here: + +.. code-block:: python + + if __name__ == "__main__": + from greetings.cli import app + app() + +.. note:: + + In order to enable calling the command-line interface directly from the :term:`source tree `, + i.e. as ``python src/greetings``, a certain hack could be placed in this file; read more at + :ref:`running-cli-from-source-src-layout`. + + +``pyproject.toml`` +------------------ + +The project's :term:`metadata ` is placed in :term:`pyproject.toml`. The :term:`pyproject metadata keys ` and the ``[build-system]`` table may be filled in as described in :ref:`writing-pyproject-toml`, adding a dependency +on ``typer`` (this tutorial uses version *0.12.3*). + +For the project to be recognised as a command-line tool, additionally a ``console_scripts`` :ref:`entry point ` (see :ref:`console_scripts`) needs to be added as a :term:`subkey `: + +.. code-block:: toml + + [project.scripts] + greet = "greetings.cli:app" + +Now, the project's source tree is ready to be transformed into a :term:`distribution package `, +which makes it installable. + + +Installing the package with ``pipx`` +==================================== + +After installing ``pipx`` as described in :ref:`installing-stand-alone-command-line-tools`, install your project: + +.. code-block:: console + + $ cd path/to/greetings/ + $ pipx install . + +This will expose the executable script we defined as an entry point and make the command ``greet`` available. +Let's test it: + +.. code-block:: console + + $ greet + Greetings, friend! + $ greet --doctor Brennan + Greetings, Dr. Brennan! + $ greet --title Ms. Parks + Greetings, Ms. Parks! + $ greet --title Mr. + Greetings, Mr. mr! + +Since this example uses ``typer``, you could now also get an overview of the program's usage by calling it with +the ``--help`` option, or configure completions via the ``--install-completion`` option. + +To just run the program without installing it permanently, use ``pipx run``, which will create a temporary +(but cached) virtual environment for it: + +.. code-block:: console + + $ pipx run --spec . greet --doctor + +This syntax is a bit impractical, however; as the name of the entry point we defined above does not match the package name, +we need to state explicitly which executable script to run (even though there is only one in existence). + +There is, however, a more practical solution to this problem, in the form of an entry point specific to ``pipx run``. +The same can be defined as follows in :file:`pyproject.toml`: + +.. code-block:: toml + + [project.entry-points."pipx.run"] + greetings = "greetings.cli:app" + + +Thanks to this entry point (which *must* match the package name), ``pipx`` will pick up the executable script as the +default one and run it, which makes this command possible: + +.. code-block:: console + + $ pipx run . --doctor + +Conclusion +========== + +You know by now how to package a command-line application written in Python. A further step could be to distribute your package, +meaning uploading it to a :term:`package index `, most commonly :term:`PyPI `. To do that, follow the instructions at :ref:`Packaging your project`. And once you're done, don't forget to :ref:`do some research ` on how your package is received! + +.. 
_click: https://click.palletsprojects.com/ +.. _docopt: https://docopt.readthedocs.io/en/latest/ +.. _typer: https://typer.tiangolo.com/ diff --git a/source/guides/dropping-older-python-versions.rst b/source/guides/dropping-older-python-versions.rst index c0c2b4434..267d7b923 100644 --- a/source/guides/dropping-older-python-versions.rst +++ b/source/guides/dropping-older-python-versions.rst @@ -4,34 +4,27 @@ Dropping support for older Python versions ========================================== -Dropping support for older Python versions is supported by the standard :ref:`core-metadata` 1.2 specification via a "Requires-Python" attribute. +The ability to drop support for older Python versions is enabled by the standard :ref:`core-metadata` 1.2 specification via the :ref:`"Requires-Python" ` attribute. -Metadata 1.2+ clients, such as Pip 9.0+, will adhere to this specification by matching the current Python runtime and comparing it with the required version +Metadata 1.2+ installers, such as Pip, will adhere to this specification by matching the current Python runtime and comparing it with the required version in the package metadata. If they do not match, it will attempt to install the last package distribution that supported that Python runtime. -This mechanism can be used to drop support for older Python versions, by amending the "Requires-Python" attribute in the package metadata. - -This guide is specifically for users of :ref:`setuptools`, other packaging tools such as ``flit`` may offer similar functionality but users will need to consult relevant documentation. +This mechanism can be used to drop support for older Python versions, by amending the ``Requires-Python`` attribute in the package metadata. Requirements ------------ -This workflow requires that: - -1. The publisher is using the latest version of :ref:`setuptools`, -2. The latest version of :ref:`twine` is used to upload the package, -3. The user installing the package has at least Pip 9.0, or a client that supports the Metadata 1.2 specification. +This workflow requires that the user installing the package uses Pip [#]_, or another installer that supports the Metadata 1.2 specification. Dealing with the universal wheels --------------------------------- -Traditionally, projects providing Python code that is semantically +Traditionally, :ref:`setuptools` projects providing Python code that is semantically compatible with both Python 2 and Python 3, produce :term:`wheels ` that have a ``py2.py3`` tag in their names. When dropping support for Python 2, it is important not to forget to change this tag to just ``py3``. It is often configured within :file:`setup.cfg` under -the ``[bdist_wheel]`` section by setting ``universal = 1`` if they -use setuptools. +the ``[bdist_wheel]`` section by setting ``universal = 1``. If you use this method, either remove this option or section, or explicitly set ``universal`` to ``0``: @@ -43,69 +36,69 @@ explicitly set ``universal`` to ``0``: [bdist_wheel] universal = 0 # Make the generated wheels have "py3" tag -.. tip:: +.. hint:: - Since it is possible to override the :file:`setup.cfg` settings via - CLI flags, make sure that your scripts don't have ``--universal`` in - your package creation scripts. + Regarding :ref:`deprecated ` direct ``setup.py`` invocations, + passing the ``--universal`` flag on the command line could override this setting. Defining the Python version required ------------------------------------ -1. 
Download the newest version of Setuptools -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Ensure that before you generate source distributions or binary distributions, you update Setuptools and install twine. +1. Install twine +~~~~~~~~~~~~~~~~ +Ensure that you have twine available at its latest version. Steps: .. tab:: Unix/macOS .. code-block:: bash - python3 -m pip install --upgrade setuptools twine + python3 -m pip install --upgrade twine .. tab:: Windows .. code-block:: bat - py -m pip install --upgrade setuptools twine - -``setuptools`` version should be above 24.0.0. + py -m pip install --upgrade twine 2. Specify the version ranges for supported Python distributions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can specify version ranges and exclusion rules, such as at least Python 3. Or, Python 2.7, 3.4 and beyond. +Set the version ranges declaring which Python distributions are supported +within your project's :file:`pyproject.toml`. The :ref:`requires-python` configuration field +corresponds to the :ref:`Requires-Python ` core metadata field: -Examples: +.. code-block:: toml -.. code-block:: text + [build-system] + ... - Requires-Python: ">=3" - Requires-Python: ">2.7,!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [project] + requires-python = ">= 3.8" # At least Python 3.8 -The way to set those values is within the call to ``setup`` within your -:file:`setup.py` script. This will insert the ``Requires-Python`` -metadata values based on the argument you provide in ``python_requires``. +You can specify version ranges and exclusion rules (complying with the :ref:`version-specifiers` specification), +such as at least Python 3.9. Or, at least Python 3.7 and beyond, skipping the 3.7.0 and 3.7.1 point releases: -.. code-block:: python +.. code-block:: toml - from setuptools import setup + requires-python = ">= 3.9" + requires-python = ">= 3.7, != 3.7.0, != 3.7.1" - setup( - # Your setup arguments - python_requires='>=2.7', # Your supported Python ranges - ) +If using the :ref:`setuptools` build backend, consult the `dependency-management`_ documentation for more options. + +.. caution:: + Avoid adding upper bounds to the version ranges, e. g. ``">= 3.8, < 3.10"``. Doing so can cause different errors + and version conflicts. See the `discourse-discussion`_ for more information. 3. Validating the Metadata before publishing ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Within a Python source package (the zip or the tar-gz file you download) is a text file called PKG-INFO. -This file is generated by :ref:`distutils` or :ref:`setuptools` when it generates the source package. -The file contains a set of keys and values, the list of keys is part of the PyPa standard metadata format. +This file is generated by the :term:`build backend ` when it generates the source package. +The file contains a set of keys and values, the list of keys is part of the PyPA standard metadata format. You can see the contents of the generated file like this: @@ -115,24 +108,31 @@ You can see the contents of the generated file like this: Validate that the following is in place, before publishing the package: -- If you have upgraded correctly, the Metadata-Version value should be 1.2 or higher. -- The Requires-Python field is set and matches your specification in setup.py. +- If you have upgraded correctly, the ``Metadata-Version`` value should be 1.2 or higher. +- The ``Requires-Python`` field is set and matches your specification in the configuration file. -4. Using Twine to publish +4. 
Publishing the package ~~~~~~~~~~~~~~~~~~~~~~~~~ -Twine has a number of advantages, apart from being faster it is now the supported method for publishing packages. - -Make sure you are using the newest version of Twine, at least 1.9. +Proceed as suggested in :ref:`Uploading your Project to PyPI`. -Dropping a Python release +Dropping a Python version ------------------------- -Once you have published a package with the Requires-Python metadata, you can then make a further update removing that Python runtime from support. +In principle, at least metadata support for Python versions should be kept as long as possible, because +once that has been dropped, people still depending on a version will be forced to downgrade. +If however supporting a specific version becomes a blocker for a new feature or other issues occur, the metadata +``Requires-Python`` should be amended. Of course this also depends on whether the project needs to be stable and +well-covered for a wider range of users. + +Each version compatibility change should have its own release. + +.. tip:: -It must be done in this order for the automated fallback to work. + When dropping a Python version, it might also be rewarding to upgrade the project's code syntax generally, apart from updating the versions used in visible places (like the testing environment). Tools like pyupgrade_ or `ruff `_ can automate some of this work. -For example, you published the Requires-Python: ">=2.7" as version 1.0.0 of your package. +.. _discourse-discussion: https://discuss.python.org/t/requires-python-upper-limits/12663 +.. _pyupgrade: https://pypi.org/project/pyupgrade/ +.. _dependency-management: https://setuptools.pypa.io/en/latest/userguide/dependency_management.html#python-requirement -If you were then to update the version string to ">=3.5", and publish a new version 2.0.0 of your package, any users running Pip 9.0+ from version 2.7 will -have version 1.0.0 of the package installed, and any >=3.5 users will receive version 2.0.0. +.. [#] Support for the Metadata 1.2 specification has been added in Pip 9.0. 
diff --git a/source/guides/github-actions-ci-cd-sample/publish-to-test-pypi.yml b/source/guides/github-actions-ci-cd-sample/publish-to-pypi.yml similarity index 55% rename from source/guides/github-actions-ci-cd-sample/publish-to-test-pypi.yml rename to source/guides/github-actions-ci-cd-sample/publish-to-pypi.yml index 3bd06cccc..155f82555 100644 --- a/source/guides/github-actions-ci-cd-sample/publish-to-test-pypi.yml +++ b/source/guides/github-actions-ci-cd-sample/publish-to-pypi.yml @@ -8,9 +8,11 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 + with: + persist-credentials: false - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.x" - name: Install pypa/build @@ -22,7 +24,7 @@ jobs: - name: Build a binary wheel and a source tarball run: python3 -m build - name: Store the distribution packages - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v5 with: name: python-package-distributions path: dist/ @@ -42,56 +44,13 @@ jobs: steps: - name: Download all the dists - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v6 with: name: python-package-distributions path: dist/ - name: Publish distribution πŸ“¦ to PyPI uses: pypa/gh-action-pypi-publish@release/v1 - github-release: - name: >- - Sign the Python 🐍 distribution πŸ“¦ with Sigstore - and upload them to GitHub Release - needs: - - publish-to-pypi - runs-on: ubuntu-latest - - permissions: - contents: write # IMPORTANT: mandatory for making GitHub Releases - id-token: write # IMPORTANT: mandatory for sigstore - - steps: - - name: Download all the dists - uses: actions/download-artifact@v3 - with: - name: python-package-distributions - path: dist/ - - name: Sign the dists with Sigstore - uses: sigstore/gh-action-sigstore-python@v2.1.1 - with: - inputs: >- - ./dist/*.tar.gz - ./dist/*.whl - - name: Create GitHub Release - env: - GITHUB_TOKEN: ${{ github.token }} - run: >- - gh release create - '${{ github.ref_name }}' - --repo '${{ github.repository }}' - --notes "" - - name: Upload artifact signatures to GitHub Release - env: - GITHUB_TOKEN: ${{ github.token }} - # Upload to GitHub Release using the `gh` CLI. - # `dist/` contains the built packages, and the - # sigstore-produced signatures and certificates. - run: >- - gh release upload - '${{ github.ref_name }}' dist/** - --repo '${{ github.repository }}' - publish-to-testpypi: name: Publish Python 🐍 distribution πŸ“¦ to TestPyPI needs: @@ -107,7 +66,7 @@ jobs: steps: - name: Download all the dists - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v6 with: name: python-package-distributions path: dist/ diff --git a/source/guides/installing-scientific-packages.rst b/source/guides/installing-scientific-packages.rst index 5677d382a..a1aeae567 100644 --- a/source/guides/installing-scientific-packages.rst +++ b/source/guides/installing-scientific-packages.rst @@ -14,13 +14,13 @@ In particular, `NumPy `__, which provides the basis for most of the software in the `scientific Python stack `_ can be configured to interoperate with different FORTRAN libraries, and can take advantage -of different levels of vectorised instructions available in modern CPUs. +of different levels of vectorized instructions available in modern CPUs. Starting with version 1.10.4 of NumPy and version 1.0.0 of SciPy, pre-built 32-bit and 64-bit binaries in the ``wheel`` format are available for all major operating systems (Windows, macOS, and Linux) on PyPI. 
Note, however, that on Windows, NumPy binaries are linked against the `ATLAS -`__ BLAS/LAPACK library, restricted to SSE2 +`__ BLAS/LAPACK library, restricted to SSE2 instructions, so they may not provide optimal linear algebra performance. There are a number of alternative options for obtaining scientific Python @@ -89,7 +89,7 @@ SciPy distributions ------------------- The SciPy site lists `several distributions -`_ +`_ that provide the full SciPy stack to end users in an easy to use and update format. @@ -118,21 +118,22 @@ be loaded and unloaded from the user's environment. The conda cross-platform package manager ---------------------------------------- -`Anaconda `_ is a Python distribution -published by Anaconda, Inc. It is a stable collection of Open Source -packages for big data and scientific use. As of the 5.0 release of Anaconda, -about 200 packages are installed by default, and a total of 400-500 can be -installed and updated from the Anaconda repository. - ``conda`` is an open source (BSD licensed) package management system and -environment management system included in Anaconda that allows users to install +environment management system that allows users to install multiple versions of binary software packages and their dependencies, and easily switch between them. It is a cross-platform tool working on Windows, -macOS, and Linux. Conda can be used to package up and distribute all kinds of +MacOS, and Linux. Conda can be used to package up and distribute all kinds of packages, it is not limited to just Python packages. It has full support for native virtual environments. Conda makes environments first-class citizens, making it easy to create independent environments even for C libraries. It is written in Python, but is Python-agnostic. Conda manages Python itself as a package, so that :command:`conda update python` is possible, in contrast to -pip, which only manages Python packages. Conda is available in Anaconda and -Miniconda (an easy-to-install download with just Python and conda). +pip, which only manages Python packages. + +Anaconda `Anaconda `_ is a Python distribution published by Anaconda, Inc. It is a stable collection of Open Source packages for big data and scientific use, and a collection of Graphical Interface utilities for managing conda environments. + +In addition to the full distribution provided by Anaconda, the conda package manager itself is available in `miniconda `_, `miniforge `_, and `pixi `_. + + +Conda packages are available on multiple channels on Anaconda.org, including the +default channel supported by Anaconda, Inc, the community supported conda-forge channel, which provides a wide variety of pre-built packages, and some domain-specific package collections. diff --git a/source/guides/installing-stand-alone-command-line-tools.rst b/source/guides/installing-stand-alone-command-line-tools.rst index 8578a3b28..c078fd1e4 100644 --- a/source/guides/installing-stand-alone-command-line-tools.rst +++ b/source/guides/installing-stand-alone-command-line-tools.rst @@ -1,3 +1,5 @@ +.. _installing-stand-alone-command-line-tools: + Installing stand alone command line tools ========================================= diff --git a/source/guides/installing-using-linux-tools.rst b/source/guides/installing-using-linux-tools.rst index f0914f8dc..56647f3e9 100644 --- a/source/guides/installing-using-linux-tools.rst +++ b/source/guides/installing-using-linux-tools.rst @@ -51,7 +51,7 @@ To install pip and wheel for the system Python, there are two options: 1. 
Enable the `EPEL repository `_ using `these instructions - `__. + `__. On EPEL 7, you can install pip and wheel like so: .. code-block:: bash diff --git a/source/guides/installing-using-pip-and-virtual-environments.rst b/source/guides/installing-using-pip-and-virtual-environments.rst index 64fdf9382..22d1840cc 100644 --- a/source/guides/installing-using-pip-and-virtual-environments.rst +++ b/source/guides/installing-using-pip-and-virtual-environments.rst @@ -200,7 +200,7 @@ When your virtual environment is activated, you can install packages. Use the Install a package ~~~~~~~~~~~~~~~~~ -For example,let's install the +For example, let's install the `Requests`_ library from the :term:`Python Package Index (PyPI)`: .. tab:: Unix/macOS diff --git a/source/guides/licensing-examples-and-user-scenarios.rst b/source/guides/licensing-examples-and-user-scenarios.rst new file mode 100644 index 000000000..b6cdfe327 --- /dev/null +++ b/source/guides/licensing-examples-and-user-scenarios.rst @@ -0,0 +1,358 @@ +.. _licensing-examples-and-user-scenarios: + + +===================================== +Licensing examples and user scenarios +===================================== + + +:pep:`639` has specified the way to declare a :term:`Distribution Archive`'s +license and paths to license files and other legally required information. +This document aims to provide clear guidance how to migrate from the legacy +to the standardized way of declaring licenses. +Make sure your preferred build backend supports :pep:`639` before +trying to apply the newer guidelines. + + +Licensing Examples +================== + +.. _licensing-example-basic: + +Basic example +------------- + +The Setuptools project itself, as of `version 75.6.0 `__, +does not use the ``License`` field in its own project source metadata. +Further, it no longer explicitly specifies ``license_file``/``license_files`` +as it did previously, since Setuptools relies on its own automatic +inclusion of license-related files matching common patterns, +such as the :file:`LICENSE` file it uses. + +It includes the following license-related metadata in its +:file:`pyproject.toml`: + +.. code-block:: toml + + [project] + classifiers = [ + "License :: OSI Approved :: MIT License" + ] + +The simplest migration to PEP 639 would consist of using this instead: + +.. code-block:: toml + + [project] + license = "MIT" + +Or, if the project used :file:`setup.cfg`, in its ``[metadata]`` table: + +.. code-block:: ini + + [metadata] + license = MIT + +The output Core Metadata for the :term:`Distribution Package` would then be: + +.. code-block:: email + + License-Expression: MIT + License-File: LICENSE + +The :file:`LICENSE` file would be stored at :file:`/setuptools-{VERSION}/LICENSE` +in the sdist and :file:`/setuptools-{VERSION}.dist-info/licenses/LICENSE` +in the wheel, and unpacked from there into the site directory (e.g. +:file:`site-packages/`) on installation; :file:`/` is the root of the respective +archive and ``{VERSION}`` the version of the Setuptools release in the Core +Metadata. + + +.. _licensing-example-advanced: + +Advanced example +---------------- + +Suppose Setuptools were to include the licenses of the third-party projects +that are vendored in the :file:`setuptools/_vendor/` and :file:`pkg_resources/_vendor/` +directories; specifically: + +.. code-block:: text + + packaging==21.2 + pyparsing==2.2.1 + ordered-set==3.1.1 + more_itertools==8.8.0 + +The appropriate license expressions are: + +.. 
code-block:: text + + packaging: Apache-2.0 OR BSD-2-Clause + pyparsing: MIT + ordered-set: MIT + more_itertools: MIT + +A comprehensive license expression covering both Setuptools +proper and its vendored dependencies would contain these metadata, +combining all the license expressions into one. Such an expression might be: + +.. code-block:: text + + MIT AND (Apache-2.0 OR BSD-2-Clause) + +In addition, per the requirements of the licenses, the relevant license files +must be included in the package. Suppose the :file:`LICENSE` file contains the text +of the MIT license and the copyrights used by Setuptools, ``pyparsing``, +``more_itertools`` and ``ordered-set``; and the :file:`LICENSE*` files in the +:file:`setuptools/_vendor/packaging/` directory contain the Apache 2.0 and +2-clause BSD license text, and the Packaging copyright statement and +`license choice notice `__. + +Specifically, we assume the license files are located at the following +paths in the project source tree (relative to the project root and +:file:`pyproject.toml`): + +.. code-block:: text + + LICENSE + setuptools/_vendor/packaging/LICENSE + setuptools/_vendor/packaging/LICENSE.APACHE + setuptools/_vendor/packaging/LICENSE.BSD + +Putting it all together, our :file:`pyproject.toml` would be: + +.. code-block:: toml + + [project] + license = "MIT AND (Apache-2.0 OR BSD-2-Clause)" + license-files = [ + "LICENSE*", + "setuptools/_vendor/LICENSE*", + ] + +Or alternatively, the license files can be specified explicitly (paths will be +interpreted as glob patterns): + +.. code-block:: toml + + [project] + license = "MIT AND (Apache-2.0 OR BSD-2-Clause)" + license-files = [ + "LICENSE", + "setuptools/_vendor/LICENSE", + "setuptools/_vendor/LICENSE.APACHE", + "setuptools/_vendor/LICENSE.BSD", + ] + +If our project used :file:`setup.cfg`, we could define this in : + +.. code-block:: ini + + [metadata] + license = MIT AND (Apache-2.0 OR BSD-2-Clause) + license_files = + LICENSE + setuptools/_vendor/packaging/LICENSE + setuptools/_vendor/packaging/LICENSE.APACHE + setuptools/_vendor/packaging/LICENSE.BSD + +With either approach, the output Core Metadata in the distribution +would be: + +.. code-block:: email + + License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause) + License-File: LICENSE + License-File: setuptools/_vendor/packaging/LICENSE + License-File: setuptools/_vendor/packaging/LICENSE.APACHE + License-File: setuptools/_vendor/packaging/LICENSE.BSD + +In the resulting sdist, with :file:`/` as the root of the archive and ``{VERSION}`` +the version of the Setuptools release specified in the Core Metadata, +the license files would be located at the paths: + +.. code-block:: text + + /setuptools-{VERSION}/LICENSE + /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE + /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE.APACHE + /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE.BSD + +In the built wheel, with :file:`/` being the root of the archive and +``{VERSION}`` as the previous, the license files would be stored at: + +.. 
code-block:: text + + /setuptools-{VERSION}.dist-info/licenses/LICENSE + /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE + /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.APACHE + /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.BSD + +Finally, in the installed project, with :file:`site-packages/` being the site dir +and ``{VERSION}`` as the previous, the license files would be installed to: + +.. code-block:: text + + site-packages/setuptools-{VERSION}.dist-info/licenses/LICENSE + site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE + site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.APACHE + site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.BSD + + +Expression examples +''''''''''''''''''' + +Some additional examples of valid ``License-Expression`` values: + +.. code-block:: email + + License-Expression: MIT + License-Expression: BSD-3-Clause + License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause) + License-Expression: MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause) + License-Expression: GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause + License-Expression: LicenseRef-Public-Domain OR CC0-1.0 OR Unlicense + License-Expression: LicenseRef-Proprietary + License-Expression: LicenseRef-Custom-License + + +User Scenarios +============== + +The following covers the range of common use cases from a user perspective, +providing guidance for each. Do note that the following +should **not** be considered legal advice, and readers should consult a +licensed legal practitioner in their jurisdiction if they are unsure about +the specifics for their situation. + + +I have a private package that won't be distributed +-------------------------------------------------- + +If your package isn't shared publicly, i.e. outside your company, +organization or household, it *usually* isn't strictly necessary to include +a formal license, so you wouldn't necessarily have to do anything extra here. + +However, it is still a good idea to include ``LicenseRef-Proprietary`` +as a license expression in your package configuration, and/or a +copyright statement and any legal notices in a :file:`LICENSE.txt` file +in the root of your project directory, which will be automatically +included by packaging tools. + + +I just want to share my own work without legal restrictions +----------------------------------------------------------- + +While you aren't required to include a license, if you don't, no one has +`any permission to download, use or improve your work `__, +so that's probably the *opposite* of what you actually want. +The `MIT license `__ is a great choice instead, as it's simple, +widely used and allows anyone to do whatever they want with your work +(other than sue you, which you probably also don't want). + +To apply it, just paste `the text `__ into a file named +:file:`LICENSE.txt` at the root of your repo, and add the year and your name to +the copyright line. Then, just add ``license = "MIT"`` under +``[project]`` in your :file:`pyproject.toml` if your packaging tool supports it, +or in its config file/section. You're done! 
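+
+Put together, the relevant part of :file:`pyproject.toml` might look like this
+minimal sketch (the project name and version are placeholders):
+
+.. code-block:: toml
+
+    [project]
+    name = "my-package"
+    version = "1.0.0"
+    license = "MIT"
+    # A LICENSE.txt file in the project root is matched by the default
+    # license-files patterns, so it normally does not need to be listed here.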
+
+
+I want to distribute my project under a specific license
+--------------------------------------------------------
+
+To use a particular license, simply paste its text into a :file:`LICENSE.txt`
+file at the root of your repo, if you don't have it in a file starting with
+:file:`LICENSE` or :file:`COPYING` already, and add
+``license = "LICENSE-ID"`` under ``[project]`` in your
+:file:`pyproject.toml` if your packaging tool supports it, or else in its
+config file. You can find the ``LICENSE-ID``
+and copyable license text on sites like
+`ChooseALicense `__ or `SPDX `__.
+
+Many popular code hosts, project templates and packaging tools can add the
+license file for you, and may support the expression as well in the future.
+
+
+I maintain an existing package that's already licensed
+------------------------------------------------------
+
+If you already have license files and metadata in your project, you
+should only need to make a couple of tweaks to take advantage of the new
+functionality.
+
+In your project config file, enter your license expression under
+``license`` (``[project]`` table in :file:`pyproject.toml`),
+or the equivalent for your packaging tool,
+and make sure to remove any legacy ``license`` table subkeys or
+``License ::`` classifiers. Your existing ``license`` value may already
+be valid as one (e.g. ``MIT``, ``Apache-2.0 OR BSD-2-Clause``, etc.);
+otherwise, check the `SPDX license list `__ for the identifier
+that matches the license used.
+
+Make sure to list your license files under ``license-files``
+under ``[project]`` in :file:`pyproject.toml`
+or else in your tool's configuration file.
+
+See the :ref:`licensing-example-basic` for a simple but complete real-world demo
+of how this works in practice.
+See also the best-effort guidance on how to translate license classifiers
+into license expressions provided by the :pep:`639` authors:
+`Mapping License Classifiers to SPDX Identifiers `__.
+Packaging tools may support automatically converting legacy licensing
+metadata; check your tool's documentation for more information.
+
+
+My package includes other code under different licenses
+-------------------------------------------------------
+
+If your project includes code from others covered by different licenses,
+such as vendored dependencies or files copied from other open source
+software, you can construct a license expression
+to describe the licenses involved and the relationship
+between them.
+
+In short, ``License-1 AND License-2`` means that *both* licenses apply
+(for example, you included a file under another license), and
+``License-1 OR License-2`` means that *either* of the licenses can be used, at
+the user's option (for example, you want to allow users a choice of multiple
+licenses). You can use parentheses (``()``) for grouping to form expressions
+that cover even the most complex situations.
+
+In your project config file, enter your license expression under
+``license`` (``[project]`` table of :file:`pyproject.toml`),
+or the equivalent for your packaging tool,
+and make sure to remove any legacy ``license`` table subkeys
+or ``License ::`` classifiers.
+
+Also, make sure you add the full license text of all the licenses as files
+somewhere in your project repository. List the
+relative path or glob patterns to each of them under ``license-files``
+under ``[project]`` in :file:`pyproject.toml`
+(if your tool supports it), or else in your tool's configuration file.
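+
+In :file:`pyproject.toml`, such a combined declaration might look like the
+following sketch (the license identifiers and paths here are hypothetical; a
+concrete worked case follows below):
+
+.. code-block:: toml
+
+    [project]
+    license = "Apache-2.0 AND (MIT OR BSD-2-Clause)"
+    license-files = [
+        "LICENSE*",
+        "vendor/somelib/COPYING*",
+    ]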
+ +As an example, if your project was licensed MIT but incorporated +a vendored dependency (say, ``packaging``) that was licensed under +either Apache 2.0 or the 2-clause BSD, your license expression would +be ``MIT AND (Apache-2.0 OR BSD-2-Clause)``. You might have a +:file:`LICENSE.txt` in your repo root, and a :file:`LICENSE-APACHE.txt` and +:file:`LICENSE-BSD.txt` in the :file:`_vendor/` subdirectory, so to include +all of them, you'd specify ``["LICENSE.txt", "_vendor/packaging/LICENSE*"]`` +as glob patterns, or +``["LICENSE.txt", "_vendor/LICENSE-APACHE.txt", "_vendor/LICENSE-BSD.txt"]`` +as literal file paths. + +See a fully worked out :ref:`licensing-example-advanced` for an end-to-end +application of this to a real-world complex project, with many technical +details, and consult a `tutorial `__ for more help and examples +using SPDX identifiers and expressions. + + +.. _chooseamitlicense: https://choosealicense.com/licenses/mit/ +.. _choosealicenselist: https://choosealicense.com/licenses/ +.. _dontchoosealicense: https://choosealicense.com/no-permission/ +.. _mappingclassifierstospdx: https://peps.python.org/pep-0639/appendix-mapping-classifiers/ +.. _packaginglicense: https://github.com/pypa/packaging/blob/21.2/LICENSE +.. _setuptools7560: https://github.com/pypa/setuptools/blob/v75.6.0/pyproject.toml +.. _spdxlist: https://spdx.org/licenses/ +.. _spdxtutorial: https://github.com/david-a-wheeler/spdx-tutorial diff --git a/source/guides/modernize-setup-py-project.rst b/source/guides/modernize-setup-py-project.rst index 5b6ab3c26..1f71d1973 100644 --- a/source/guides/modernize-setup-py-project.rst +++ b/source/guides/modernize-setup-py-project.rst @@ -67,7 +67,7 @@ For more details: * :ref:`distributing-packages` * :ref:`pyproject-build-system-table` -* :doc:`pip:reference/build-system/pyproject-toml` +* :doc:`pip:reference/build-system` How to handle additional build-time dependencies? @@ -128,7 +128,7 @@ For some projects this isolation is unwanted and it can be deactivated as follow For more details: -* :doc:`pip:reference/build-system/pyproject-toml` +* :doc:`pip:reference/build-system` How to handle packaging metadata? @@ -244,5 +244,5 @@ Where to read more about this? ============================== * :ref:`pyproject-toml-spec` -* :doc:`pip:reference/build-system/pyproject-toml` +* :doc:`pip:reference/build-system` * :doc:`setuptools:build_meta` diff --git a/source/guides/packaging-binary-extensions.rst b/source/guides/packaging-binary-extensions.rst index 589ed89c8..de8a9d2d6 100644 --- a/source/guides/packaging-binary-extensions.rst +++ b/source/guides/packaging-binary-extensions.rst @@ -403,3 +403,15 @@ a Debian system, see the following articles: * `What are (c)python extension modules? `_ * `Releasing the gil `_ * `Writing cpython extension modules using C++ `_ + +Additional considerations for binary wheels +------------------------------------------- + +The `pypackaging-native `_ website has +additional coverage of packaging Python packages with native code. It aims to +provide an overview of the most important packaging issues for such projects, +with in-depth explanations and references. + +Examples of topics covered are non-Python compiled dependencies ("native +dependencies"), the importance of the ABI (Application Binary Interface) of +native code, dependency on SIMD code and cross compilation. 
diff --git a/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst b/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst index 049fba15c..a3d893c9f 100644 --- a/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst +++ b/source/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.rst @@ -21,10 +21,10 @@ for temporarily storing and downloading the source packages. details of building platform specific projects. If you have binary components, check out :ref:`cibuildwheel`'s GitHub Action examples. -Configuring trusted publishing +Configuring Trusted Publishing ============================== -This guide relies on PyPI's `trusted publishing`_ implementation to connect +This guide relies on PyPI's `Trusted Publishing`_ implementation to connect to `GitHub Actions CI/CD`_. This is recommended for security reasons, since the generated tokens are created for each of your projects individually and expire automatically. Otherwise, you'll need to generate an @@ -36,7 +36,7 @@ Since this guide will demonstrate uploading to both PyPI and TestPyPI, we'll need two trusted publishers configured. The following steps will lead you through creating the "pending" publishers for your new :term:`PyPI project `. -However it is also possible to add `trusted publishing`_ to any +However it is also possible to add `Trusted Publishing`_ to any pre-existing project, if you are its owner. .. attention:: @@ -75,7 +75,7 @@ Let's begin! πŸš€ .. attention:: - For security reasons, you must require `manual approval `_ + For security reasons, you must require `manual approval `_ on each run for the ``pypi`` environment. @@ -87,13 +87,13 @@ Creating a workflow definition GitHub CI/CD workflows are declared in YAML files stored in the ``.github/workflows/`` directory of your repository. -Let's create a ``.github/workflows/publish-to-test-pypi.yml`` +Let's create a ``.github/workflows/publish-to-pypi.yml`` file. Start it with a meaningful name and define the event that should make GitHub run this workflow: -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml +.. literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml :end-before: jobs: @@ -107,7 +107,7 @@ build the distribution packages. First, we'll define the job for building the dist packages of your project and storing them for later use: -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml +.. literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml :start-at: jobs: :end-before: Install pypa/build @@ -119,7 +119,7 @@ And now we can build the dists from source and store them. In this example, we'll use the ``build`` package. So add this to the steps list: -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml +.. literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml :start-at: Install pypa/build :end-before: publish-to-pypi @@ -134,9 +134,9 @@ provided by GitHub Actions. This also defines a GitHub Environment for the job to run in its context and a URL to be displayed in GitHub's UI nicely. Additionally, it allows acquiring an OpenID Connect token that the ``pypi-publish`` actions needs to implement secretless -trusted publishing to PyPI. +Trusted Publishing to PyPI. -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml +.. 
literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml :start-after: path: dist/ :end-before: steps: @@ -149,49 +149,23 @@ Publishing the distribution to PyPI Finally, add the following steps at the end: -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml +.. literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml :start-after: id-token: write - :end-before: github-release: + :end-before: publish-to-testpypi: This step uses the `pypa/gh-action-pypi-publish`_ GitHub Action: after the stored distribution package has been downloaded by the `download-artifact`_ action, it uploads the contents of the ``dist/`` folder into PyPI unconditionally. -Signing the distribution packages -================================= - -The following job signs the distribution packages with `Sigstore`_, -the same artifact signing system `used to sign CPython `_. - -Firstly, it uses the `sigstore/gh-action-sigstore-python GitHub Action`_ -to sign the distribution packages. In the next step, an empty GitHub Release -from the current tag is created using the ``gh`` CLI. Note this step can be further -customised. See the `gh release documentation `_ -as a reference. - .. tip:: - You may need to manage your ``GITHUB_TOKEN`` permissions to - enable creating the GitHub Release. See the `GitHub - documentation `_ - for instructions. Specifically, the token needs the - ``contents: write`` permission. - -Finally, the signed distributions are uploaded to the GitHub Release. - -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml - :language: yaml - :start-at: github-release: - :end-before: publish-to-testpypi - - -.. note:: - - This is a replacement for GPG signatures, for which support has been - `removed from PyPI `_. - However, this job is not mandatory for uploading to PyPI and can be omitted. + Starting with version + `v1.11.0 `_, + `pypa/gh-action-pypi-publish`_ generates and uploads :pep:`740`-compatible + attestations for each distribution by default. No additional manual + signing steps are required. Separate workflow for publishing to TestPyPI @@ -201,7 +175,7 @@ Now, repeat these steps and create another job for publishing to the TestPyPI package index under the ``jobs`` section: -.. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml +.. literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml :start-at: publish-to-testpypi @@ -217,7 +191,7 @@ This paragraph showcases the whole workflow after following the above guide. .. collapse:: Click here to display the entire GitHub Actions CI/CD workflow definition - .. literalinclude:: github-actions-ci-cd-sample/publish-to-test-pypi.yml + .. literalinclude:: github-actions-ci-cd-sample/publish-to-pypi.yml :language: yaml That's all, folks! @@ -254,9 +228,6 @@ sure that your release pipeline remains healthy! https://github.com/actions/download-artifact .. _`upload-artifact`: https://github.com/actions/upload-artifact -.. _Sigstore: https://www.sigstore.dev/ -.. _`sigstore/gh-action-sigstore-python GitHub Action`: - https://github.com/marketplace/actions/gh-action-sigstore-python .. _Secrets: https://docs.github.com/en/actions/reference/encrypted-secrets -.. _trusted publishing: https://docs.pypi.org/trusted-publishers/ +.. 
_Trusted Publishing: https://docs.pypi.org/trusted-publishers/ diff --git a/source/guides/section-build-and-publish.rst b/source/guides/section-build-and-publish.rst index 2af29d763..52f827553 100644 --- a/source/guides/section-build-and-publish.rst +++ b/source/guides/section-build-and-publish.rst @@ -7,12 +7,13 @@ Building and Publishing writing-pyproject-toml distributing-packages-using-setuptools - single-sourcing-package-version dropping-older-python-versions packaging-binary-extensions packaging-namespace-packages + creating-command-line-tools creating-and-discovering-plugins using-testpypi making-a-pypi-friendly-readme publishing-package-distribution-releases-using-github-actions-ci-cd-workflows modernize-setup-py-project + licensing-examples-and-user-scenarios diff --git a/source/guides/single-sourcing-package-version.rst b/source/guides/single-sourcing-package-version.rst index 5c8af21e0..7ed3d87da 100644 --- a/source/guides/single-sourcing-package-version.rst +++ b/source/guides/single-sourcing-package-version.rst @@ -1,173 +1,8 @@ -.. _`Single sourcing the version`: +:orphan: -=================================== -Single-sourcing the package version -=================================== +.. meta:: + :http-equiv=refresh: 0; url=../../discussions/single-source-version/ -.. todo:: Update this page for build backends other than setuptools. +Redirecting stale single-source package version link... -There are many techniques to maintain a single source of truth for the version -number of your project: - -#. Read the file in :file:`setup.py` and get the version. Example (from `pip setup.py - `_):: - - import codecs - import os.path - - def read(rel_path): - here = os.path.abspath(os.path.dirname(__file__)) - with codecs.open(os.path.join(here, rel_path), 'r') as fp: - return fp.read() - - def get_version(rel_path): - for line in read(rel_path).splitlines(): - if line.startswith('__version__'): - delim = '"' if '"' in line else "'" - return line.split(delim)[1] - else: - raise RuntimeError("Unable to find version string.") - - setup( - ... - version=get_version("package/__init__.py") - ... - ) - - .. note:: - - As of the release of setuptools 46.4.0, one can accomplish the same - thing by instead placing the following in the project's - :file:`setup.cfg` file (replacing "package" with the import name of the - package): - - .. code-block:: ini - - [metadata] - version = attr: package.__version__ - - As of the release of setuptools 61.0.0, one can specify the - version dynamically in the project's :file:`pyproject.toml` file. - - .. code-block:: toml - - [project] - name = "package" - dynamic = ["version"] - - [tool.setuptools.dynamic] - version = {attr = "package.__version__"} - - Please be aware that declarative config indicators, including the - ``attr:`` directive, are not supported in parameters to - :file:`setup.py`. - -#. Use an external build tool that either manages updating both locations, or - offers an API that both locations can use. - - Few tools you could use, in no particular order, and not necessarily complete: - `bump2version `_, - `changes `_, - `commitizen `_, - `zest.releaser `_. - - -#. Set the value to a ``__version__`` global variable in a dedicated module in - your project (e.g. :file:`version.py`), then have :file:`setup.py` read and - ``exec`` the value into a variable. - - :: - - version = {} - with open("...sample/version.py") as fp: - exec(fp.read(), version) - # later on we use: version['__version__'] - - Example using this technique: `warehouse `_. - -#. 
Place the value in a simple ``VERSION`` text file and have both - :file:`setup.py` and the project code read it. - - :: - - with open(os.path.join(mypackage_root_dir, 'VERSION')) as version_file: - version = version_file.read().strip() - - An advantage with this technique is that it's not specific to Python. Any - tool can read the version. - - .. warning:: - - With this approach you must make sure that the ``VERSION`` file is included in - all your source and binary distributions (e.g. add ``include VERSION`` to your - :file:`MANIFEST.in`). - -#. Set the value in :file:`setup.py`, and have the project code use the - ``importlib.metadata`` API to fetch the value at runtime. - (``importlib.metadata`` was introduced in Python 3.8 and is available to - older versions as the ``importlib-metadata`` project.) An installed - project's version can be fetched with the API as follows:: - - import sys - - if sys.version_info >= (3, 8): - from importlib import metadata - else: - import importlib_metadata as metadata - - assert metadata.version('pip') == '1.2.0' - - Be aware that the ``importlib.metadata`` API only knows about what's in the - installation metadata, which is not necessarily the code that's currently - imported. - - If a project uses this method to fetch its version at runtime, then its - ``install_requires`` value needs to be edited to install - ``importlib-metadata`` on pre-3.8 versions of Python like so:: - - setup( - ... - install_requires=[ - ... - 'importlib-metadata >= 1.0 ; python_version < "3.8"', - ... - ], - ... - ) - - An older (and less efficient) alternative to ``importlib.metadata`` is the - ``pkg_resources`` API provided by ``setuptools``:: - - import pkg_resources - assert pkg_resources.get_distribution('pip').version == '1.2.0' - - If a project uses ``pkg_resources`` to fetch its own version at runtime, - then ``setuptools`` must be added to the project's ``install_requires`` - list. - - Example using this technique: `setuptools `_. - - -#. Set the value to ``__version__`` in ``sample/__init__.py`` and import - ``sample`` in :file:`setup.py`. - - :: - - import sample - setup( - ... - version=sample.__version__ - ... - ) - - .. warning:: - - Although this technique is common, beware that it will fail if - ``sample/__init__.py`` imports packages from ``install_requires`` - dependencies, which will very likely not be installed yet when - :file:`setup.py` is run. - - -#. Keep the version number in the tags of a version control system (Git, Mercurial, etc) - instead of in the code, and automatically extract it from there using - `setuptools_scm `_. +If the page doesn't automatically refresh, see :ref:`single-source-version`. diff --git a/source/guides/supporting-multiple-python-versions.rst b/source/guides/supporting-multiple-python-versions.rst index 8c128ed91..7e945aa53 100644 --- a/source/guides/supporting-multiple-python-versions.rst +++ b/source/guides/supporting-multiple-python-versions.rst @@ -62,7 +62,7 @@ of many continuous-integration systems. There are two hosted services which when used in conjunction provide automated testing across Linux, Mac and Windows: - - `Travis CI `_ provides both a Linux and a macOS + - `Travis CI `_ provides both a Linux and a macOS environment. The Linux environment is Ubuntu 12.04 LTS Server Edition 64 bit while the macOS is 10.9.2 at the time of writing. 
- `Appveyor `_ provides a Windows environment diff --git a/source/guides/supporting-windows-using-appveyor.rst b/source/guides/supporting-windows-using-appveyor.rst index 0044d8c5e..e884dd976 100644 --- a/source/guides/supporting-windows-using-appveyor.rst +++ b/source/guides/supporting-windows-using-appveyor.rst @@ -237,6 +237,6 @@ For reference, the SDK setup support script is listed here: :linenos: .. _Appveyor: https://www.appveyor.com/ -.. _Travis: https://travis-ci.org/ +.. _Travis: https://travis-ci.com/ .. _GitHub: https://github.com .. _Bitbucket: https://bitbucket.org/ diff --git a/source/guides/tool-recommendations.rst b/source/guides/tool-recommendations.rst index 3903232b9..bf8d93d5a 100644 --- a/source/guides/tool-recommendations.rst +++ b/source/guides/tool-recommendations.rst @@ -109,6 +109,11 @@ Do **not** use :ref:`distutils`, which is deprecated, and has been removed from the standard library in Python 3.12, although it still remains available from setuptools. +.. _extension-module-tool-recommendations: + +Build backends for extension modules +------------------------------------ + For packages with :term:`extension modules `, it is best to use a build system with dedicated support for the language the extension is written in, for example: @@ -139,15 +144,25 @@ to build distributable wheels. Uploading to PyPI ================= -For projects hosted on GitHub, it is recommended to use the :ref:`trusted publishing -`, which allows the package to be securely uploaded to PyPI -from a GitHub Actions job. (This is not yet supported on software forges other -than GitHub.) +For projects hosted on or published via supported CI/CD platforms, it is +recommended to use the :ref:`Trusted Publishing `, which +allows the package to be securely uploaded to PyPI from a CI/CD workflow +without a manually configured API token. + +As of November 2024, PyPI supports the following platforms as Trusted Publishing +providers: + +* GitHub Actions (on ``https://github.com``) +* GitLab CI/CD (on ``https://gitlab.com``) +* ActiveState +* Google Cloud The other available method is to upload the package manually using :ref:`twine`. -**Never** use ``python setup.py upload`` for this task. In addition to being -:ref:`deprecated `, it is insecure. +.. danger:: + + **Never** use ``python setup.py upload`` for this task. In addition to being + :ref:`deprecated `, it is insecure. Workflow tools diff --git a/source/guides/writing-pyproject-toml.rst b/source/guides/writing-pyproject-toml.rst index e82bd893d..a1a595a13 100644 --- a/source/guides/writing-pyproject-toml.rst +++ b/source/guides/writing-pyproject-toml.rst @@ -22,19 +22,20 @@ three possible TOML tables in this file. .. note:: - There is a significant difference between the ``[build-system]`` and - ``[project]`` tables. The former should always be present, regardless of - which build backend you use (since it *defines* the tool you use). The latter - is understood by *most* build backends, but some build backends use a - different format. + The ``[build-system]`` table should always be present, + regardless of which build backend you use (``[build-system]`` *defines* the + build tool you use). - At the time of writing this (November 2023), Poetry_ is a notable build - backend that does not use the ``[project]`` table (it uses the - ``[tool.poetry]`` table instead). + On the other hand, the ``[project]`` table is understood by *most* build + backends, but some build backends use a different format. 
+ A notable exception is Poetry_, which before version 2.0 (released January + 5, 2025) did not use the ``[project]`` table, it used the ``[tool.poetry]`` + table instead. With version 2.0, it supports both. Also, the setuptools_ build backend supports both the ``[project]`` table, - and the older format in ``setup.cfg`` or ``setup.py``. For new projects, it - is recommended to use the ``[project]`` table, and keep ``setup.py`` only if + and the older format in ``setup.cfg`` or ``setup.py``. + + For new projects, use the ``[project]`` table, and keep ``setup.py`` only if some programmatic configuration is needed (such as building C extensions), but the ``setup.cfg`` and ``setup.py`` formats are still valid. See :ref:`setup-py-deprecated`. @@ -55,38 +56,7 @@ Usually, you'll just copy what your build backend's documentation suggests (after :ref:`choosing your build backend `). Here are the values for some common build backends: -.. tab:: Hatchling - - .. code-block:: toml - - [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - -.. tab:: setuptools - - .. code-block:: toml - - [build-system] - requires = ["setuptools >= 61.0"] - build-backend = "setuptools.build_meta" - -.. tab:: Flit - - .. code-block:: toml - - [build-system] - requires = ["flit_core >= 3.4"] - build-backend = "flit_core.buildapi" - -.. tab:: PDM - - .. code-block:: toml - - [build-system] - requires = ["pdm-backend"] - build-backend = "pdm.backend" - +.. include:: ../shared/build-backend-tabs.rst Static vs. dynamic metadata @@ -130,7 +100,7 @@ only field that cannot be marked as dynamic. [project] name = "spam-eggs" -The project name must consists of ASCII letters, digits, underscores "``_``", +The project name must consist of ASCII letters, digits, underscores "``_``", hyphens "``-``" and periods "``.``". It must not start or end with an underscore, hyphen or period. @@ -163,8 +133,8 @@ This field is required, although it is often marked as dynamic using dynamic = ["version"] This allows use cases such as filling the version from a ``__version__`` -attribute or a Git tag. Consult :ref:`Single sourcing the version` for more -details. +attribute or a Git tag. Consult the :ref:`single-source-version` +discussion for more details. Dependencies and requirements @@ -236,7 +206,7 @@ To install a command as part of your package, declare it in the In this example, after installing your project, a ``spam-cli`` command will be available. Executing this command will do the equivalent of -``from spam import main_cli; main_cli()``. +``import sys; from spam import main_cli; sys.exit(main_cli())``. On Windows, scripts packaged this way need a terminal, so if you launch them from within a graphical application, they will make a terminal pop @@ -321,29 +291,104 @@ You can also specify the format explicitly, like this: readme = {file = "README.txt", content-type = "text/x-rst"} +.. _license-and-license-files: + +``license`` and ``license-files`` +--------------------------------- + +As per :pep:`639`, licenses should be declared with two fields: + +- ``license`` is an :term:`SPDX license expression ` + consisting of one or more :term:`license identifiers `. +- ``license-files`` is a list of license file glob patterns. + +A previous PEP had specified ``license`` to be a table with a ``file`` or a +``text`` key, this format is now deprecated. Most :term:`build backends` now support the new format as shown in the following table. + +.. 
list-table:: build backend versions that introduced :pep:`639` support + :header-rows: 1 + + * - hatchling + - setuptools + - flit-core [#flit-core-pep639]_ + - pdm-backend + - poetry-core + - uv-build + * - 1.27.0 + - 77.0.3 + - 3.12 + - 2.4.0 + - 2.2.0 + - 0.7.19 + + +.. _license: + ``license`` ------------ +''''''''''' + +The new format for ``license`` is a valid :term:`SPDX license expression ` +consisting of one or more :term:`license identifiers `. +The full license list is available at the +`SPDX license list page `_. The supported list version is +3.17 or any later compatible one. + +.. code-block:: toml -This can take two forms. You can put your license in a file, typically -``LICENSE`` or ``LICENSE.txt``, and link that file here: + [project] + license = "GPL-3.0-or-later" + # or + license = "MIT AND (Apache-2.0 OR BSD-2-Clause)" + +.. note:: If you get a build error that ``license`` should be a dict/table, + your build backend doesn't yet support the new format. See the + `above section `_ for more context. + The now deprecated format is `described in PEP 621 `__. + +As a general rule, it is a good idea to use a standard, well-known +license, both to avoid confusion and because some organizations avoid software +whose license is unapproved. + +If your :term:`Distribution Archive` is licensed with a license that doesn't +have an existing SPDX identifier, you can create a custom one in format +``LicenseRef-[idstring]``. The custom identifiers must follow the SPDX +specification, `clause 10.1 `_ of the version 2.2 or any later +compatible one. .. code-block:: toml [project] - license = {file = "LICENSE"} + license = "LicenseRef-My-Custom-License" + + +.. _license-files: -or you can write the name of the license: +``license-files`` +''''''''''''''''' + +This is a list of license files and files containing other legal +information you want to distribute with your package. .. code-block:: toml [project] - license = {text = "MIT License"} + license-files = ["LICEN[CS]E*", "vendored/licenses/*.txt", "AUTHORS.md"] -If you are using a standard, well-known license, it is not necessary to use this -field. Instead, you should use one of the :ref:`classifiers` starting with ``License -::``. (As a general rule, it is a good idea to use a standard, well-known -license, both to avoid confusion and because some organizations avoid software -whose license is unapproved.) +The glob patterns must follow the specification: + +- Alphanumeric characters, underscores (``_``), hyphens (``-``) and dots (``.``) + will be matched verbatim. +- Special characters: ``*``, ``?``, ``**`` and character ranges: [] are supported. +- Path delimiters must be the forward slash character (``/``). +- Patterns are relative to the directory containing :file:`pyproject.toml`, and + thus may not start with a slash character. +- Parent directory indicators (``..``) must not be used. +- Each glob must match at least one file. + +Literal paths are valid globs. +Any characters or character sequences not covered by this specification are +invalid. ``keywords`` @@ -379,9 +424,6 @@ A list of PyPI classifiers that apply to your project. Check the "Intended Audience :: Developers", "Topic :: Software Development :: Build Tools", - # Pick your license as you wish (see also "license" above) - "License :: OSI Approved :: MIT License", - # Specify the Python versions you support here. 
"Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", @@ -399,6 +441,7 @@ To prevent a package from being uploaded to PyPI, use the special ``Private :: Do Not Upload`` classifier. PyPI will always reject packages with classifiers beginning with ``Private ::``. +.. _writing-pyproject-toml-urls: ``urls`` -------- @@ -406,6 +449,13 @@ beginning with ``Private ::``. A list of URLs associated with your project, displayed on the left sidebar of your PyPI project page. +.. note:: + + See :ref:`well-known-labels` for a listing + of labels that PyPI and other packaging tools are specifically aware of, + and `PyPI's project metadata docs `_ + for PyPI-specific URL processing. + .. code-block:: toml [project.urls] @@ -415,12 +465,35 @@ sidebar of your PyPI project page. Issues = "https://github.com/me/spam/issues" Changelog = "https://github.com/me/spam/blob/master/CHANGELOG.md" -Note that if the key contains spaces, it needs to be quoted, e.g., +Note that if the label contains spaces, it needs to be quoted, e.g., ``Website = "https://example.com"`` but ``"Official Website" = "https://example.com"``. +Users are advised to use :ref:`well-known-labels` for their project URLs +where appropriate, since consumers of metadata (like package indices) can +specialize their presentation. + +For example in the following metadata, neither ``MyHomepage`` nor +``"Download Link"`` is a well-known label, so they will be rendered verbatim: + +.. code-block:: toml + + [project.urls] + MyHomepage = "https://example.com" + "Download Link" = "https://example.com/abc.tar.gz" +Whereas in this metadata ``HomePage`` and ``DOWNLOAD`` both have +well-known equivalents (``homepage`` and ``download``), and can be presented +with those semantics in mind (the project's home page and its external +download location, respectively). + +.. code-block:: toml + + [project.urls] + HomePage = "https://example.com" + DOWNLOAD = "https://example.com/abc.tar.gz" + Advanced plugins ================ @@ -467,7 +540,8 @@ A full example ] description = "Lovely Spam! Wonderful Spam!" readme = "README.rst" - license = {file = "LICENSE.txt"} + license = "MIT" + license-files = ["LICEN[CS]E.*"] keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"] classifiers = [ "Development Status :: 4 - Beta", @@ -504,6 +578,9 @@ A full example like ``requires-python = "<= 3.10"`` here. `This blog post `_ contains some information regarding possible problems. +.. [#flit-core-pep639] flit-core `does not yet `_ support WITH in SPDX license expressions. + +.. _flit-issue-735: https://github.com/pypa/flit/issues/735 .. _gfm: https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax .. _setuptools: https://setuptools.pypa.io .. _poetry: https://python-poetry.org @@ -514,3 +591,5 @@ A full example .. _pytest: https://pytest.org .. _pygments: https://pygments.org .. _rest: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html +.. _spdxcustomids: https://spdx.github.io/spdx-spec/v2.2.2/other-licensing-information-detected/ +.. _spdxlicenselist: https://spdx.org/licenses/ diff --git a/source/index.rst b/source/index.rst index 32f85b206..aa522c8c5 100644 --- a/source/index.rst +++ b/source/index.rst @@ -79,7 +79,7 @@ topics. 
Explanations and Discussions ============================ -The :doc:`discussions/index` section for in-depth explanations and discussion +The :doc:`discussions/index` section provides in-depth explanations and discussion about topics, such as: * :doc:`discussions/deploying-python-applications` diff --git a/source/key_projects.rst b/source/key_projects.rst index 531c28066..e4501fe0e 100644 --- a/source/key_projects.rst +++ b/source/key_projects.rst @@ -404,26 +404,30 @@ conda :doc:`Docs ` -conda is the package management tool for `Anaconda -`__ Python installations. -Anaconda Python is a distribution from `Anaconda, Inc -`__ specifically aimed at the scientific -community, and in particular on Windows where the installation of binary -extensions is often difficult. +Conda is a package, dependency, and environment management system for any language β€” Python, R, +Ruby, C/C++, Fortran, and more. It is written in Python and +widely used in the Python scientific computing community, due to its support for non-Python +compiled libraries and extensions. It is used as the basis of the `Anaconda +`__ Python distribution from Anaconda, Inc. It was originally +aimed at the scientific community, but can also be used on its own, or with the +:doc:`miniconda `, `miniforge `_ or +`pixi `_ systems. It is available for Windows, Mac and Linux systems. Conda is a completely separate tool from :ref:`pip`, virtualenv and wheel, but provides -many of their combined features in terms of package management, virtual environment -management and deployment of binary extensions. - -Conda does not install packages from PyPI and can install only from -the official Anaconda repositories, or anaconda.org (a place for -user-contributed *conda* packages), or a local (e.g. intranet) package -server. However, note that :ref:`pip` can be installed into, and work -side-by-side with conda for managing :term:`distributions -` from PyPI. Also, `conda skeleton -`__ -is a tool to make Python packages installable by conda by first -fetching them from PyPI and modifying their metadata. +many of their combined features, such as package management, virtual environment +management and deployment of binary extensions and other binary code. + +Conda does not install packages from PyPI -- it can only manage packages built specifically +for conda, which can be made available on a "conda channel", such as those hosted on +`anaconda.org `__, or a local (e.g. intranet) package server. +In addition to the "default" channels managed by `Anaconda, Inc. `__, there are a wide variety of packages from the community supported +`conda-forge project `__ + +Note that :ref:`pip` can be installed into, and work side-by-side with conda +for managing :term:`distributions ` from PyPI. It is also possible +to build conda packages from Python source packages using tools such as +`conda skeleton +`__: a tool to automatically make conda packages from Python packages available on PyPI. .. _devpi: diff --git a/source/overview.rst b/source/overview.rst index 8c68036a7..70ef2d058 100644 --- a/source/overview.rst +++ b/source/overview.rst @@ -339,7 +339,7 @@ originated and where the technologies below work best: Bringing your own kernel ^^^^^^^^^^^^^^^^^^^^^^^^ -Most operating systems support some form of classical virtualization, +Most desktop operating systems support some form of classical virtualization, running applications packaged as images containing a full operating system of their own. 
Running these virtual machines, or VMs, is a mature approach, widespread in data center environments. @@ -348,9 +348,13 @@ These techniques are mostly reserved for larger scale deployments in data centers, though certain complex applications can benefit from this packaging. The technologies are Python agnostic, and include: -* `Vagrant `_ -* `VHD `_, `AMI `_, and :doc:`other formats ` -* `OpenStack `_ - A cloud management system in Python, with extensive VM support +* KVM on Linux +* Hyper-V on Windows +* `VHD `_, + `AMI `_, + and :doc:`other formats ` +* `OpenStack `_ - + A cloud management system written in Python, with extensive VM support Bringing your own hardware ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/source/shared/build-backend-tabs.rst b/source/shared/build-backend-tabs.rst new file mode 100644 index 000000000..64ef0bf2f --- /dev/null +++ b/source/shared/build-backend-tabs.rst @@ -0,0 +1,42 @@ +.. (comment) This file is included in guides/writing-pyproject-toml.rst and tutorials/packaging-projects.rst. +.. The minimum versions here are the versions that introduced support for PEP 639. + +.. tab:: Hatchling + + .. code-block:: toml + + [build-system] + requires = ["hatchling >= 1.26"] + build-backend = "hatchling.build" + +.. tab:: setuptools + + .. code-block:: toml + + [build-system] + requires = ["setuptools >= 77.0.3"] + build-backend = "setuptools.build_meta" + +.. tab:: Flit + + .. code-block:: toml + + [build-system] + requires = ["flit_core >= 3.12.0, <4"] + build-backend = "flit_core.buildapi" + +.. tab:: PDM + + .. code-block:: toml + + [build-system] + requires = ["pdm-backend >= 2.4.0"] + build-backend = "pdm.backend" + +.. tab:: uv-build + + .. code-block:: toml + + [build-system] + requires = ["uv_build >= 0.9.21, <0.10.0"] + build-backend = "uv_build" diff --git a/source/specifications/binary-distribution-format.rst b/source/specifications/binary-distribution-format.rst index 8da38357a..8bb41ab40 100644 --- a/source/specifications/binary-distribution-format.rst +++ b/source/specifications/binary-distribution-format.rst @@ -150,10 +150,10 @@ this character cannot appear within any component. This is handled as follows: - In distribution names, any run of ``-_.`` characters (HYPHEN-MINUS, LOW LINE and FULL STOP) should be replaced with ``_`` (LOW LINE), and uppercase characters should be replaced with corresponding lowercase ones. This is - equivalent to regular :ref:`name normalization ` followed by replacing ``-`` with ``_``. - Tools consuming wheels must be prepared to accept ``.`` (FULL STOP) and - uppercase letters, however, as these were allowed by an earlier version of - this specification. + equivalent to regular :ref:`name normalization ` followed + by replacing ``-`` with ``_``. Tools consuming wheels must be prepared to accept + ``.`` (FULL STOP) and uppercase letters, however, as these were allowed by an earlier + version of this specification. - Version numbers should be normalised according to the :ref:`Version specifier specification `. Normalised version numbers cannot contain ``-``. - The remaining components may not contain ``-`` characters, so no escaping @@ -175,13 +175,17 @@ File contents ''''''''''''' The contents of a wheel file, where {distribution} is replaced with the -name of the package, e.g. ``beaglevote`` and {version} is replaced with -its version, e.g. ``1.0.0``, consist of: +:ref:`normalized name ` of the package, e.g. +``beaglevote`` and {version} is replaced +with its :ref:`normalized version `, +e.g. 
``1.0.0``, (with dash/``-`` characters replaced with underscore/``_`` characters +in both fields) consist of: #. ``/``, the root of the archive, contains all files to be installed in ``purelib`` or ``platlib`` as specified in ``WHEEL``. ``purelib`` and ``platlib`` are usually both ``site-packages``. #. ``{distribution}-{version}.dist-info/`` contains metadata. +#. :file:`{distribution}-{version}.dist-info/licenses/` contains license files. #. ``{distribution}-{version}.data/`` contains one subdirectory for each non-empty install scheme key not already covered, where the subdirectory name is an index into a dictionary of install paths @@ -189,7 +193,7 @@ its version, e.g. ``1.0.0``, consist of: #. Python scripts must appear in ``scripts`` and begin with exactly ``b'#!python'`` in order to enjoy script wrapper generation and ``#!python`` rewriting at install time. They may have any or no - extension. + extension. The ``scripts`` directory may only contain regular files. #. ``{distribution}-{version}.dist-info/METADATA`` is Metadata version 1.1 or greater format metadata. #. ``{distribution}-{version}.dist-info/WHEEL`` is metadata about the archive @@ -249,6 +253,36 @@ The .dist-info directory installation will fail if any file in the archive is not both mentioned and correctly hashed in RECORD. +Subdirectories in :file:`.dist-info/` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Subdirectories under :file:`.dist-info/` are reserved for future use. +The following subdirectory names under :file:`.dist-info/` are reserved for specific usage: + +================= ============== +Subdirectory name PEP / Standard +================= ============== +``licenses`` :pep:`639` +``license_files`` :pep:`639` +``LICENSES`` `REUSE licensing framework `__ +``sboms`` :pep:`770` +================= ============== + +The :file:`.dist-info/licenses/` directory +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If the metadata version is 2.4 or greater and one or more ``License-File`` +fields is specified, the :file:`.dist-info/` directory MUST contain a +:file:`licenses/` subdirectory, which MUST contain the files listed in the +``License-File`` fields in the :file:`METADATA` file at their respective paths +relative to the :file:`licenses/` directory. + +The :file:`.dist-info/sboms/` directory +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +All files contained within the :file:`.dist-info/sboms/` directory MUST +be Software Bill-of-Materials (SBOM) files that describe software contained +within the distribution archive. The .data directory ^^^^^^^^^^^^^^^^^^^ @@ -423,6 +457,14 @@ History - February 2013: This specification was approved through :pep:`427`. - February 2021: The rules on escaping in wheel filenames were revised, to bring them into line with what popular tools actually do. +- December 2024: Clarified that the ``scripts`` folder should only contain + regular files (the expected behaviour of consuming tools when encountering + symlinks or subdirectories in this folder is not formally defined, and hence + may vary between tools). +- December 2024: The :file:`.dist-info/licenses/` directory was specified through + :pep:`639`. +- January 2025: Clarified that name and version needs to be normalized for + ``.dist-info`` and ``.data`` directories. 
Appendix diff --git a/source/specifications/build-details/examples/build-details-v1.0.json b/source/specifications/build-details/examples/build-details-v1.0.json new file mode 100644 index 000000000..dd08b230f --- /dev/null +++ b/source/specifications/build-details/examples/build-details-v1.0.json @@ -0,0 +1,51 @@ +{ + "schema_version": "1.0", + "base_prefix": "/usr", + "base_interpreter": "/usr/bin/python", + "platform": "linux-x86_64", + "language": { + "version": "3.14", + "version_info": { + "major": 3, + "minor": 14, + "micro": 0, + "releaselevel": "alpha", + "serial": 0 + } + }, + "implementation": { + "name": "cpython", + "version": { + "major": 3, + "minor": 14, + "micro": 0, + "releaselevel": "alpha", + "serial": 0 + }, + "hexversion": 51249312, + "cache_tag": "cpython-314", + "_multiarch": "x86_64-linux-gnu" + }, + "abi": { + "flags": ["t", "d"], + "extension_suffix": ".cpython-314-x86_64-linux-gnu.so", + "stable_abi_suffix": ".abi3.so" + }, + "suffixes": { + "source": [".py"], + "bytecode": [".pyc"], + "optimized_bytecode": [".pyc"], + "debug_bytecode": [".pyc"], + "extensions": [".cpython-314-x86_64-linux-gnu.so", ".abi3.so", ".so"] + }, + "libpython": { + "dynamic": "/usr/lib/libpython3.14.so.1.0", + "dynamic_stableabi": "/usr/lib/libpython3.so", + "static": "/usr/lib/python3.14/config-3.14-x86_64-linux-gnu/libpython3.14.a", + "link_extensions": true + }, + "c_api": { + "headers": "/usr/include/python3.14", + "pkgconfig_path": "/usr/lib/pkgconfig" + } +} diff --git a/source/specifications/build-details/index.rst b/source/specifications/build-details/index.rst new file mode 100644 index 000000000..0cd5b5fe5 --- /dev/null +++ b/source/specifications/build-details/index.rst @@ -0,0 +1,52 @@ +.. _build-details: + +========================== +:file:`build-details.json` +========================== + +.. toctree:: + :hidden: + + v1.0 + + +The ``build-details.json`` file is a standardized file format that provides +build-specfic information of a Python installation, such as its version, +extension ABI details, and other information that is specific to that particular +build of Python. + +Starting from Python 3.14, a ``build-details.json`` file is installed in the +platform-independent standard library directory (``stdlib``, e.g. +``/usr/lib/python3.14/build-details.json``). + +Please refer to the :ref:`latest version ` for its +specification. + +.. + Update to point to the latest version! + +.. literalinclude:: examples/build-details-v1.0.json + :caption: Example + :language: json + :linenos: + + +Changelog +--------- + +.. + Order in decreasing order. + +v1.0 +~~~~ + +.. list-table:: + + * - Specification + - :ref:`build-details-v1.0` + + * - Schema + - https://packaging.python.org/en/latest/specifications/schemas/build-details-v1.0.schema.json + + +- Initial version, introduced by :pep:`739`. diff --git a/source/specifications/build-details/v1.0.rst b/source/specifications/build-details/v1.0.rst new file mode 100644 index 000000000..3a8cfe277 --- /dev/null +++ b/source/specifications/build-details/v1.0.rst @@ -0,0 +1,18 @@ +.. _build-details-v1.0: + +=========================== +``build-details.json`` v1.0 +=========================== + + +Specification +------------- + +.. jsonschema:: ../../../extra/specifications/schemas/build-details-v1.0.schema.json + :lift_title: false + + +Example +------- + +.. 
literalinclude:: examples/build-details-v1.0.json diff --git a/source/specifications/core-metadata.rst b/source/specifications/core-metadata.rst index 90793c791..eb9a03ff6 100644 --- a/source/specifications/core-metadata.rst +++ b/source/specifications/core-metadata.rst @@ -6,6 +6,8 @@ Core metadata specifications ============================ +This page describes version 2.5, approved in September 2025. + Fields defined in the following specification should be considered valid, complete and not subject to change. The required fields are: @@ -48,11 +50,11 @@ Metadata-Version .. versionadded:: 1.0 Version of the file format; legal values are "1.0", "1.1", "1.2", "2.1", -"2.2", and "2.3". +"2.2", "2.3", "2.4", and "2.5". -Automated tools consuming metadata SHOULD warn if ``metadata_version`` is +Automated tools consuming metadata SHOULD warn if ``metadata-version`` is greater than the highest version they support, and MUST fail if -``metadata_version`` has a greater major version than the highest +``metadata-version`` has a greater major version than the highest version they support (as described in the :ref:`Version specifier specification `, the major version is the value before the first dot). @@ -63,7 +65,7 @@ all of the needed fields. Example:: - Metadata-Version: 2.3 + Metadata-Version: 2.4 .. _core-metadata-name: @@ -131,6 +133,16 @@ only, and indicates that the field value was calculated at wheel build time, and may not be the same as the value in the sdist or in other wheels for the project. +Note in particular that if you have obtained a prebuilt wheel, you cannot +assume that a field which is not marked as ``Dynamic`` will have the same value +in other wheels, as some wheels are not built directly from the sdist, but are +modified from existing wheels (the ``auditwheel`` tool does this, for example, +and it's commonly used when building wheels for PyPI). Such modifications +*could* include changing metadata (even non-dynamic metadata). Similarly, if +you have a sdist and a wheel which you didn't build from that sdist, you cannot +assume that the wheel's metadata matches that of the sdist, even if the field +is not marked as ``Dynamic``. + Full details of the semantics of ``Dynamic`` are described in :pep:`643`. .. _core-metadata-platform: @@ -341,32 +353,6 @@ Example:: These tools have been very widely used for many years, so it was easier to update the specification to match the de facto standard. -.. _home-page-optional: -.. _core-metadata-home-page: - -Home-page -========= - -.. versionadded:: 1.0 - -A string containing the URL for the distribution's home page. - -Example:: - - Home-page: http://www.example.com/~cschultz/bvote/ - -.. _core-metadata-download-url: - -Download-URL -============ - -.. versionadded:: 1.1 - -A string containing the URL from which this version of the distribution -can be downloaded. (This means that the URL can't be something like -".../BeagleVote-latest.tgz", but instead must be ".../BeagleVote-0.45.tgz".) - - .. _author-optional: .. _core-metadata-author: @@ -460,6 +446,14 @@ License ======= .. versionadded:: 1.0 +.. deprecated:: 2.4 + in favour of ``License-Expression``. + +.. warning:: + As of Metadata 2.4, ``License`` and ``License-Expression`` are mutually + exclusive. If both are specified, tools which parse metadata will disregard + ``License`` and PyPI will reject uploads. + See `PEP 639 `__. Text indicating the license covering the distribution where the license is not a selection from the "License" Trove classifiers. 
See @@ -477,6 +471,56 @@ Examples:: License: GPL version 3, excluding DRM provisions +.. _license-expression-optional: +.. _core-metadata-license-expression: + +License-Expression +================== + +.. versionadded:: 2.4 + +Text string that is a valid SPDX +:term:`license expression `, +as specified in :doc:`/specifications/license-expression`. + +Note that the expression in this field only applies to the +:term:`Distribution Archive` containing the metadata with this field (e.g., +:term:`Source Distribution ` or :term:`Wheel`), +not the project overall or other files related to the project (including other +distribution archives). + +Examples:: + + License-Expression: MIT + License-Expression: BSD-3-Clause + License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause) + License-Expression: MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause) + License-Expression: GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause + License-Expression: LicenseRef-Special-License OR CC0-1.0 OR Unlicense + License-Expression: LicenseRef-Proprietary + + +.. _license-file-optional: +.. _core-metadata-license-file: + +License-File (multiple use) +=========================== + +.. versionadded:: 2.4 + +Each entry is a string representation of the path of a license-related file. +The path is located within the project source tree, relative to the project +root directory. For details see :pep:`639`. + +Examples:: + + License-File: LICENSE + License-File: AUTHORS + License-File: LICENSE.txt + License-File: licenses/LICENSE.MIT + License-File: licenses/LICENSE.CC0 + + .. _metadata-classifier: .. _core-metadata-classifier: @@ -490,6 +534,11 @@ for the distribution. Classifiers are described in :pep:`301`, and the Python Package Index publishes a dynamic list of `currently defined classifiers `__. +.. note:: + The use of ``License ::`` classifiers is deprecated as of Metadata 2.4, + use ``License-Expression`` instead. See + `PEP 639 `_. + This field may be followed by an environment marker after a semicolon. Examples:: @@ -612,6 +661,10 @@ Example:: The label is free text limited to 32 characters. +Starting with :pep:`753`, project metadata consumers (such as the Python +Package Index) can use a standard normalization process to discover "well-known" +labels, which can then be given special presentations when being rendered +for human consumption. See :ref:`well-known-project-urls`. .. _metadata_provides_extra: .. _core-metadata-provides-extra: @@ -671,6 +724,101 @@ user SHOULD be warned and the value ignored to avoid ambiguity. Tools MAY choose to raise an error when reading an invalid name for older metadata versions. +.. _core-metadata-import-name: + +Import-Name (multiple use) +========================== + +.. versionadded:: 2.5 + +A string containing an import name that the project exclusively provides when +installed. The specified import name MUST be a valid Python identifier or can +be empty. The import names listed in this field MUST be importable when the +project is installed on *some* platform for the same version of the project. +This implies that the metadata MUST be consistent across all sdists and wheels +for a project release. + +An import name MAY be followed by a semicolon and the term "private" +(e.g. ``; private``) with any amount of whitespace surrounding the semicolon. +This signals to tools that the import name is not part of the public API for +the project. + +Projects SHOULD list all the shortest import names that are exclusively provided +by the project. 
If any of the shortest names are dotted names, all intervening +names from that name to the top-level name SHOULD also be listed appropriately +in ``Import-Name`` and/or ``Import-Namespace``. + +If a project lists the same name in both ``Import-Name`` and +``Import-Namespace``, tools MUST raise an error due to ambiguity. + +Tools SHOULD raise an error when two projects that are about to be installed +list names that overlap in each other's ``Import-Name`` entries, or when a +project has an entry in ``Import-Name`` that overlaps with another project's +``Import-Namespace`` entries. This is to avoid projects unexpectedly shadowing +another project's code. Tools MAY warn or raise an error when installing a +project into a preexisting environment where there is import name overlap with +a project that is already installed. + +Projects MAY have an empty ``Import-Name`` field in their metadata to represent +a project with no import names (i.e. there are no Python modules of any kind in +the distribution file). + +Since projects MAY have no ``Import-Name`` metadata (either because the +project uses an older metadata version, or because it didn't specify any), then +tools have no information about what names the project provides. However, in +practice the majority of projects have their project name match what their +import name would be. As such, it is a reasonable assumption to make that a +project name that is normalized in some way to an import name +(e.g. ``packaging.utils.canonicalize_name(name, validate=True).replace("-", "_")``) +can be used if some answer is needed. + +Examples:: + + Import-Name: PIL + Import-Name: _private_module ; private + Import-Name: zope.interface + Import-Name: + + +.. _core-metadata-import-namespace: + +Import-Namespace (multiple use) +=============================== + +.. versionadded:: 2.5 + +A string containing an import name that the project provides when installed, but +not exclusively. The specified import name MUST be a valid Python identifier. +This field is used for namespace packages where multiple projects can contribute +to the same import namespace. Projects all listing the same import name in +``Import-Namespace`` can be installed together without shadowing each other. + +An import name MAY be followed by a semicolon and the term "private" (e.g. +``; private``) with any amount of whitespace surrounding the semicolon. This +signals to tools that the import name is not part of the public API for the +project. + +Projects SHOULD list all the shortest import names that are exclusively provided +by the project. If any of the shortest names are dotted names, all intervening +names from that name to the top-level name SHOULD also be listed appropriately +in ``Import-Name`` and/or ``Import-Namespace``. + +The import names listed in this field MUST be importable when the project is +installed on *some* platform for the same version of the project. This implies +that the metadata MUST be consistent across all sdists and wheels for a project +release. + +If a project lists the same name in both ``Import-Name`` and +``Import-Namespace``, tools MUST raise an error due to ambiguity. + +Note that ``Import-Namespace`` CANNOT be empty like ``Import-Name``. + +Examples:: + + Import-Namespace: zope + Import-Name: _private_module ; private + + Rarely Used Fields ================== @@ -725,7 +873,7 @@ This field may be followed by an environment marker after a semicolon. 
Examples:: Provides-Dist: OtherProject - Provides-Dist: AnotherProject (3.4) + Provides-Dist: AnotherProject==3.4 Provides-Dist: virtual_package; python_version >= "3.4" .. _core-metadata-obsoletes-dist: @@ -762,6 +910,45 @@ Examples:: Deprecated Fields ================= +Deprecated fields should be avoided, but they are valid metadata fields. They +may be removed in future versions of the core metadata standard (at which point +they will only be valid in files that specify a metadata version prior to the +removal). Tools SHOULD warn users when deprecated fields are used. + +.. _home-page-optional: +.. _core-metadata-home-page: + +Home-page +--------- + +.. versionadded:: 1.0 + +.. deprecated:: 1.2 + + Per :pep:`753`, use :ref:`core-metadata-project-url` instead. + +A string containing the URL for the distribution's home page. + +Example:: + + Home-page: http://www.example.com/~cschultz/bvote/ + +.. _core-metadata-download-url: + +Download-URL +------------ + +.. versionadded:: 1.1 + +.. deprecated:: 1.2 + + Per :pep:`753`, use :ref:`core-metadata-project-url` instead. + +A string containing the URL from which this version of the distribution +can be downloaded. (This means that the URL can't be something like +"``.../BeagleVote-latest.tgz``", but instead must be +"``.../BeagleVote-0.45.tgz``".) + Requires -------- @@ -848,8 +1035,11 @@ History ======= - March 2001: Core metadata 1.0 was approved through :pep:`241`. -- April 2003: Core metadata 1.1 was approved through :pep:`314`: + +- April 2003: Core metadata 1.1 was approved through :pep:`314`. + - February 2010: Core metadata 1.2 was approved through :pep:`345`. + - February 2018: Core metadata 2.1 was approved through :pep:`566`. - Added ``Description-Content-Type`` and ``Provides-Extra``. @@ -864,6 +1054,23 @@ History - Restricted extra names to be normalized. +- August 2024: Core metadata 2.4 was approved through :pep:`639`. + + - Added the ``License-Expression`` field. + - Added the ``License-File`` field. + +- August 2025: Clarified that ``Dynamic`` only affects how fields + must be treated when building a wheel from a sdist, not when modifying + a wheel. + +- September 2025: Core metadata 2.5 was approved through :pep:`794`. + + - Added the ``Import-Name`` field. + - Added the ``Import-Namespace`` field. + +- October 2025: Clarified that ``License-Expression`` applies to the containing + distribution file and not the project itself. + ---- .. [1] reStructuredText markup: diff --git a/source/specifications/dependency-groups.rst b/source/specifications/dependency-groups.rst new file mode 100644 index 000000000..a35afb475 --- /dev/null +++ b/source/specifications/dependency-groups.rst @@ -0,0 +1,252 @@ +.. _dependency-groups: + +================= +Dependency Groups +================= + +This specification defines dependency groups, a mechanism for storing package +requirements in ``pyproject.toml`` files such that they are not included in +project metadata when it is built. + +Dependency groups are suitable for internal development use-cases like linting +and testing, as well as for projects which are not built for distribution, like +collections of related scripts. + +Fundamentally, dependency groups should be thought of as being a standardized +subset of the capabilities of ``requirements.txt`` files (which are +``pip``-specific). 
+ +Specification +============= + +Examples +-------- + +This is a simple table which shows ``docs`` and ``test`` groups:: + + [dependency-groups] + docs = ["sphinx"] + test = ["pytest>7", "coverage"] + +and a similar table which defines ``docs``, ``test``, and ``coverage`` groups:: + + [dependency-groups] + docs = ["sphinx"] + coverage = ["coverage[toml]"] + test = ["pytest>7", {include-group = "coverage"}] + +The ``[dependency-groups]`` Table +--------------------------------- + +Dependency groups are defined as a table in ``pyproject.toml`` named +``dependency-groups``. The ``dependency-groups`` table contains an arbitrary +number of user-defined keys, each of which has, as its value, a list of +requirements. + +``[dependency-groups]`` keys, sometimes also called "group names", must be +:ref:`valid non-normalized names <name-format>`. Tools which handle Dependency +Groups MUST :ref:`normalize <name-normalization>` these names before +comparisons. + +Tools SHOULD prefer to present the original, non-normalized name to users, and +if duplicate names are detected after normalization, tools SHOULD emit an +error. + +Requirement lists, the values in ``[dependency-groups]``, may contain strings, +tables (``dict`` in Python), or a mix of strings and tables. Strings must be +valid :ref:`dependency specifiers <dependency-specifiers>`, and tables must be +valid Dependency Group Includes. + +Dependency Group Include +------------------------ + +A Dependency Group Include includes another Dependency Group in the current +group. + +An include is a table with exactly one key, ``"include-group"``, whose value is +a string, the name of another Dependency Group. + +Includes are defined to be exactly equivalent to the contents of the named +Dependency Group, inserted into the current group at the location of the include. +For example, if ``foo = ["a", "b"]`` is one group, and +``bar = ["c", {include-group = "foo"}, "d"]`` is another, then ``bar`` should +evaluate to ``["c", "a", "b", "d"]`` when Dependency Group Includes are expanded. + +Dependency Group Includes may specify the same package multiple times. +Tools SHOULD NOT deduplicate or otherwise alter the list contents produced by the +include. For example, given the following table: + +.. code-block:: toml + + [dependency-groups] + group-a = ["foo"] + group-b = ["foo>1.0"] + group-c = ["foo<1.0"] + all = [ + "foo", + {include-group = "group-a"}, + {include-group = "group-b"}, + {include-group = "group-c"}, + ] + +The resolved value of ``all`` SHOULD be ``["foo", "foo", "foo>1.0", "foo<1.0"]``. +Tools should handle such a list exactly as they would handle any other case in +which they are asked to process the same requirement multiple times with +different version constraints. + +Dependency Group Includes may include groups containing Dependency Group Includes, +in which case those includes should be expanded as well. Dependency Group Includes +MUST NOT include cycles, and tools SHOULD report an error if they detect a cycle. + +Package Building +---------------- + +Build backends MUST NOT include Dependency Group data in built distributions as +package metadata. This means that sdist ``PKG-INFO`` and wheel ``METADATA`` +files should not include referenceable fields containing dependency groups. + +It is, however, valid to use dependency groups in the evaluation of dynamic +metadata, and ``pyproject.toml`` files included in sdists will still contain +``[dependency-groups]``. However, the table's contents are not part of a built +package's interfaces.
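
As a non-normative illustration (the sdist name and project layout below are
hypothetical), the table still travels inside an sdist's :file:`pyproject.toml`
even though no corresponding field ever appears in ``PKG-INFO`` or ``METADATA``:

.. code-block:: python

    import tarfile
    import tomllib

    # Hypothetical sdist built from a project that defines [dependency-groups].
    with tarfile.open("example_project-1.0.tar.gz") as sdist:
        member = sdist.extractfile("example_project-1.0/pyproject.toml")
        pyproject = tomllib.load(member)

    # The groups can be read from the shipped pyproject.toml ...
    print(pyproject.get("dependency-groups", {}))
    # ... but nothing in the sdist's PKG-INFO (or a wheel's METADATA) refers
    # to them, because they are not part of built package metadata.
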
+ +Installing Dependency Groups & Extras +------------------------------------- + +There is no syntax or specification-defined interface for installing or +referring to dependency groups. Tools are expected to provide dedicated +interfaces for this purpose. + +Tools MAY choose to provide the same or similar interfaces for interacting +with dependency groups as they do for managing extras. Tools authors are +advised that the specification does not forbid having an extra whose name +matches a Dependency Group. Separately, users are advised to avoid creating +dependency groups whose names match extras, and tools MAY treat such matching +as an error. + +Validation and Compatibility +---------------------------- + +Tools supporting dependency groups may want to validate data before using it. +When implementing such validation, authors should be aware of the possibility +of future extensions to the specification, so that they do not unnecessarily +emit errors or warnings. + +Tools SHOULD error when evaluating or processing unrecognized data in +dependency groups. + +Tools SHOULD NOT eagerly validate the contents of *all* dependency groups +unless they have a need to do so. + +This means that in the presence of the following data, most tools should allow +the ``foo`` group to be used and only error if the ``bar`` group is used: + +.. code-block:: toml + + [dependency-groups] + foo = ["pyparsing"] + bar = [{set-phasers-to = "stun"}] + +.. note:: + + There are several known cases of tools which have good cause to be + stricter. Linters and validators are an example, as their purpose is to + validate the contents of all dependency groups. + +Reference Implementation +======================== + +The following Reference Implementation prints the contents of a Dependency +Group to stdout, newline delimited. +The output is therefore valid ``requirements.txt`` data. + +.. code-block:: python + + import re + import sys + import tomllib + from collections import defaultdict + + from packaging.requirements import Requirement + + + def _normalize_name(name: str) -> str: + return re.sub(r"[-_.]+", "-", name).lower() + + + def _normalize_group_names(dependency_groups: dict) -> dict: + original_names = defaultdict(list) + normalized_groups = {} + + for group_name, value in dependency_groups.items(): + normed_group_name = _normalize_name(group_name) + original_names[normed_group_name].append(group_name) + normalized_groups[normed_group_name] = value + + errors = [] + for normed_name, names in original_names.items(): + if len(names) > 1: + errors.append(f"{normed_name} ({', '.join(names)})") + if errors: + raise ValueError(f"Duplicate dependency group names: {', '.join(errors)}") + + return normalized_groups + + + def _resolve_dependency_group( + dependency_groups: dict, group: str, past_groups: tuple[str, ...] 
= () + ) -> list[str]: + if group in past_groups: + raise ValueError(f"Cyclic dependency group include: {group} -> {past_groups}") + + if group not in dependency_groups: + raise LookupError(f"Dependency group '{group}' not found") + + raw_group = dependency_groups[group] + if not isinstance(raw_group, list): + raise ValueError(f"Dependency group '{group}' is not a list") + + realized_group = [] + for item in raw_group: + if isinstance(item, str): + # packaging.requirements.Requirement parsing ensures that this is a valid + # PEP 508 Dependency Specifier + # raises InvalidRequirement on failure + Requirement(item) + realized_group.append(item) + elif isinstance(item, dict): + if tuple(item.keys()) != ("include-group",): + raise ValueError(f"Invalid dependency group item: {item}") + + include_group = _normalize_name(next(iter(item.values()))) + realized_group.extend( + _resolve_dependency_group( + dependency_groups, include_group, past_groups + (group,) + ) + ) + else: + raise ValueError(f"Invalid dependency group item: {item}") + + return realized_group + + + def resolve(dependency_groups: dict, group: str) -> list[str]: + if not isinstance(dependency_groups, dict): + raise TypeError("Dependency Groups table is not a dict") + if not isinstance(group, str): + raise TypeError("Dependency group name is not a str") + return _resolve_dependency_group(dependency_groups, group) + + + if __name__ == "__main__": + with open("pyproject.toml", "rb") as fp: + pyproject = tomllib.load(fp) + + dependency_groups_raw = pyproject["dependency-groups"] + dependency_groups = _normalize_group_names(dependency_groups_raw) + print("\n".join(resolve(pyproject["dependency-groups"], sys.argv[1]))) + +History +======= + +- October 2024: This specification was approved through :pep:`735`. diff --git a/source/specifications/dependency-specifiers.rst b/source/specifications/dependency-specifiers.rst index d6713f713..99886563c 100644 --- a/source/specifications/dependency-specifiers.rst +++ b/source/specifications/dependency-specifiers.rst @@ -63,7 +63,7 @@ Versions may be specified according to the rules of the :ref:`Version specifier specification `. (Note: URI is defined in :rfc:`std-66 <3986>`):: - version_cmp = wsp* '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' + version_cmp = wsp* '<=' | '<' | '!=' | '===' | '==' | '>=' | '>' | '~=' version = wsp* ( letterOrDigit | '-' | '_' | '.' | '*' | '+' | '!' )+ version_one = version_cmp version wsp* version_many = version_one (',' version_one)* (',' wsp*)? @@ -87,7 +87,7 @@ environments:: 'platform_system' | 'platform_version' | 'platform_machine' | 'platform_python_implementation' | 'implementation_name' | 'implementation_version' | - 'extra' # ONLY when defined by a containing layer + 'extra' | 'extras' | 'dependency_groups' # ONLY when defined by a containing layer ) marker_var = wsp* (env_var | python_str) marker_expr = marker_var marker_op marker_var @@ -128,6 +128,8 @@ Whitespace Non line-breaking whitespace is mostly optional with no semantic meaning. The sole exception is detecting the end of a URL requirement. +.. _dependency-specifiers-names: + Names ----- @@ -140,7 +142,9 @@ document we limit the acceptable values for identifiers to that regex. A full redefinition of name may take place in a future metadata PEP. The regex (run with re.IGNORECASE) is:: - ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$ + ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])\Z + +.. _dependency-specifiers-extras: Extras ------ @@ -159,6 +163,8 @@ are listed in the "security" extra of requests. 
If multiple extras are listed, all the dependencies are unioned together. +.. _dependency-specifiers-versions: + Versions -------- @@ -170,6 +176,8 @@ via a URL. Version comparison are also used in the markers feature. The optional brackets around a version are present for compatibility with :pep:`345` but should not be generated, only accepted. +.. _dependency-specifiers-environment-markers: + Environment Markers ------------------- @@ -188,15 +196,16 @@ safely evaluate it without running arbitrary code that could become a security vulnerability. Markers were first standardised in :pep:`345`. This document fixes some issues that were observed in the design described in :pep:`426`. -Comparisons in marker expressions are typed by the comparison operator. The -<marker_op> operators that are not in <version_cmp> perform the same as they -do for strings in Python. The <version_cmp> operators use the version comparison -rules of the :ref:`Version specifier specification <version-specifiers>` -when those are defined (that is when both sides have a valid -version specifier). If there is no defined behaviour of this specification -and the operator exists in Python, then the operator falls back to -the Python behaviour. Otherwise an error should be raised. e.g. the following -will result in errors:: +Comparisons in marker expressions are typed by the comparison operator and the +type of the marker value. The <marker_op> operators that are not in +<version_cmp> perform the same as they do for strings or sets in Python based on +whether the marker value is a string or set itself. The <version_cmp> operators +use the version comparison rules of the +:ref:`Version specifier specification <version-specifiers>` when those are +defined (that is when both sides have a valid version specifier). If there is no +defined behaviour of this specification and the operator exists in Python, then +the operator falls back to the Python behaviour for the types involved. +Otherwise an error should be raised. e.g. the following will result in errors:: "dog" ~= "fred" python_version ~= "surprise" @@ -227,52 +236,80 @@ no current specification for this. Regardless, outside of a context where this special handling is taking place, the "extra" variable should result in an error like all other unknown variables. +The "extras" and "dependency_groups" variables are also special. They are used +to specify any requested extras or dependency groups when installing from a lock +file. Outside of the context of lock files, these two variables should result in +an error like all other unknown variables. + +..
list-table:: :header-rows: 1 * - Marker - Python equivalent + - Type - Sample values * - ``os_name`` - :py:data:`os.name` + - String - ``posix``, ``java`` * - ``sys_platform`` - :py:data:`sys.platform` + - String - ``linux``, ``linux2``, ``darwin``, ``java1.8.0_51`` (note that "linux" is from Python3 and "linux2" from Python2) * - ``platform_machine`` - :py:func:`platform.machine()` + - String - ``x86_64`` * - ``platform_python_implementation`` - :py:func:`platform.python_implementation()` + - String - ``CPython``, ``Jython`` * - ``platform_release`` - :py:func:`platform.release()` + - String - ``3.14.1-x86_64-linode39``, ``14.5.0``, ``1.8.0_51`` * - ``platform_system`` - :py:func:`platform.system()` + - String - ``Linux``, ``Windows``, ``Java`` * - ``platform_version`` - :py:func:`platform.version()` + - String - ``#1 SMP Fri Apr 25 13:07:35 EDT 2014`` ``Java HotSpot(TM) 64-Bit Server VM, 25.51-b03, Oracle Corporation`` ``Darwin Kernel Version 14.5.0: Wed Jul 29 02:18:53 PDT 2015; root:xnu-2782.40.9~2/RELEASE_X86_64`` * - ``python_version`` - ``'.'.join(platform.python_version_tuple()[:2])`` + - :ref:`Version ` - ``3.4``, ``2.7`` * - ``python_full_version`` - :py:func:`platform.python_version()` + - :ref:`Version ` - ``3.4.0``, ``3.5.0b1`` * - ``implementation_name`` - :py:data:`sys.implementation.name ` + - String - ``cpython`` * - ``implementation_version`` - see definition below + - :ref:`Version ` - ``3.4.0``, ``3.5.0b1`` * - ``extra`` - An error except when defined by the context interpreting the specification. - - ``test`` + - String + - ``toml`` + * - ``extras`` + - An error except when defined by the context interpreting the + specification. + - Set of strings + - ``{"toml"}`` + * - ``dependency_groups`` + - An error except when defined by the context interpreting the + specification. + - Set of strings + - ``{"test"}`` The ``implementation_version`` marker variable is derived from :py:data:`sys.implementation.version `: @@ -294,13 +331,15 @@ The ``implementation_version`` marker variable is derived from This environment markers section, initially defined through :pep:`508`, supersedes the environment markers section in :pep:`345`. +.. _dependency-specifiers-grammar: + Complete Grammar ================ The complete parsley grammar:: wsp = ' ' | '\t' - version_cmp = wsp* <'<=' | '<' | '!=' | '==' | '>=' | '>' | '~=' | '==='> + version_cmp = wsp* <'<=' | '<' | '!=' | '===' | '==' | '>=' | '>' | '~='> version = wsp* <( letterOrDigit | '-' | '_' | '.' | '*' | '+' | '!' )+> version_one = version_cmp:op version:v wsp* -> (op, v) version_many = version_one:v1 (',' version_one)*:v2 (',' wsp*)? -> [v1] + v2 @@ -320,7 +359,7 @@ The complete parsley grammar:: 'platform_system' | 'platform_version' | 'platform_machine' | 'platform_python_implementation' | 'implementation_name' | 'implementation_version' | - 'extra' # ONLY when defined by a containing layer + 'extra' | 'extras' | 'dependency_groups' # ONLY when defined by a containing layer ):varname -> lookup(varname) marker_var = wsp* (env_var | python_str) marker_expr = marker_var:l marker_op:o marker_var:r -> (o, l, r) @@ -485,6 +524,13 @@ History - June 2024: The definition of ``version_many`` was changed to allow trailing commas, matching with the behavior of the Python implementation that has been in use since late 2022. +- April 2025: Added ``extras`` and ``dependency_groups`` for + :ref:`lock-file-spec` as approved through :pep:`751`. 
+- August 2025: The suggested name validation regex was fixed to match the field + specification (it previously finished with ``$`` instead of ``\Z``, + incorrectly permitting trailing newlines) +- December 2025: Ensure ``===`` before ``==`` in grammar, to allow arbitrary + equality comparisons to be parsed. References diff --git a/source/specifications/direct-url-data-structure.rst b/source/specifications/direct-url-data-structure.rst index 231198ee8..a82537f0a 100644 --- a/source/specifications/direct-url-data-structure.rst +++ b/source/specifications/direct-url-data-structure.rst @@ -31,6 +31,9 @@ Depending on what ``url`` refers to, the second field MUST be one of ``vcs_info` local directory). These info fields have a (possibly empty) subdictionary as value, with the possible keys defined below. +Security Considerations +----------------------- + When persisted, ``url`` MUST be stripped of any sensitive authentication information, for security reasons. @@ -44,7 +47,9 @@ expression: Additionally, the user:password section of the URL MAY be a well-known, non security sensitive string. A typical example is ``git`` -in the case of an URL such as ``ssh://git@gitlab.com/user/repo``. +in the case of a URL such as ``ssh://git@gitlab.com/user/repo``. + +.. _direct-url-data-structure-vcs: VCS URLs -------- @@ -69,6 +74,8 @@ as a dictionary with the following keys: ``commit_id`` in order to reference an immutable version of the source code. +.. _direct-url-data-structure-archive: + Archive URLs ------------ @@ -101,6 +108,8 @@ When both the ``hash`` and ``hashes`` keys are present, the hash represented in ``hash`` key MUST also be present in the ``hashes`` dictionary, so consumers can consider the ``hashes`` key only if it is present, and fall back to ``hash`` otherwise. +.. _direct-url-data-structure-local-directory: + Local directories ----------------- @@ -115,6 +124,8 @@ be compliant with :rfc:`8089`. In particular, the path component must be absolute. Symbolic links SHOULD be preserved when making relative paths absolute. +.. _direct-url-data-structure-subdirectories: + Projects in subdirectories -------------------------- @@ -122,6 +133,8 @@ A top-level ``subdirectory`` field MAY be present containing a directory path, relative to the root of the VCS repository, source archive or local directory, to specify where ``pyproject.toml`` or ``setup.py`` is located. +.. _direct-url-data-structure-registered-vcs: + Registered VCS ============== @@ -223,122 +236,7 @@ JSON Schema The following JSON Schema can be used to validate the contents of ``direct_url.json``: -.. 
code-block:: - - { - "$schema": "https://json-schema.org/draft/2019-09/schema", - "title": "Direct URL Data", - "description": "Data structure that can represent URLs to python projects and distribution artifacts such as VCS source trees, local source trees, source distributions and wheels.", - "definitions": { - "URL": { - "type": "string", - "format": "uri" - }, - "DirInfo": { - "type": "object", - "properties": { - "editable": { - "type": ["boolean", "null"] - } - } - }, - "VCSInfo": { - "type": "object", - "properties": { - "vcs": { - "type": "string", - "enum": [ - "git", - "hg", - "bzr", - "svn" - ] - }, - "requested_revision": { - "type": "string" - }, - "commit_id": { - "type": "string" - }, - "resolved_revision": { - "type": "string" - } - }, - "required": [ - "vcs", - "commit_id" - ] - }, - "ArchiveInfo": { - "type": "object", - "properties": { - "hash": { - "type": "string", - "pattern": "^\\w+=[a-f0-9]+$", - "deprecated": true - }, - "hashes": { - "type": "object", - "patternProperties": { - "^[a-f0-9]+$": { - "type": "string" - } - } - } - } - } - }, - "allOf": [ - { - "type": "object", - "properties": { - "url": { - "$ref": "#/definitions/URL" - } - }, - "required": [ - "url" - ] - }, - { - "anyOf": [ - { - "type": "object", - "properties": { - "dir_info": { - "$ref": "#/definitions/DirInfo" - } - }, - "required": [ - "dir_info" - ] - }, - { - "type": "object", - "properties": { - "vcs_info": { - "$ref": "#/definitions/VCSInfo" - } - }, - "required": [ - "vcs_info" - ] - }, - { - "type": "object", - "properties": { - "archive_info": { - "$ref": "#/definitions/ArchiveInfo" - } - }, - "required": [ - "archive_info" - ] - } - ] - } - ] - } +.. literalinclude:: ../../extra/specifications/schemas/direct-url.schema.json Examples ======== diff --git a/source/specifications/externally-managed-environments.rst b/source/specifications/externally-managed-environments.rst index 2944eb3da..65fc14a62 100644 --- a/source/specifications/externally-managed-environments.rst +++ b/source/specifications/externally-managed-environments.rst @@ -205,7 +205,7 @@ virtual environment to install packages. Software distributors who have a non-Python-specific package manager that manages libraries in the ``sys.path`` of their Python package -should, in general, ship a ``EXTERNALLY-MANAGED`` file in their +should, in general, ship an ``EXTERNALLY-MANAGED`` file in their standard library directory. For instance, Debian may ship a file in ``/usr/lib/python3.9/EXTERNALLY-MANAGED`` consisting of something like diff --git a/source/specifications/file-yanking.rst b/source/specifications/file-yanking.rst new file mode 100644 index 000000000..4ab8cd5cc --- /dev/null +++ b/source/specifications/file-yanking.rst @@ -0,0 +1,92 @@ +.. _file-yanking: + +============ +File Yanking +============ + +.. note:: + + This specification was originally defined in + :pep:`592`. + +.. note:: + + :pep:`592` includes changes to the HTML and JSON index APIs. + These changes are documented in the :ref:`simple-repository-api` + under :ref:`HTML - Project Detail ` + and :ref:`JSON - Project Detail `. + +Specification +============= + +Links in the simple repository **MAY** have a ``data-yanked`` attribute +which may have no value, or may have an arbitrary string as a value. The +presence of a ``data-yanked`` attribute **SHOULD** be interpreted as +indicating that the file pointed to by this particular link has been +"Yanked", and should not generally be selected by an installer, except +under specific scenarios. 
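
As a concrete, non-normative illustration, the short sketch below uses only the
standard library's :mod:`html.parser` to detect which links on a project detail
page carry the attribute (the page fragment and URLs are invented for the
example):

.. code-block:: python

    from html.parser import HTMLParser


    class YankedFileCollector(HTMLParser):
        """Collect (href, yanked?, reason) for each file link on the page."""

        def __init__(self) -> None:
            super().__init__()
            self.files = []

        def handle_starttag(self, tag, attrs):
            if tag != "a":
                return
            attributes = dict(attrs)
            if "href" not in attributes:
                return
            # The attribute's mere presence marks the file as yanked; its value
            # (None when the attribute is valueless) is the optional reason.
            yanked = "data-yanked" in attributes
            reason = attributes.get("data-yanked")
            self.files.append((attributes["href"], yanked, reason))


    collector = YankedFileCollector()
    collector.feed(
        '<a href="https://files.example.com/demo-1.0.tar.gz">demo-1.0.tar.gz</a>'
        '<a href="https://files.example.com/demo-1.1.tar.gz"'
        ' data-yanked="metadata was broken">demo-1.1.tar.gz</a>'
    )
    for href, yanked, reason in collector.files:
        print(href, "yanked:", yanked, "reason:", reason)
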
+ +The value of the ``data-yanked`` attribute, if present, is an arbitrary +string that represents the reason for why the file has been yanked. Tools +that process the simple repository API **MAY** surface this string to +end users. + +The yanked attribute is not immutable once set, and may be rescinded in +the future (and once rescinded, may be reset as well). Thus API users +**MUST** be able to cope with a yanked file being "unyanked" (and even +yanked again). + +Installers +---------- + +The desirable experience for users is that once a file is yanked, when +a human being is currently trying to directly install a yanked file, that +it fails as if that file had been deleted. However, when a human did that +awhile ago, and now a computer is just continuing to mechanically follow +the original order to install the now yanked file, then it acts as if it +had not been yanked. + +An installer **MUST** ignore yanked releases, if the selection constraints +can be satisfied with a non-yanked version, and **MAY** refuse to use a +yanked release even if it means that the request cannot be satisfied at all. +An implementation **SHOULD** choose a policy that follows the spirit of the +intention above, and that prevents "new" dependencies on yanked +releases/files. + +What this means is left up to the specific installer, to decide how to best +fit into the overall usage of their installer. However, there are two +suggested approaches to take: + +1. Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version using + either ``==`` (without any modifiers that make it a range, such as + ``.*``) or ``===``. Matching this version specifier should otherwise + be done as per :ref:`the version specifiers specification + ` for things like local versions, zero padding, + etc. +2. Yanked files are always ignored, unless they are the only file that + matches what a lock file (such as ``Pipfile.lock`` or ``poetry.lock``) + specifies to be installed. In this case, a yanked file **SHOULD** not + be used when creating or updating a lock file from some input file or + command. + +Regardless of the specific strategy that an installer chooses for deciding +when to install yanked files, an installer **SHOULD** emit a warning when +it does decide to install a yanked file. That warning **MAY** utilize the +value of the ``data-yanked`` attribute (if it has a value) to provide more +specific feedback to the user about why that file had been yanked. + + +Mirrors +------- + +Mirrors can generally treat yanked files one of two ways: + +1. They may choose to omit them from their simple repository API completely, + providing a view over the repository that shows only "active", unyanked + files. +2. They may choose to include yanked files, and additionally mirror the + ``data-yanked`` attribute as well. + +Mirrors **MUST NOT** mirror a yanked file without also mirroring the +``data-yanked`` attribute for it. diff --git a/source/specifications/glob-patterns.rst b/source/specifications/glob-patterns.rst new file mode 100644 index 000000000..abdb15b0f --- /dev/null +++ b/source/specifications/glob-patterns.rst @@ -0,0 +1,115 @@ +================= +``glob`` patterns +================= + +Some PyPA specifications, e.g. :ref:`pyproject.toml's license-files +`, accept certain types of *glob patterns* +to match a given string containing wildcards and character ranges against +files and directories. 
This specification defines which patterns are acceptable +and how they should be handled. + + +Valid glob patterns +=================== + +For PyPA purposes, a *valid glob pattern* MUST be a string matched against +filesystem entries as specified below: + +- Alphanumeric characters, underscores (``_``), hyphens (``-``) and dots (``.``) + MUST be matched verbatim. + +- Special glob characters: ``*``, ``?``, ``**`` and character ranges: ``[]`` + containing only the verbatim matched characters MUST be supported. + Within ``[...]``, the hyphen indicates a locale-agnostic range (e.g. ``a-z``, + order based on Unicode code points). + Hyphens at the start or end are matched literally. + +- Path delimiters MUST be the forward slash character (``/``). + +- Patterns always refer to *relative paths*, + e.g., when used in :file:`pyproject.toml`, patterns should always be + relative to the directory containing that file. + Therefore the leading slash character MUST NOT be used. + +- Parent directory indicators (``..``) MUST NOT be used. + +Any characters or character sequences not covered by this specification are +invalid. Projects MUST NOT use such values. +Tools consuming glob patterns SHOULD reject invalid values with an error. + +Literal paths (e.g. :file:`LICENSE`) are valid globs which means they +can also be defined. + +Tools consuming glob patterns: + +- MUST treat each value as a glob pattern, and MUST raise an error if the + pattern contains invalid glob syntax. +- MUST raise an error if any individual user-specified pattern does not match + at least one file. + +Examples of valid glob patterns: + +.. code-block:: python + + "LICEN[CS]E*" + "AUTHORS*" + "licenses/LICENSE.MIT" + "licenses/LICENSE.CC0" + "LICENSE.txt" + "licenses/*" + +Examples of invalid glob patterns: + +.. code-block:: python + + "..\LICENSE.MIT" + # .. must not be used. + # \ is an invalid path delimiter, / must be used. + + "LICEN{CSE*" + # the { character is not allowed + + +Reference implementation in Python +================================== + +It is possible to defer the majority of the pattern matching against the file +system to the :mod:`glob` module in Python's standard library. It is necessary +however to perform additional validations. + +The code below is as a simple reference implementation: + +.. code-block:: python + + import os + import re + from glob import glob + + + def find_pattern(pattern: str) -> list[str]: + """ + >>> find_pattern("/LICENSE.MIT") + Traceback (most recent call last): + ... + ValueError: Pattern '/LICENSE.MIT' should be relative... + >>> find_pattern("../LICENSE.MIT") + Traceback (most recent call last): + ... + ValueError: Pattern '../LICENSE.MIT' cannot contain '..'... + >>> find_pattern("LICEN{CSE*") + Traceback (most recent call last): + ... + ValueError: Pattern 'LICEN{CSE*' contains invalid characters... + """ + if ".." 
in pattern: + raise ValueError(f"Pattern {pattern!r} cannot contain '..'") + if pattern.startswith((os.sep, "/")) or ":\\" in pattern: + raise ValueError( + f"Pattern {pattern!r} should be relative and must not start with '/'" + ) + if re.match(r'^[\w\-\.\/\*\?\[\]]+$', pattern) is None: + raise ValueError(f"Pattern '{pattern}' contains invalid characters.") + found = glob(pattern, recursive=True) + if not found: + raise ValueError(f"Pattern '{pattern}' did not match any files.") + return found diff --git a/source/specifications/index-hosted-attestations.rst b/source/specifications/index-hosted-attestations.rst new file mode 100644 index 000000000..d078e87bd --- /dev/null +++ b/source/specifications/index-hosted-attestations.rst @@ -0,0 +1,368 @@ + +.. _index-hosted-attestations: + +========================= +Index hosted attestations +========================= + +.. note:: This specification was originally defined in :pep:`740`. + +.. note:: + + :pep:`740` includes changes to the HTML and JSON index APIs. + These changes are documented in the :ref:`simple-repository-api` + under :ref:`simple-repository-api-base` and :ref:`json-serialization`. + +Specification +============= + +.. _upload-endpoint: + +Upload endpoint changes +----------------------- + +.. important:: + + The "legacy" upload API is not standardized. + See `PyPI's Upload API documentation `_ + for how attestations are uploaded. + +.. _attestation-object: + +Attestation objects +------------------- + +An attestation object is a JSON object with several required keys; applications +or signers may include additional keys so long as all explicitly +listed keys are provided. The required layout of an attestation +object is provided as pseudocode below. + +.. code-block:: python + + @dataclass + class Attestation: + version: Literal[1] + """ + The attestation object's version, which is always 1. + """ + + verification_material: VerificationMaterial + """ + Cryptographic materials used to verify `envelope`. + """ + + envelope: Envelope + """ + The enveloped attestation statement and signature. + """ + + + @dataclass + class Envelope: + statement: bytes + """ + The attestation statement. + + This is represented as opaque bytes on the wire (encoded as base64), + but it MUST be an JSON in-toto v1 Statement. + """ + + signature: bytes + """ + A signature for the above statement, encoded as base64. + """ + + @dataclass + class VerificationMaterial: + certificate: str + """ + The signing certificate, as `base64(DER(cert))`. + """ + + transparency_entries: list[object] + """ + One or more transparency log entries for this attestation's signature + and certificate. + """ + +A full data model for each object in ``transparency_entries`` is provided in +:ref:`appendix`. Attestation objects **SHOULD** include one or more +transparency log entries, and **MAY** include additional keys for other +sources of signed time (such as an :rfc:`3161` Time Stamping Authority or a +`Roughtime `__ server). + +Attestation objects are versioned; this PEP specifies version 1. Each version +is tied to a single cryptographic suite to minimize unnecessary cryptographic +agility. In version 1, the suite is as follows: + +* Certificates are specified as X.509 certificates, and comply with the + profile in :rfc:`5280`. +* The message signature algorithm is ECDSA, with the P-256 curve for public keys + and SHA-256 as the cryptographic digest function. 
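
To make the version 1 suite concrete, the following non-normative sketch checks
only the signature of an attestation object that has already been parsed from
JSON into a ``dict``. It assumes the third-party ``cryptography`` package and
the pre-authentication encoding from the v1 DSSE signature protocol referenced
in the next section; a real verifier must additionally perform every check
listed under :ref:`attestation-verification`.

.. code-block:: python

    import base64

    from cryptography import x509
    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec


    def pae(payload_type: bytes, payload: bytes) -> bytes:
        """DSSE v1 pre-authentication encoding of the signed statement."""
        return b"DSSEv1 %d %b %d %b" % (
            len(payload_type), payload_type, len(payload), payload
        )


    def signature_is_valid(attestation: dict) -> bool:
        """Check the ECDSA P-256 / SHA-256 signature of a version 1 object."""
        material = attestation["verification_material"]
        envelope = attestation["envelope"]

        # Version 1 mandates an X.509 certificate with an EC P-256 public key.
        certificate = x509.load_der_x509_certificate(
            base64.b64decode(material["certificate"])
        )
        statement = base64.b64decode(envelope["statement"])
        signature = base64.b64decode(envelope["signature"])

        try:
            certificate.public_key().verify(
                signature,
                pae(b"application/vnd.in-toto+json", statement),
                ec.ECDSA(hashes.SHA256()),
            )
        except InvalidSignature:
            return False
        return True
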
+ +Future PEPs may change this suite (and the overall shape of the attestation +object) by selecting a new version number. + +.. _payload-and-signature-generation: + +Attestation statement and signature generation +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The *attestation statement* is the actual claim that is cryptographically signed +over within the attestation object (i.e., the ``envelope.statement``). + +The attestation statement is encoded as a +`v1 in-toto Statement object `__, +in JSON form. When serialized the statement is treated as an opaque binary blob, +avoiding the need for canonicalization. + +In addition to being a v1 in-toto Statement, the attestation statement is constrained +in the following ways: + +* The in-toto ``subject`` **MUST** contain only a single subject. +* ``subject[0].name`` is the distribution's filename, which **MUST** be + a valid :ref:`source distribution ` or + :ref:`wheel distribution ` filename. +* ``subject[0].digest`` **MUST** contain a SHA-256 digest. Other digests + **MAY** be present. The digests **MUST** be represented as hexadecimal strings. +* The following ``predicateType`` values are supported: + + * `SLSA Provenance `__: ``https://slsa.dev/provenance/v1`` + * `PyPI Publish Attestation `__: ``https://docs.pypi.org/attestations/publish/v1`` + +The signature over this statement is constructed using the +`v1 DSSE signature protocol `__, +with a ``PAYLOAD_TYPE`` of ``application/vnd.in-toto+json`` and a ``PAYLOAD_BODY`` of the JSON-encoded +statement above. No other ``PAYLOAD_TYPE`` is permitted. + +.. _provenance-object: + +Provenance objects +------------------ + +The index will serve uploaded attestations along with metadata that can assist +in verifying them in the form of JSON serialized objects. + +These *provenance objects* will be available via both the Simple Index +and JSON-based Simple API as described above, and will have the following layout: + +.. code-block:: json + + { + "version": 1, + "attestation_bundles": [ + { + "publisher": { + "kind": "important-ci-service", + "claims": {}, + "vendor-property": "foo", + "another-property": 123 + }, + "attestations": [ + { /* attestation 1 ... */ }, + { /* attestation 2 ... */ } + ] + } + ] + } + +or, as pseudocode: + +.. code-block:: python + + @dataclass + class Publisher: + kind: string + """ + The kind of Trusted Publisher. + """ + + claims: object | None + """ + Any context-specific claims retained by the index during Trusted Publisher + authentication. + """ + + _rest: object + """ + Each publisher object is open-ended, meaning that it MAY contain additional + fields beyond the ones specified explicitly above. This field signals that, + but is not itself present. + """ + + @dataclass + class AttestationBundle: + publisher: Publisher + """ + The publisher associated with this set of attestations. + """ + + attestations: list[Attestation] + """ + The set of attestations included in this bundle. + """ + + @dataclass + class Provenance: + version: Literal[1] + """ + The provenance object's version, which is always 1. + """ + + attestation_bundles: list[AttestationBundle] + """ + One or more attestation "bundles". + """ + +* ``version`` is ``1``. Like attestation objects, provenance objects are + versioned, and this PEP only defines version ``1``. +* ``attestation_bundles`` is a **required** JSON array, containing one + or more "bundles" of attestations. 
Each bundle corresponds to a + signing identity (such as a Trusted Publishing identity), and contains + one or more attestation objects. + + As noted in the ``Publisher`` model, + each ``AttestationBundle.publisher`` object is specific to its Trusted Publisher + but must include at minimum: + + * A ``kind`` key, which **MUST** be a JSON string that uniquely identifies the + kind of Trusted Publisher. + * A ``claims`` key, which **MUST** be a JSON object containing any context-specific + claims retained by the index during Trusted Publisher authentication. + + All other keys in the publisher object are publisher-specific. + + Each array of attestation objects is a superset of the ``attestations`` + array supplied by the uploaded through the ``attestations`` field at upload + time, as described in :ref:`upload-endpoint` and + :ref:`changes-to-provenance-objects`. + +.. _changes-to-provenance-objects: + +Changes to provenance objects +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Provenance objects are *not* immutable, and may change over time. Reasons +for changes to the provenance object include but are not limited to: + +* Addition of new attestations for a pre-existing signing identity: the index + **MAY** choose to allow additional attestations by pre-existing signing + identities, such as newer attestation versions for already uploaded + files. + +* Addition of new signing identities and associated attestations: the index + **MAY** choose to support attestations from sources other than the file's + uploader, such as third-party auditors or the index itself. These attestations + may be performed asynchronously, requiring the index to insert them into + the provenance object *post facto*. + +.. _attestation-verification: + +Attestation verification +------------------------ + +Verifying an attestation object against a distribution file requires verification of each of the +following: + +* ``version`` is ``1``. The verifier **MUST** reject any other version. +* ``verification_material.certificate`` is a valid signing certificate, as + issued by an *a priori* trusted authority (such as a root of trust already + present within the verifying client). +* ``verification_material.certificate`` identifies an appropriate signing + subject, such as the machine identity of the Trusted Publisher that published + the package. +* ``envelope.statement`` is a valid in-toto v1 Statement, with a subject + and digest that **MUST** match the distribution's filename and contents. + For the distribution's filename, matching **MUST** be performed by parsing + using the appropriate source distribution or wheel filename format, as + the statement's subject may be equivalent but normalized. +* ``envelope.signature`` is a valid signature for ``envelope.statement`` + corresponding to ``verification_material.certificate``, + as reconstituted via the + `v1 DSSE signature protocol `__. + +In addition to the above required steps, a verifier **MAY** additionally verify +``verification_material.transparency_entries`` on a policy basis, e.g. requiring +at least one transparency log entry or a threshold of entries. When verifying +transparency entries, the verifier **MUST** confirm that the inclusion time for +each entry lies within the signing certificate's validity period. + +.. _appendix: + +Appendix: Data models for Transparency Log Entries +==================================================== + +This appendix contains pseudocoded data models for transparency log entries +in attestation objects. 
Each transparency log entry serves as a source +of signed inclusion time, and can be verified either online or offline. + +.. code-block:: python + + @dataclass + class TransparencyLogEntry: + log_index: int + """ + The global index of the log entry, used when querying the log. + """ + + log_id: str + """ + An opaque, unique identifier for the log. + """ + + entry_kind: str + """ + The kind (type) of log entry. + """ + + entry_version: str + """ + The version of the log entry's submitted format. + """ + + integrated_time: int + """ + The UNIX timestamp from the log from when the entry was persisted. + """ + + inclusion_proof: InclusionProof + """ + The actual inclusion proof of the log entry. + """ + + + @dataclass + class InclusionProof: + log_index: int + """ + The index of the entry in the tree it was written to. + """ + + root_hash: str + """ + The digest stored at the root of the Merkle tree at the time of proof + generation. + """ + + tree_size: int + """ + The size of the Merkle tree at the time of proof generation. + """ + + hashes: list[str] + """ + A list of hashes required to complete the inclusion proof, sorted + in order from leaf to root. The leaf and root hashes are not themselves + included in this list; the root is supplied via `root_hash` and the client + must calculate the leaf hash. + """ + + checkpoint: str + """ + The signed tree head's signature, at the time of proof generation. + """ + + cosigned_checkpoints: list[str] + """ + Cosigned checkpoints from zero or more log witnesses. + """ diff --git a/source/specifications/index.rst b/source/specifications/index.rst index c8d2a3bed..c375654a2 100644 --- a/source/specifications/index.rst +++ b/source/specifications/index.rst @@ -15,3 +15,6 @@ and for proposing new ones, is documented on section-installation-metadata section-distribution-formats section-package-indices + section-python-description-formats + section-reproducible-environments + schemas/index.rst diff --git a/source/specifications/inline-script-metadata.rst b/source/specifications/inline-script-metadata.rst index f40b9ac4a..6fa832a3e 100644 --- a/source/specifications/inline-script-metadata.rst +++ b/source/specifications/inline-script-metadata.rst @@ -79,7 +79,7 @@ script metadata (dependency data and tool configuration). This document MAY include the top-level fields ``dependencies`` and ``requires-python``, and MAY optionally include a ``[tool]`` table. -The ``[tool]`` MAY be used by any tool, script runner or otherwise, to configure +The ``[tool]`` table MAY be used by any tool, script runner or otherwise, to configure behavior. It has the same semantics as the :ref:`[tool] table in pyproject.toml `. diff --git a/source/specifications/license-expression.rst b/source/specifications/license-expression.rst new file mode 100644 index 000000000..50860b7aa --- /dev/null +++ b/source/specifications/license-expression.rst @@ -0,0 +1,56 @@ +================== +License Expression +================== + +:pep:`639` defined a new :ref:`pyproject.toml's license ` +value and added a corresponding :ref:`core metadata License-Expression field +`. +This specification defines which license expressions are acceptable. + + +Specification +============= + +License can be defined as a text string that is a valid SPDX +:term:`license expression `, +as documented in the `SPDX specification `__, +either Version 2.2 or a later compatible version. 
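
Tools that need to validate or normalize such expressions do not have to
implement SPDX parsing themselves. As a non-normative sketch, recent releases
of the ``packaging`` project ship a ``packaging.licenses`` module for this
purpose (the helper and exception names below reflect that library and may
evolve):

.. code-block:: python

    from packaging.licenses import (
        InvalidLicenseExpression,
        canonicalize_license_expression,
    )

    for candidate in (
        "MIT AND (Apache-2.0 OR BSD-2-Clause)",  # valid
        "Use-it-after-midnight",                 # invalid: no LicenseRef- prefix
    ):
        try:
            # Returns the normalized expression, raising on unknown identifiers
            # or malformed syntax.
            print(canonicalize_license_expression(candidate))
        except InvalidLicenseExpression as error:
            print(f"rejected {candidate!r}: {error}")
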
+ +A license expression can use the following license identifiers: + +- Any SPDX-listed license short-form identifiers that are published in + the `SPDX License List `__, + version 3.17 or any later compatible version. + +- The custom ``LicenseRef-[idstring]`` string(s), where ``[idstring]`` is + a unique string containing letters, numbers, ``.`` and/or ``-``, + to identify licenses that are not included in the SPDX license list. + The custom identifiers must follow the SPDX specification, + `clause 10.1 `__ of the given specification version. + + +Examples of valid license expressions: + +.. code-block:: yaml + + MIT + BSD-3-Clause + MIT AND (Apache-2.0 OR BSD-2-Clause) + MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause) + GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause + LicenseRef-Special-License OR CC0-1.0 OR Unlicense + LicenseRef-Proprietary + + +Examples of invalid license expressions: + +.. code-block:: yaml + + Use-it-after-midnight # No `LicenseRef` prefix + Apache-2.0 OR 2-BSD-Clause # 2-BSD-Clause is not a valid SPDX identifier + LicenseRef-License with spaces # spaces are not allowed + LicenseRef-License_with_underscores # underscore are not allowed + +.. _spdxcustom: https://spdx.github.io/spdx-spec/v2.2.2/other-licensing-information-detected/ +.. _spdxlist: https://spdx.org/licenses/ +.. _spdxpression: https://spdx.github.io/spdx-spec/v2.2.2/SPDX-license-expressions/ diff --git a/source/specifications/name-normalization.rst b/source/specifications/name-normalization.rst index ba3246b63..560d956b5 100644 --- a/source/specifications/name-normalization.rst +++ b/source/specifications/name-normalization.rst @@ -17,7 +17,7 @@ underscore and hyphen. It must start and end with a letter or number. This means that valid project names are limited to those which match the following regex (run with :py:data:`re.IGNORECASE`):: - ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$ + ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])\Z .. _name-normalization: @@ -53,3 +53,6 @@ History :pep:`503 <503#normalized-names>`. - November 2015: The specification of valid names was approved through :pep:`508 <508#names>`. +- August 2025: The suggested name validation regex was fixed to match the field + specification (it previously finished with ``$`` instead of ``\Z``, + incorrectly permitting trailing newlines) diff --git a/source/specifications/platform-compatibility-tags.rst b/source/specifications/platform-compatibility-tags.rst index 381b84ca9..b4c14a4c0 100644 --- a/source/specifications/platform-compatibility-tags.rst +++ b/source/specifications/platform-compatibility-tags.rst @@ -82,6 +82,11 @@ decide how to best use the ABI tag. Platform Tag ============ +.. important:: + Platform tags are dependent on the versioning of the operating system or + platform they represent and may change over time as the underlying platform + changes its versioning. + Basic platform tags ------------------- @@ -109,8 +114,8 @@ subset of Linux platforms, and allows building wheels tagged with the ``manylinux`` platform tag which can be used across most common Linux distributions. -The current standard is the future-proof ``manylinux_x_y`` standard. It defines -tags of the form ``manylinux_x_y_arch``, where ``x`` and ``y`` are glibc major +The current standard is the future-proof :file:`manylinux_{x}_{y}` standard. It defines +tags of the form :file:`manylinux_{x}_{y}_{arch}`, where ``x`` and ``y`` are glibc major and minor versions supported (e.g. 
``manylinux_2_24_xxx`` should work on any distro using glibc 2.24+), and ``arch`` is the architecture, matching the value of :py:func:`sysconfig.get_platform()` on the system as in the "simple" form above. @@ -151,7 +156,7 @@ auditwheel ``>=1.0.0`` ``>=2.0.0`` ``>=3.0.0`` ``>=3.3.0`` [# The ``musllinux`` family of tags is similar to ``manylinux``, but for Linux platforms that use the musl_ libc rather than glibc (a prime example being Alpine -Linux). The schema is ``musllinux_x_y_arch``, supporting musl ``x.y`` and higher +Linux). The schema is :file:`musllinux_{x}_{y}_{arch}`, supporting musl ``x.y`` and higher on the architecture ``arch``. The musl version values can be obtained by executing the musl libc shared @@ -189,6 +194,109 @@ There are currently two possible ways to find the musl library’s location that Python interpreter is running on, either with the system ldd_ command, or by parsing the ``PT_INTERP`` section’s value from the executable’s ELF_ header. +.. _macos: + +macOS +----- + +macOS uses the ``macosx`` family of tags (the ``x`` suffix is a historical +artefact of Apple's official macOS naming scheme). The schema for compatibility +tags is :file:`macosx_{x}_{y}_{arch}`, indicating that the wheel is compatible +with macOS ``x.y`` or later on the architecture ``arch``. + +For macOS 10, the tag is :file:`macosx_10_{y}_{arch}`, where ``y`` corresponds +to the minor version number of the macOS release. For macOS 11 and higher, the +tag is :file:`macosx_{x}_0_{arch}`, where ``x`` corresponds to the major +version number of the macOS release. Following the published macOS major +versions, the ``x`` value is either ``10 <= x <= 15``, or ``>=26`` and +corresponding to the year of the macOS release. For example, +``macosx_11_0_arm64`` indicates compatibility with macOS 11 or later. + +macOS binaries can be compiled for a single architecture, or can include support +for multiple architectures in the same binary (sometimes called "fat" binaries). +To indicate support for a single architecture, the value of ``arch`` must match +the value of :py:func:`platform.machine()` on the system. To indicate +support multiple architectures, the ``arch`` tag should be an identifier from +the following list that describes the set of supported architectures: + +============== ======================================== +``arch`` Architectures supported +============== ======================================== +``universal2`` ``arm64``, ``x86_64`` +``universal`` ``i386``, ``ppc``, ``ppc64``, ``x86_64`` +``intel`` ``i386``, ``x86_64`` +``fat`` ``i386``, ``ppc`` +``fat3`` ``i386``, ``ppc``, ``x86_64`` +``fat64`` ``ppc64``, ``x86_64`` +============== ======================================== + +The minimum supported macOS version may also be constrained by architecture. For +example, macOS 11 (Big Sur) was the first release to support arm64. These +additional constraints are enforced transparently by the macOS compilation +toolchain when building binaries that support multiple architectures. + +.. _android: + +Android +------- + +Android uses the schema :file:`android_{apilevel}_{abi}`, indicating +compatibility with the given Android API level or later, on the given ABI. For +example, ``android_27_arm64_v8a`` indicates support for API level 27 or later, +on ``arm64_v8a`` devices. Android makes no distinction between physical devices +and emulated devices. + +The API level should be a positive integer. This is *not* the same thing as +the user-facing Android version. 
For example, the release known as Android +12 (code named "Snow Cone") uses API level 31 or 32, depending on the specific +Android version in use. Android's release documentation contains the `full list +of Android versions and their corresponding API levels +`__. + +There are 4 `supported ABIs `__. +Normalized according to the rules above, they are: + +* ``armeabi_v7a`` +* ``arm64_v8a`` +* ``x86`` +* ``x86_64`` + +Virtually all current physical devices use one of the ARM architectures. ``x86`` +and ``x86_64`` are supported for use in the emulator. ``x86`` has not been +supported as a development platform since 2020, and no new emulator images have +been released since then. + +.. _ios: + +iOS +--- + +iOS uses the schema :file:`ios_{x}_{y}_{arch}_{sdk}`, indicating compatibility with +iOS ``x.y`` or later, on the ``arch`` architecture, using the ``sdk`` SDK. + +The value of ``x`` and ``y`` correspond to the major and minor version number of +the iOS release, respectively. They must both be positive integers. The version +number always includes a major *and* minor version, even if Apple's official +version numbering only refers to the major value. For example, a +``ios_13_0_arm64_iphonesimulator`` indicates compatibility with iOS 13 or later. + +The value of ``arch`` must match the value of :py:func:`platform.machine()` on +the system. + +The value of ``sdk`` must be either ``iphoneos`` (for physical devices), or +``iphonesimulator`` (for device simulators). These SDKs have the same API +surface, but are incompatible at the binary level, even if they are running on +the same CPU architecture. Code compiled for an arm64 simulator will not run on +an arm64 device. + +The combination of :file:`{arch}_{sdk}` is referred to as the "multiarch". There +are three possible values for multiarch: + +* ``arm64_iphoneos``, for physical iPhone/iPad devices. This includes every + iOS device manufactured since ~2015; +* ``arm64_iphonesimulator``, for simulators running on Apple Silicon macOS + hardware; and +* ``x86_64_iphonesimulator``, for simulators running on x86_64 hardware. Use === @@ -249,14 +357,14 @@ Compressed Tag Sets To allow for compact filenames of bdists that work with more than one compatibility tag triple, each tag in a filename can instead be a -'.'-separated, sorted, set of tags. For example, pip, a pure-Python +'.'-separated, sorted, collection of tags. For example, pip, a pure-Python package that is written to run under Python 2 and 3 with the same source code, could distribute a bdist with the tag ``py2.py3-none-any``. The full list of simple tags is:: for x in pytag.split('.'): for y in abitag.split('.'): - for z in archtag.split('.'): + for z in platformtag.split('.'): yield '-'.join((x, y, z)) A bdist format that implements this scheme should include the expanded @@ -339,9 +447,10 @@ History - November 2019: The ``manylinux_x_y`` perennial tag was approved through :pep:`600`. - April 2021: The ``musllinux_x_y`` tag was approved through :pep:`656`. - +- December 2023: The tags for iOS were approved through :pep:`730`. +- March 2024: The tags for Android were approved through :pep:`738`. .. _musl: https://musl.libc.org -.. _ldd: https://www.unix.com/man-page/posix/1/ldd/ +.. _ldd: https://www.man7.org/linux/man-pages/man1/ldd.1.html .. 
_elf: https://refspecs.linuxfoundation.org/elf/elf.pdf diff --git a/source/specifications/project-status-markers.rst b/source/specifications/project-status-markers.rst new file mode 100644 index 000000000..90df74441 --- /dev/null +++ b/source/specifications/project-status-markers.rst @@ -0,0 +1,89 @@ +.. _project-status-markers: + +====================== +Project Status Markers +====================== + +.. note:: + + This specification was originally defined in + :pep:`792`. + +.. note:: + + :pep:`792` includes changes to the HTML and JSON index APIs. + These changes are documented in the :ref:`simple-repository-api` + under :ref:`HTML - Project Detail ` + and :ref:`JSON - Project Detail `. + +Specification +============= + +A project always has exactly one status. If no status is explicitly noted, +then the project is considered to be in the ``active`` state. + +Indices **MAY** implement any subset of the status markers specified, +as applicable to their needs. + +This standard does not prescribe *which* principals (i.e. project maintainers, +index administrators, etc.) are allowed to set and unset which statuses. + +``active`` +---------- + +Description: The project is active. This is the default status for a project. + +Index semantics: + +* The index hosting the project **MUST** allow uploads of new distributions to + the project. +* The index **MUST** offer existing distributions of the project for download. + +Installer semantics: none. + +``archived`` +------------ + +Description: The project does not expect to be updated in the future. + +Index semantics: + +* The index hosting the project **MUST NOT** allow uploads of new distributions to + the project. +* The index **MUST** offer existing distributions of the project for download. + +Installer semantics: + +* Installers **MAY** produce warnings about a project's archival. + +``quarantined`` +--------------- + +Description: The project is considered generally unsafe for use, e.g. due to +malware. + +Index semantics: + +* The index hosting the project **MUST NOT** allow uploads of new distributions to + the project. +* The index **MUST NOT** offer any distributions of the project for download. + +Installer semantics: + +* Installers **MAY** produce warnings about a project's quarantine, although + doing so is effectively moot (as the index will not offer any distributions + for installation). + +``deprecated`` +-------------- + +Description: The project is considered obsolete, and may have been superseded +by another project. + +Index semantics: + +* This status shares the same semantics as ``active``. + +Installer semantics: + +* Installers **MAY** produce warnings about a project's deprecation. diff --git a/source/specifications/pylock-toml.rst b/source/specifications/pylock-toml.rst new file mode 100644 index 000000000..342e608c5 --- /dev/null +++ b/source/specifications/pylock-toml.rst @@ -0,0 +1,842 @@ +.. _pylock-toml-spec: +.. _lock-file-spec: + +============================= +``pylock.toml`` Specification +============================= + +The ``pylock.toml`` file format is for specifying dependencies to enable +reproducible installation in a Python environment. + +.. note:: This specification was originally defined in :pep:`751`. + + +--------- +File Name +--------- + +A lock file MUST be named :file:`pylock.toml` or match the regular expression +``r"^pylock\.([^.]+)\.toml$"`` if a name for the lock file is desired or if +multiple lock files exist (i.e. 
the regular expression +``r"^pylock\.([^.]+\.)?toml$"`` for any file name). The prefix and suffix of a +named file MUST be lowercase when possible, for easy detection and removal, +e.g.: + +.. code-block:: Python + + if len(filename) > 11 and filename.startswith("pylock.") and filename.endswith(".toml"): + name = filename.removeprefix("pylock.").removesuffix(".toml") + +The expectation is that services that automatically install from lock files will +search for: + +1. The lock file with the service's name and doing the default install +2. A multi-use :file:`pylock.toml` with a dependency group with the name of the service +3. The default install of :file:`pylock.toml` + +E.g. a cloud host service named "spam" would first look for +:file:`pylock.spam.toml` to install from, and if that file didn't exist then install +from :file:`pylock.toml` and look for a dependency group named "spam" to use if +present. + +The lock file(s) SHOULD be located in the directory as appropriate for the scope +of the lock file. Locking against a single :file:`pyproject.toml`, for instance, +would place the :file:`pylock.toml` in the same directory. If the lock file covered +multiple projects in a monorepo, then the expectation is the :file:`pylock.toml` +file would be in the directory that held all the projects being locked. + + +----------- +File Format +----------- + +The format of the file is TOML_. + +Tools SHOULD write their lock files in a consistent way to minimize noise in +diff output. Keys in tables -- including the top-level table -- SHOULD be +recorded in a consistent order (if inspiration is desired, this specification has tried to +write down keys in a logical order). As well, tools SHOULD sort arrays in +consistent order. Usage of inline tables SHOULD also be kept consistent. + + +.. _pylock-lock-version: + +``lock-version`` +================ + +- **Type**: string; value of ``"1.0"`` +- **Required?**: yes +- **Inspiration**: :ref:`core-metadata-metadata-version` +- Record the file format version that the file adheres to. +- This PEP specifies the initial version -- and only valid value until future + updates to the standard change it -- as ``"1.0"``. +- If a tool supports the major version but not the minor version, a tool + SHOULD warn when an unknown key is seen. +- If a tool doesn't support a major version, it MUST raise an error. + + +.. _pylock-environments: + +``environments`` +================ + +- **Type**: Array of strings +- **Required?**: no +- **Inspiration**: uv_ +- A list of :ref:`dependency-specifiers-environment-markers` for + which the lock file is considered compatible with. +- Tools SHOULD write exclusive/non-overlapping environment markers to ease in + understanding. + + +.. _pylock-requires-python: + +``requires-python`` +=================== + +- **Type**: string +- **Required?**: no +- **Inspiration**: PDM_, Poetry_, uv_ +- Specifies the :ref:`core-metadata-requires-python` for the minimum + Python version compatible for any environment supported by the lock file + (i.e. the minimum viable Python version for the lock file). + + +.. _pylock-extras: + +``extras`` +========== + +- **Type**: Array of strings +- **Required?**: no; defaults to ``[]`` +- **Inspiration**: :ref:`core-metadata-provides-extra` +- The list of :ref:`extras ` supported + by this lock file. +- Lockers MAY choose to not support writing lock files that support extras and + dependency groups (i.e. tools may only support exporting a single-use lock + file). 
+- Tools supporting extras MUST also support dependency groups. +- Tools should explicitly set this key to an empty array to signal that the + inputs used to generate the lock file had no extras (e.g. a + :ref:`pyproject.toml ` file had no + :ref:`[project.optional-dependencies] ` + table), signalling that the lock file is, in effect, multi-use even if it only + looks to be single-use. + + +.. _pylock-dependency-groups: + +``dependency-groups`` +===================== + +- **Type**: Array of strings +- **Required?**: no; defaults to ``[]`` +- **Inspiration**: :ref:`pyproject-tool-table` +- The list of :ref:`dependency-groups` publicly supported by this lock + file (i.e. dependency groups users are expected to be able to specify via a + tool's UI). +- Lockers MAY choose to not support writing lock files that support extras and + dependency groups (i.e. tools may only support exporting a single-use lock + file). +- Tools supporting dependency groups MUST also support extras. +- Tools SHOULD explicitly set this key to an empty array to signal that the + inputs used to generate the lock file had no dependency groups (e.g. a + :ref:`pyproject.toml ` file had no + :ref:`[dependency-groups] ` table), signalling that the + lock file is, in effect, multi-use even if it only looks to be single-use. + + +.. _pylock-default-groups: + +``default-groups`` +================== + +- **Type**: Array of strings +- **Required?**: no; defaults to ``[]`` +- **Inspiration**: Poetry_, PDM_ +- The name of synthetic dependency groups to represent what should be installed + by default (e.g. what + :ref:`[project.dependencies] ` implicitly + represents). +- Meant to be used in situations where :ref:`pylock-packages-marker` + necessitates such a group to exist. +- The groups listed by this key SHOULD NOT be listed in + :ref:`pylock-dependency-groups` as the groups are not meant to be directly + exposed to users by name but instead via an installer's UI. + + +.. _pylock-created-by: + +``created-by`` +============== + +- **Type**: string +- **Required?**: yes +- **Inspiration**: Tools with their name in their lock file name +- Records the name of the tool used to create the lock file. +- Tools MAY use the :ref:`pylock-tool` table to record enough details that it + can be inferred what inputs were used to create the lock file. +- Tools SHOULD record the normalized name of the tool if it is available as a + Python package to facilitate finding the tool. + + +.. _pylock-packages: + +``[[packages]]`` +================ + +- **Type**: array of tables +- **Required?**: yes +- **Inspiration**: PDM_, Poetry_, uv_ +- An array containing all packages that *may* be installed. +- Packages MAY be listed multiple times with varying data, but all packages to + be installed MUST narrow down to a single entry at install time. + + +.. _pylock-packages-name: + +``packages.name`` +----------------- + +- **Type**: string +- **Required?**: yes +- **Inspiration**: :ref:`core-metadata-name` +- The name of the package :ref:`normalized `. + + +.. _pylock-packages-version: + +``packages.version`` +-------------------- + +- **Type**: string +- **Required?**: no +- **Inspiration**: :ref:`core-metadata-version` +- The version of the package. +- The version SHOULD be specified when the version is known to be stable + (i.e. when an :ref:`sdist ` or + :ref:`wheels ` are specified). +- The version MUST NOT be included when it cannot be guaranteed to be consistent + with the code used (i.e. when a + :ref:`source tree ` is used). + + +.. 
_pylock-packages-marker:
+
+``packages.marker``
+-------------------
+
+- **Type**: string
+- **Required?**: no
+- **Inspiration**: PDM_
+- The
+  :ref:`environment marker `
+  which specifies when the package should be installed.
+
+
+.. _pylock-packages-requires-python:
+
+``packages.requires-python``
+----------------------------
+
+- **Type**: string
+- **Required?**: no
+- **Inspiration**: :ref:`core-metadata-requires-python`
+- Holds the :ref:`version-specifiers` for Python version compatibility
+  for the package.
+
+
+.. _pylock-packages-dependencies:
+
+``[[packages.dependencies]]``
+-----------------------------
+
+- **Type**: array of tables
+- **Required?**: no
+- **Inspiration**: PDM_, Poetry_, uv_
+- Records the other entries in :ref:`pylock-packages` which are direct
+  dependencies of this package.
+- Each entry is a table which contains the minimum information required to tell
+  which other package entry it corresponds to where doing a key-by-key
+  comparison would find the appropriate package with no ambiguity (e.g. if there
+  are two entries for the ``spam`` package, then you can include the version
+  number like ``{name = "spam", version = "1.0.0"}``, or by source like
+  ``{name = "spam", vcs = { url = "..."}}``).
+- Tools MUST NOT use this information when doing installation; it is purely
+  informational for auditing purposes.
+
+
+.. _pylock-packages-vcs:
+
+``[packages.vcs]``
+------------------
+
+- **Type**: table
+- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-directory`,
+  :ref:`pylock-packages-archive`, :ref:`pylock-packages-sdist`, and
+  :ref:`pylock-packages-wheels`
+- **Inspiration**: :ref:`direct-url-data-structure`
+- Record the version control system details for the
+  :ref:`source tree ` it
+  contains.
+- Tools MAY choose to not support version control systems, both from a locking
+  and/or installation perspective.
+- Tools MAY choose to only support a subset of the available VCS types.
+- Tools SHOULD provide a way for users to opt in/out of using version control
+  systems.
+- Installation from a version control system is considered originating from a
+  :ref:`direct URL reference `.
+
+
+.. _pylock-packages-vcs-type:
+
+``packages.vcs.type``
+'''''''''''''''''''''
+
+- **Type**: string; supported values specified in
+  :ref:`direct-url-data-structure-registered-vcs`
+- **Required?**: yes
+- **Inspiration**: :ref:`direct-url-data-structure-vcs`
+- The type of version control system used.
+
+
+.. _pylock-packages-vcs-url:
+
+``packages.vcs.url``
+''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: if :ref:`pylock-packages-vcs-path` is not specified
+- **Inspiration**: :ref:`direct-url-data-structure-vcs`
+- The URL_ to the source tree.
+
+
+.. _pylock-packages-vcs-path:
+
+``packages.vcs.path``
+'''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: if :ref:`pylock-packages-vcs-url` is not specified
+- **Inspiration**: :ref:`direct-url-data-structure-vcs`
+- The path to the local directory of the source tree.
+- If a relative path is used it MUST be relative to the location of this file.
+- If the path is relative it MAY use POSIX-style path separators explicitly
+  for portability.
+
+
+.. _pylock-packages-vcs-requested-revision:
+
+``packages.vcs.requested-revision``
+'''''''''''''''''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: no
+- **Inspiration**: :ref:`direct-url-data-structure-vcs`
+- The branch/tag/ref/commit/revision/etc. that the user requested.
+- This is purely informational and to facilitate writing the
+  :ref:`direct-url-data-structure`; it MUST NOT be used to checkout
+  the repository.
+
+
+.. _pylock-packages-vcs-commit-id:
+
+``packages.vcs.commit-id``
+''''''''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: yes
+- **Inspiration**: :ref:`direct-url-data-structure-vcs`
+- The exact commit/revision number that is to be installed.
+- If the VCS supports commit-hash based revision identifiers, such a
+  commit-hash MUST be used as the commit ID in order to reference an
+  immutable version of the source code.
+
+
+.. _pylock-packages-vcs-subdirectory:
+
+``packages.vcs.subdirectory``
+'''''''''''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: no
+- **Inspiration**: :ref:`direct-url-data-structure-subdirectories`
+- The subdirectory within the
+  :ref:`source tree ` where
+  the project root of the project is (e.g. the location of the
+  :ref:`pyproject.toml ` file).
+- The path MUST be relative to the root of the source tree structure.
+
+
+.. _pylock-packages-directory:
+
+``[packages.directory]``
+------------------------
+
+- **Type**: table
+- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-vcs`,
+  :ref:`pylock-packages-archive`, :ref:`pylock-packages-sdist`, and
+  :ref:`pylock-packages-wheels`
+- **Inspiration**: :ref:`direct-url-data-structure-local-directory`
+- Record the local directory details for the
+  :ref:`source tree ` it
+  contains.
+- Tools MAY choose to not support local directories, both from a locking
+  and/or installation perspective.
+- Tools SHOULD provide a way for users to opt in/out of using local directories.
+- Installation from a directory is considered originating from a
+  :ref:`direct URL reference `.
+
+
+.. _pylock-packages-directory-path:
+
+``packages.directory.path``
+'''''''''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: yes
+- **Inspiration**: :ref:`direct-url-data-structure-local-directory`
+- The local directory where the source tree is.
+- If the path is relative it MUST be relative to the location of the lock file.
+- If the path is relative it MAY use POSIX-style path separators for
+  portability.
+
+
+.. _pylock-packages-directory-editable:
+
+``packages.directory.editable``
+'''''''''''''''''''''''''''''''
+
+- **Type**: boolean
+- **Required?**: no; defaults to ``false``
+- **Inspiration**: :ref:`direct-url-data-structure-local-directory`
+- A flag representing whether the source tree was an editable install at lock
+  time.
+- An installer MAY choose to ignore this flag if user actions or context would
+  make an editable install unnecessary or undesirable (e.g. a container image
+  that will not be mounted for development purposes but instead deployed to
+  production where it would be treated as read-only).
+
+
+.. _pylock-packages-directory-subdirectory:
+
+``packages.directory.subdirectory``
+'''''''''''''''''''''''''''''''''''
+
+See :ref:`pylock-packages-vcs-subdirectory`.
+
+
+.. _pylock-packages-archive:
+
+``[packages.archive]``
+----------------------
+
+- **Type**: table
+- **Required?**: no
+- **Inspiration**: :ref:`direct-url-data-structure-archive`
+- A direct reference to an archive file to install from
+  (this can include wheels and sdists, as well as other archive formats
+  containing a source tree).
+- Tools MAY choose to not support archive files, both from a locking
+  and/or installation perspective.
+- Tools SHOULD provide a way for users to opt in/out of using archive files.
+- Installation from an archive file is considered originating from a
+  :ref:`direct URL reference `.
+
+
+.. _pylock-packages-archive-url:
+
+``packages.archive.url``
+''''''''''''''''''''''''
+
+See :ref:`pylock-packages-vcs-url`.
+
+
+.. _pylock-packages-archive-path:
+
+``packages.archive.path``
+'''''''''''''''''''''''''
+
+See :ref:`pylock-packages-vcs-path`.
+
+
+.. _pylock-packages-archive-size:
+
+``packages.archive.size``
+'''''''''''''''''''''''''
+
+- **Type**: integer
+- **Required?**: no
+- **Inspiration**: uv_, :ref:`simple-repository-api`
+- The size of the archive file.
+- Tools SHOULD provide the file size when reasonably possible (e.g. the file
+  size is available via the Content-Length_ header from a HEAD_ HTTP request).
+
+
+.. _pylock-packages-archive-upload-time:
+
+``packages.archive.upload-time``
+''''''''''''''''''''''''''''''''
+
+- **Type**: datetime
+- **Required?**: no
+- **Inspiration**: :ref:`simple-repository-api`
+- The time the file was uploaded.
+- The date and time MUST be recorded in UTC.
+
+
+.. _pylock-packages-archive-hashes:
+
+``[packages.archive.hashes]``
+'''''''''''''''''''''''''''''
+
+- **Type**: Table of strings
+- **Required?**: yes
+- **Inspiration**: PDM_, Poetry_, uv_, :ref:`simple-repository-api`
+- A table listing known hash values of the file where the key is the hash
+  algorithm and the value is the hash value.
+- The table MUST contain at least one entry.
+- Hash algorithm keys SHOULD be lowercase.
+- At least one secure algorithm from :py:data:`hashlib.algorithms_guaranteed`
+  SHOULD always be included (at time of writing, sha256 specifically is
+  recommended).
+
+
+.. _pylock-packages-archive-subdirectory:
+
+``packages.archive.subdirectory``
+''''''''''''''''''''''''''''''''''
+
+See :ref:`pylock-packages-vcs-subdirectory`.
+
+
+.. _pylock-packages-index:
+
+``packages.index``
+------------------
+
+- **Type**: string
+- **Required?**: no
+- **Inspiration**: uv_
+- The base URL for the package index from :ref:`simple-repository-api`
+  where the sdist and/or wheels were found (e.g. ``https://pypi.org/simple/``).
+- When possible, this SHOULD be specified to assist with generating
+  `software bill of materials`_ -- aka SBOMs -- and to assist in finding a file
+  if a URL ceases to be valid.
+- Tools MAY support installing from an index if the URL recorded for a specific
+  file is no longer valid (e.g. returns a 404 HTTP error code).
+
+
+.. _pylock-packages-sdist:
+
+``[packages.sdist]``
+--------------------
+
+- **Type**: table
+- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-vcs`,
+  :ref:`pylock-packages-directory`, and :ref:`pylock-packages-archive`
+- **Inspiration**: uv_
+- Details of a :ref:`source-distribution-format-sdist` for the
+  package.
+- Tools MAY choose to not support sdist files, both from a locking
+  and/or installation perspective.
+- Tools SHOULD provide a way for users to opt in/out of using sdist files.
+
+
+.. _pylock-packages-sdist-name:
+
+``packages.sdist.name``
+'''''''''''''''''''''''
+
+- **Type**: string
+- **Required?**: no, not when the last component of
+  :ref:`pylock-packages-sdist-path`/ :ref:`pylock-packages-sdist-url` would be
+  the same value
+- **Inspiration**: PDM_, Poetry_, uv_
+- The file name of the :ref:`source-distribution-format-sdist` file.
+
+
+.. _pylock-packages-sdist-upload-time:
+
+``packages.sdist.upload-time``
+''''''''''''''''''''''''''''''
+
+See :ref:`pylock-packages-archive-upload-time`.
+
+
+..
_pylock-packages-sdist-url: + +``packages.sdist.url`` +'''''''''''''''''''''' + +See :ref:`pylock-packages-archive-url`. + + +.. _pylock-packages-sdist-path: + +``packages.sdist.path`` +''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-path`. + + +.. _pylock-packages-sdist-size: + +``packages.sdist.size`` +''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-size`. + + +.. _pylock-packages-sdist-hashes: + +``packages.sdist.hashes`` +''''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-hashes`. + + + +.. _pylock-packages-wheels: + +``[[packages.wheels]]`` +----------------------- + +- **Type**: array of tables +- **Required?**: no; mutually-exclusive with :ref:`pylock-packages-vcs`, + :ref:`pylock-packages-directory`, and :ref:`pylock-packages-archive` +- **Inspiration**: PDM_, Poetry_, uv_ +- For recording the wheel files as specified by + :ref:`binary-distribution-format` for the package. +- Tools MUST support wheel files, both from a locking and installation + perspective. + + +.. _pylock-packages-wheels-name: + +``packages.wheels.name`` +'''''''''''''''''''''''' + +- **Type**: string +- **Required?**: no, not when the last component of + :ref:`pylock-packages-wheels-path`/ :ref:`pylock-packages-wheels-url` would be + the same value +- **Inspiration**: PDM_, Poetry_, uv_ +- The file name of the :ref:`binary-distribution-format` file. + + +.. _pylock-packages-wheels-upload-time: + +``packages.wheels.upload-time`` +''''''''''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-upload-time`. + + +.. _pylock-packages-wheels-url: + +``packages.wheels.url`` +''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-url`. + + +.. _pylock-packages-wheels-path: + +``packages.wheels.path`` +'''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-path`. + + +.. _pylock-packages-wheels-size: + +``packages.wheels.size`` +'''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-size`. + + +.. _pylock-packages-wheels-hashes: + +``packages.wheels.hashes`` +'''''''''''''''''''''''''' + +See :ref:`pylock-packages-archive-hashes`. + + +.. _pylock-packages-attestation-identities: + +``[[packages.attestation-identities]]`` +--------------------------------------- + +- **Type**: array of tables +- **Required?**: no +- **Inspiration**: :ref:`provenance-object` +- A recording of the attestations for **any** file recorded for this package. +- If available, tools SHOULD include the attestation identities found. +- Publisher-specific keys are to be included in the table as-is + (i.e. top-level), following the spec at + :ref:`index-hosted-attestations`. + + +.. _pylock-packages-attestation-identities-kind: + +``packages.attestation-identities.kind`` +'''''''''''''''''''''''''''''''''''''''' + +- **Type**: string +- **Required?**: yes +- **Inspiration**: :ref:`provenance-object` +- The unique identity of the Trusted Publisher. + + +.. _pylock-packages-tool: + +``[packages.tool]`` +------------------- + +- **Type**: table +- **Required?**: no +- **Inspiration**: :ref:`pyproject-tool-table` +- Similar usage as that of the :ref:`pylock-tool` table from the + :ref:`pyproject-toml-spec`, but at the package version level instead + of at the lock file level (which is also available via :ref:`pylock-tool`). +- Data recorded in the table MUST be disposable (i.e. it MUST NOT affect + installation). + + +.. _pylock-tool: + +``[tool]`` +========== + +- **Type**: table +- **Required?**: no +- **Inspiration**: :ref:`pyproject-tool-table` +- See :ref:`pylock-packages-tool`. 
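+
+To illustrate how the two tables relate, here is a minimal, abridged sketch
+(required top-level keys such as ``lock-version`` and ``created-by``, as well
+as the package's file sources, are omitted for brevity). The tool name
+``examplelock`` and the keys recorded under it are purely hypothetical; only
+the ``[tool]`` and ``[packages.tool]`` table names come from this
+specification, and the data recorded in them remains disposable as described
+above.
+
+.. code-block:: toml
+
+    [[packages]]
+    name = "spam"
+    version = "1.0.0"
+
+    # Hypothetical locker-specific data scoped to the "spam" entry above;
+    # purely informational and safe for installers to ignore.
+    [packages.tool.examplelock]
+    resolved-from = "https://pypi.org/simple/spam/"
+
+    # Hypothetical locker-specific data for the lock file as a whole.
+    [tool.examplelock]
+    resolution-strategy = "highest"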
+ + +------- +Example +------- + +.. literalinclude:: pylock-toml/pylock.example.toml + + +------------ +Installation +------------ + +The following outlines the steps to be taken to install from a lock file +(while the requirements are prescriptive, the general steps and order are +a suggestion): + +#. Gather the extras and dependency groups to install and set ``extras`` and + ``dependency_groups`` for marker evaluation, respectively. + + #. ``extras`` SHOULD be set to the empty set by default. + #. ``dependency_groups`` SHOULD be the set created from + :ref:`pylock-default-groups` by default. + +#. Check if the metadata version specified by :ref:`pylock-lock-version` is + supported; an error or warning MUST be raised as appropriate. +#. If :ref:`pylock-requires-python` is specified, check that the environment + being installed for meets the requirement; an error MUST be raised if it is + not met. +#. If :ref:`pylock-environments` is specified, check that at least one of the + environment marker expressions is satisfied; an error MUST be raised if no + expression is satisfied. +#. For each package listed in :ref:`pylock-packages`: + + #. If :ref:`pylock-packages-marker` is specified, check if it is satisfied; + if it isn't, skip to the next package. + #. If :ref:`pylock-packages-requires-python` is specified, check if it is + satisfied; an error MUST be raised if it isn't. + #. Check that no other conflicting instance of the package has been slated to + be installed; an error about the ambiguity MUST be raised otherwise. + #. Check that the source of the package is specified appropriately (i.e. + there are no conflicting sources in the package entry); + an error MUST be raised if any issues are found. + #. Add the package to the set of packages to install. + +#. For each package to be installed: + + - If :ref:`pylock-packages-vcs` is set: + + #. Clone the repository to the commit ID specified in + :ref:`pylock-packages-vcs-commit-id`. + #. :ref:`Build ` the package, + respecting :ref:`pylock-packages-vcs-subdirectory`. + #. :ref:`Install `. + + - Else if :ref:`pylock-packages-directory` is set: + + #. :ref:`Build ` the package, + respecting :ref:`pylock-packages-directory-subdirectory`. + #. :ref:`Install `. + + - Else if :ref:`pylock-packages-archive` is set: + + #. Get the file. + #. Validate using :ref:`pylock-packages-archive-size` and + :ref:`pylock-packages-archive-hashes`. + #. :ref:`Build ` the package, + respecting :ref:`pylock-packages-archive-subdirectory`. + #. :ref:`Install `. + + - Else if there are entries for :ref:`pylock-packages-wheels`: + + #. Look for the appropriate wheel file based on + :ref:`pylock-packages-wheels-name`; if one is not found then move on to + :ref:`pylock-packages-sdist` or an error MUST be raised about a + lack of source for the project. + #. Get the file: + + - If :ref:`pylock-packages-wheels-path` is set, use it. + - Else if :ref:`pylock-packages-wheels-url` is set, try to use it; + optionally tools MAY use :ref:`pylock-packages-index` or some + tool-specific mechanism to download the selected wheel file (tools + MUST NOT try to change what wheel file to download based on what's + available; what file to install should be determined in an offline + fashion for reproducibility). + + #. Validate using :ref:`pylock-packages-wheels-size` and + :ref:`pylock-packages-wheels-hashes`. + #. :ref:`Install `. + + - Else if no :ref:`pylock-packages-wheels` file is found or + :ref:`pylock-packages-sdist` is solely set: + + #. Get the file. 
+ + - If :ref:`pylock-packages-sdist-path` is set, use it. + - Else if :ref:`pylock-packages-sdist-url` is set, try to use it; tools + MAY use :ref:`pylock-packages-index` or some tool-specific mechanism + to download the file. + + #. Validate using :ref:`pylock-packages-sdist-size` and + :ref:`pylock-packages-sdist-hashes`. + #. :ref:`Build ` the package. + #. :ref:`Install `. + + +------- +History +------- + +- April 2025: Initial version, approved via :pep:`751`. + + +.. _Content-Length: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Length +.. _Dependabot: https://docs.github.com/en/code-security/dependabot +.. _HEAD: https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD +.. _PDM: https://pypi.org/project/pdm/ +.. _pip-tools: https://pypi.org/project/pip-tools/ +.. _Poetry: https://pypi.org/project/poetry/ +.. _requirements file: +.. _requirements files: https://pip.pypa.io/en/stable/reference/requirements-file-format/ +.. _software bill of materials: https://www.cisa.gov/sbom +.. _TOML: https://toml.io/ +.. _uv: https://pypi.org/project/uv/ +.. _URL: https://url.spec.whatwg.org/ diff --git a/source/specifications/pylock-toml/pylock.example.toml b/source/specifications/pylock-toml/pylock.example.toml new file mode 100644 index 000000000..45e8731b2 --- /dev/null +++ b/source/specifications/pylock-toml/pylock.example.toml @@ -0,0 +1,60 @@ +lock-version = '1.0' +environments = ["sys_platform == 'win32'", "sys_platform == 'linux'"] +requires-python = '== 3.12' +created-by = 'mousebender' + +[[packages]] +name = 'attrs' +version = '25.1.0' +requires-python = '>= 3.8' + + [[packages.wheels]] + name = 'attrs-25.1.0-py3-none-any.whl' + upload-time = 2025-01-25T11:30:10.164985+00:00 + url = 'https://files.pythonhosted.org/packages/fc/30/d4986a882011f9df997a55e6becd864812ccfcd821d64aac8570ee39f719/attrs-25.1.0-py3-none-any.whl' + size = 63152 + hashes = {sha256 = 'c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a'} + + [[packages.attestation-identities]] + environment = 'release-pypi' + kind = 'GitHub' + repository = 'python-attrs/attrs' + workflow = 'pypi-package.yml' + +[[packages]] +name = 'cattrs' +version = '24.1.2' +requires-python = '>= 3.8' +dependencies = [ + {name = 'attrs'}, +] + + [[packages.wheels]] + name = 'cattrs-24.1.2-py3-none-any.whl' + upload-time = 2024-09-22T14:58:34.812643+00:00 + url = 'https://files.pythonhosted.org/packages/c8/d5/867e75361fc45f6de75fe277dd085627a9db5ebb511a87f27dc1396b5351/cattrs-24.1.2-py3-none-any.whl' + size = 66446 + hashes = {sha256 = '67c7495b760168d931a10233f979b28dc04daf853b30752246f4f8471c6d68d0'} + +[[packages]] +name = 'numpy' +version = '2.2.3' +requires-python = '>= 3.10' + + [[packages.wheels]] + name = 'numpy-2.2.3-cp312-cp312-win_amd64.whl' + upload-time = 2025-02-13T16:51:21.821880+00:00 + url = 'https://files.pythonhosted.org/packages/42/6e/55580a538116d16ae7c9aa17d4edd56e83f42126cb1dfe7a684da7925d2c/numpy-2.2.3-cp312-cp312-win_amd64.whl' + size = 12626357 + hashes = {sha256 = '83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d'} + + [[packages.wheels]] + name = 'numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl' + upload-time = 2025-02-13T16:50:00.079662+00:00 + url = 'https://files.pythonhosted.org/packages/39/04/78d2e7402fb479d893953fb78fa7045f7deb635ec095b6b4f0260223091a/numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl' + size = 16116679 + hashes = {sha256 = '3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe'} + 
+[tool.mousebender] +command = ['.', 'lock', '--platform', 'cpython3.12-windows-x64', '--platform', 'cpython3.12-manylinux2014-x64', 'cattrs', 'numpy'] +run-on = 2025-03-06T12:28:57.760769 diff --git a/source/specifications/pyproject-toml.rst b/source/specifications/pyproject-toml.rst index efa562a73..48f35599e 100644 --- a/source/specifications/pyproject-toml.rst +++ b/source/specifications/pyproject-toml.rst @@ -61,6 +61,10 @@ table then the default values as specified above should be used. If the table is specified but is missing required fields then the tool should consider it an error. +Tools may choose to present an error to the user if the file exists, +``[build-system]`` table is missing, and there is no clear indication +that the project should be built (e.g., no setup.py/setup.cfg or other +build configuration files, and no ``[project]`` table). To provide a type-specific representation of the resulting data from the TOML file for illustrative purposes only, the following @@ -136,8 +140,11 @@ The complete list of keys allowed in the ``[project]`` table are: - ``dynamic`` - ``entry-points`` - ``gui-scripts`` +- ``import-names`` +- ``import-namespaces`` - ``keywords`` - ``license`` +- ``license-files`` - ``maintainers`` - ``name`` - ``optional-dependencies`` @@ -148,6 +155,8 @@ The complete list of keys allowed in the ``[project]`` table are: - ``version`` +.. _pyproject-toml-name: + ``name`` -------- @@ -160,6 +169,9 @@ The name of the project. Tools SHOULD :ref:`normalize ` this name, as soon as it is read for internal consistency. + +.. _pyproject-toml-version: + ``version`` ----------- @@ -173,6 +185,8 @@ The version of the project, as defined in the Users SHOULD prefer to specify already-normalized versions. +.. _pyproject-toml-description: + ``description`` --------------- @@ -184,6 +198,8 @@ The summary description of the project in one line. Tools MAY error if this includes multiple lines. +.. _pyproject-toml-readme: + ``readme`` ---------- @@ -223,6 +239,8 @@ as supported by the :ref:`core metadata `. Otherwise tools MUST raise an error for unsupported content-types. +.. _pyproject-toml-requires-python: + ``requires-python`` ------------------- @@ -233,20 +251,82 @@ tools MUST raise an error for unsupported content-types. The Python version requirements of the project. +.. _pyproject-toml-license: + ``license`` ----------- +- TOML_ type: string +- Corresponding :ref:`core metadata ` field: + :ref:`License-Expression ` + +Text string that is a valid SPDX +:term:`license expression `, +as specified in :doc:`/specifications/license-expression`. +Tools SHOULD validate and perform case normalization of the expression. + +This key should **only** be specified if the license expression for any +and all distribution files created by a build backend using the +:file:`pyproject.toml` is the same as the one specified. If the license +expression will differ then it should either be specified as dynamic or +not set at all. + +Legacy specification +'''''''''''''''''''' + - TOML_ type: table - Corresponding :ref:`core metadata ` field: :ref:`License ` The table may have one of two keys. The ``file`` key has a string -value that is a file path relative to ``pyproject.toml`` to the file +value that is a file path relative to :file:`pyproject.toml` to the file which contains the license for the project. Tools MUST assume the file's encoding is UTF-8. The ``text`` key has a string value which is the license of the project. 
These keys are mutually exclusive, so a tool MUST raise an error if the metadata specifies both keys. +The table subkeys were deprecated by :pep:`639` in favor of the string value. + +.. _pyproject-toml-license-files: + +``license-files`` +----------------- + +- TOML_ type: array of strings +- Corresponding :ref:`core metadata ` field: + :ref:`License-File ` + +An array specifying paths in the project source tree relative to the project +root directory (i.e. directory containing :file:`pyproject.toml` or legacy project +configuration files, e.g. :file:`setup.py`, :file:`setup.cfg`, etc.) +to file(s) containing licenses and other legal notices to be +distributed with the package. + +The strings MUST contain valid glob patterns, as specified in +:doc:`/specifications/glob-patterns`. + +Patterns are relative to the directory containing :file:`pyproject.toml`, + +Tools MUST assume that license file content is valid UTF-8 encoded text, +and SHOULD validate this and raise an error if it is not. + +Build tools: + +- MUST include all files matched by a listed pattern in all distribution + archives. +- MUST list each matched file path under a License-File field in the + Core Metadata. + +If the ``license-files`` key is present and +is set to a value of an empty array, then tools MUST NOT include any +license files and MUST NOT raise an error. +If the ``license-files`` key is not defined, tools can decide how to handle +license files. For example they can choose not to include any files or use +their own logic to discover the appropriate files in the distribution. + + +.. _pyproject-toml-authors: +.. _pyproject-toml-maintainers: ``authors``/``maintainers`` --------------------------- @@ -290,6 +370,8 @@ follows: 4. Multiple values should be separated by commas. +.. _pyproject-toml-keywords: + ``keywords`` ------------ @@ -300,6 +382,8 @@ follows: The keywords for the project. +.. _pyproject-toml-classifiers: + ``classifiers`` --------------- @@ -309,6 +393,14 @@ The keywords for the project. Trove classifiers which apply to the project. +The use of ``License ::`` classifiers is deprecated and tools MAY issue a +warning informing users about that. +Build tools MAY raise an error if both the ``license`` string value +(translating to ``License-Expression`` metadata field) and the ``License ::`` +classifiers are used. + + +.. _pyproject-toml-urls: ``urls`` -------- @@ -318,9 +410,14 @@ Trove classifiers which apply to the project. :ref:`Project-URL ` A table of URLs where the key is the URL label and the value is the -URL itself. +URL itself. See :ref:`well-known-project-urls` for normalization rules +and well-known rules when processing metadata for presentation. +.. _pyproject-toml-scripts: +.. _pyproject-toml-gui-scripts: +.. _pyproject-toml-entry-points: + Entry points ------------ @@ -351,6 +448,9 @@ be ambiguous in the face of ``[project.scripts]`` and ``[project.gui-scripts]``, respectively. +.. _pyproject-toml-dependencies: +.. _pyproject-toml-optional-dependencies: + ``dependencies``/``optional-dependencies`` ------------------------------------------ @@ -378,7 +478,98 @@ matching :ref:`Provides-Extra ` metadata. +.. _pyproject-toml-import-names: +``import-names`` +---------------- + +- TOML_ type: array of strings +- Corresponding :ref:`core metadata ` field: + :ref:`Import-Name ` + +An array of strings specifying the import names that the project exclusively +provides when installed. Each string MUST be a valid Python identifier or can +be empty. 
An import name MAY be followed by a semicolon and the term "private" +(e.g. ``"; private"``) with any amount of whitespace surrounding the semicolon. + +Projects SHOULD list all the shortest import names that are exclusively provided +by the project. If any of the shortest names are dotted names, all intervening +names from that name to the top-level name should also be listed appropriately +in ``import-names`` and/or ``import-namespaces``. For instance, a project which +is a single package named spam with multiple submodules would only list +``project.import-names = ["spam"]``. A project that lists ``spam.bacon.eggs`` +would also need to account for ``spam`` and ``spam.bacon`` appropriately in +``import-names`` and ``import-namespaces``. Listing all names acts as a check +that the intent of the import names is as expected. As well, projects SHOULD +list all import names, public or private, using the ``; private`` modifier as +appropriate. + +If a project lists the same name in both ``import-names`` and +``import-namespaces``, then tools MUST raise an error due to ambiguity. + +Projects MAY set ``import-names`` to an empty array to represent a project with +no import names (i.e. there are no Python modules of any kind in the +distribution file). + +Build back-ends MAY support dynamically calculating the value if the user +declares the key in ``project.dynamic``. + +Examples: + +.. code-block:: toml + + [project] + name = "pillow" + import-names = ["PIL"] + +.. code-block:: toml + + [project] + name = "myproject" + import-names = ["mypackage", "_private_module ; private"] + + +.. _pyproject-toml-import-namespaces: + +``import-namespaces`` +--------------------- + +- TOML_ type: array of strings +- Corresponding :ref:`core metadata ` field: + :ref:`Import-Namespace ` + +An array of strings specifying the import names that the project provides when +installed, but not exclusively. Each string MUST be a valid Python identifier. +An import name MAY be followed by a semicolon and the term "private" (e.g. +``"; private"``) with any amount of whitespace surrounding the semicolon. Note +that unlike ``import-names``, ``import-namespaces`` CANNOT be an empty array. + +Projects SHOULD list all the shortest import names that are exclusively provided +by the project. If any of the shortest names are dotted names, all intervening +names from that name to the top-level name should also be listed appropriately +in ``import-names`` and/or ``import-namespaces``. + +This field is used for namespace packages where multiple projects can contribute +to the same import namespace. Projects all listing the same import name in +``import-namespaces`` can be installed together without shadowing each other. + +If a project lists the same name in both ``import-names`` and +``import-namespaces``, then tools MUST raise an error due to ambiguity. + +Build back-ends MAY support dynamically calculating the value if the user +declares the key in ``project.dynamic``. + +Example: + +.. code-block:: toml + + [project] + name = "zope-interface" + import-namespaces = ["zope"] + import-names = ["zope.interface"] + + +.. _pyproject-toml-dynamic: .. _declaring-project-metadata-dynamic: ``dynamic`` @@ -449,6 +640,13 @@ History - November 2020: The specification of the ``[project]`` table was approved through :pep:`621`. +- December 2024: The ``license`` key was redefined, the ``license-files`` key was + added and ``License::`` classifiers were deprecated through :pep:`639`. 
+
+- September 2025: Clarified that the ``license`` key applies to all distribution
+  files generated from the :file:`pyproject.toml` file.
+- October 2025: The ``import-names`` and ``import-namespaces`` keys were added
+  through :pep:`794`.
 
 
 .. _TOML: https://toml.io
diff --git a/source/specifications/recording-installed-packages.rst b/source/specifications/recording-installed-packages.rst
index ee8e69f79..a689fa7fd 100644
--- a/source/specifications/recording-installed-packages.rst
+++ b/source/specifications/recording-installed-packages.rst
@@ -66,6 +66,12 @@ The ``METADATA`` file is mandatory. All other files may be omitted at the
 installing tool's discretion. Additional installer-specific files may be
 present.
 
+This :file:`.dist-info/` directory may contain the following directories, described in
+detail below:
+
+* :file:`licenses/`: contains license files.
+* :file:`sboms/`: contains Software Bill-of-Materials files (SBOMs).
+
 .. note::
 
    The :ref:`binary-distribution-format` specification describes additional
@@ -144,7 +150,7 @@ Here is an example snippet of a possible ``RECORD`` file::
     __pycache__/black.cpython-38.pyc,,
     __pycache__/blackd.cpython-38.pyc,,
     black-19.10b0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-    black-19.10b0.dist-info/LICENSE,sha256=nAQo8MO0d5hQz1vZbhGqqK_HLUqG1KNiI9erouWNbgA,1080
+    black-19.10b0.dist-info/licenses/LICENSE,sha256=nAQo8MO0d5hQz1vZbhGqqK_HLUqG1KNiI9erouWNbgA,1080
     black-19.10b0.dist-info/METADATA,sha256=UN40nGoVVTSpvLrTBwNsXgZdZIwoKFSrrDDHP6B7-A0,58841
     black-19.10b0.dist-info/RECORD,,
     black.py,sha256=45IF72OgNfF8WpeNHnxV2QGfbCLubV5Xjl55cI65kYs,140161
@@ -219,6 +225,26 @@ of requirement (i.e. name plus version specifier). Its detailed specification
 is at :ref:`direct-url`.
 
+
+The :file:`licenses/` subdirectory
+==================================
+
+If the metadata version is 2.4 or greater and one or more ``License-File``
+fields is specified, the :file:`.dist-info/` directory MUST contain a :file:`licenses/`
+subdirectory which MUST contain the files listed in the ``License-File`` fields in
+the :file:`METADATA` file at their respective paths relative to the
+:file:`licenses/` directory.
+Any files in this directory MUST be copied from wheels by the install tools.
+
+
+The :file:`sboms/` subdirectory
+==================================
+
+All files contained within the :file:`.dist-info/sboms/` directory MUST
+be Software Bill-of-Materials (SBOM) files that describe software contained
+within the installed package.
+Any files in this directory MUST be copied from wheels by the install tools.
+
+
 Intentionally preventing changes to installed packages
 ======================================================
 
@@ -259,3 +285,5 @@ History
   for the full definition.
 - September 2020: Various amendments and clarifications were approved through
   :pep:`627`.
+- December 2024: The :file:`.dist-info/licenses/` directory was specified through
+  :pep:`639`.
diff --git a/source/specifications/schemas/index.rst b/source/specifications/schemas/index.rst
new file mode 100644
index 000000000..a80891975
--- /dev/null
+++ b/source/specifications/schemas/index.rst
@@ -0,0 +1,8 @@
+.. _`packaging-schemas`:
+
+PyPA schemas
+############
+
+- `direct_url.json `_
+- `build-details.json `_
+- `pylock.toml `_
diff --git a/source/specifications/section-distribution-metadata.rst b/source/specifications/section-distribution-metadata.rst
index 7fd3361a0..551e6b730 100644
--- a/source/specifications/section-distribution-metadata.rst
+++ b/source/specifications/section-distribution-metadata.rst
@@ -10,5 +10,9 @@ Package Distribution Metadata
    version-specifiers
    dependency-specifiers
    pyproject-toml
+   dependency-groups
    inline-script-metadata
    platform-compatibility-tags
+   well-known-project-urls
+   glob-patterns
+   license-expression
diff --git a/source/specifications/section-package-indices.rst b/source/specifications/section-package-indices.rst
index 13ba98113..1fcefe6ff 100644
--- a/source/specifications/section-package-indices.rst
+++ b/source/specifications/section-package-indices.rst
@@ -7,3 +7,6 @@ Package Index Interfaces
    pypirc
    simple-repository-api
+   file-yanking
+   index-hosted-attestations
+   project-status-markers
diff --git a/source/specifications/section-python-description-formats.rst b/source/specifications/section-python-description-formats.rst
new file mode 100644
index 000000000..5a56c3968
--- /dev/null
+++ b/source/specifications/section-python-description-formats.rst
@@ -0,0 +1,8 @@
+==========================
+Python Description Formats
+==========================
+
+.. toctree::
+   :titlesonly:
+
+   build-details/index
diff --git a/source/specifications/section-reproducible-environments.rst b/source/specifications/section-reproducible-environments.rst
new file mode 100644
index 000000000..1f83f3673
--- /dev/null
+++ b/source/specifications/section-reproducible-environments.rst
@@ -0,0 +1,8 @@
+=========================
+Reproducible Environments
+=========================
+
+.. toctree::
+   :titlesonly:
+
+   pylock-toml
diff --git a/source/specifications/simple-repository-api.rst b/source/specifications/simple-repository-api.rst
index b9f87ce7b..3b9a2ccac 100644
--- a/source/specifications/simple-repository-api.rst
+++ b/source/specifications/simple-repository-api.rst
@@ -5,16 +5,22 @@ Simple repository API
 =====================
 
+The keywords "**MUST**", "**MUST NOT**", "**REQUIRED**", "**SHALL**",
+"**SHALL NOT**", "**SHOULD**", "**SHOULD NOT**", "**RECOMMENDED**", "**MAY**",
+and "**OPTIONAL**" in this document are to be interpreted as described in
+:rfc:`2119`.
+
 The interface for querying available package versions and retrieving packages
 from an index server comes in two forms:
-HTML and JSON.
+:ref:`HTML ` and
+:ref:`JSON `.
 
 .. _simple-repository-api-base:
 
-Base HTML API
-=============
+Base API
+========
 
-A repository that implements the simple API is defined by its base URL, this is
+A repository that implements the simple API is defined by its base URL. This is
 the top level URL that all additional URLs are below. The API is named the
 "simple" repository due to the fact that PyPI's base URL is
 ``https://pypi.org/simple/``.
@@ -23,12 +29,118 @@ the top level URL that all additional URLs are below. The API is named the
 URL (so given PyPI's URL, a URL of ``/foo/`` would be
 ``https://pypi.org/simple/foo/``.
 
+Normalized Names
+----------------
+
+This spec references the concept of a "normalized" project name. As per
+:ref:`the name normalization specification `
+the only valid characters in a name are the ASCII alphabet, ASCII numbers,
+``.``, ``-``, and ``_``. The name should be lowercased with all runs of the
+characters ``.``, ``-``, or ``_`` replaced with a single ``-`` character. This
+can be implemented in Python with the ``re`` module::
+
+    import re
+
+    def normalize(name):
+        return re.sub(r"[-_.]+", "-", name).lower()
+
+.. _simple-repository-api-versioning:
+
+Versioning PyPI's Simple API
+----------------------------
+
+This spec proposes the inclusion of a meta tag on the responses of every
+successful request to a simple API page, which contains a name attribute
+of ``pypi:repository-version``, and a content that is a :ref:`version specifiers
+specification ` compatible
+version number, which is further constrained to ONLY be Major.Minor, and
+none of the additional features supported by :ref:`the version specifiers
+specification `.
+
+This would end up looking like:
+
+.. code-block:: html
+
+    <meta name="pypi:repository-version" content="1.0">
+
+When interpreting the repository version:
+
+* Incrementing the major version is used to signal a backwards
+  incompatible change such that existing clients would no longer be
+  expected to be able to meaningfully use the API.
+* Incrementing the minor version is used to signal a backwards
+  compatible change such that existing clients would still be
+  expected to be able to meaningfully use the API.
+
+It is left up to the discretion of any future specs as to what
+specifically constitutes a backwards incompatible vs compatible change
+beyond the broad suggestion that existing clients will be able to
+"meaningfully" continue to use the API, and can include adding,
+modifying, or removing existing features.
+
+It is the expectation of this spec that the major version will never be
+incremented, and any future major API evolutions would utilize a
+different mechanism for API evolution. However the major version
+is included to disambiguate with future versions (e.g. a hypothetical
+simple api v2 that lived at /v2/, but which would be confusing if the
+repository-version was set to a version >= 2).
+
+API Version History
+~~~~~~~~~~~~~~~~~~~
+
+This section contains only an abbreviated history of changes,
+as marked by the API version number. For a full history of changes including
+changes made before API versioning, see :ref:`History `.
+
+- API version 1.0: Initial version of the API, declared with :pep:`629`.
+- API version 1.1: Added ``versions``, ``files[].size``, and ``files[].upload-time`` metadata
+  to the JSON serialization, declared with :pep:`700`.
+- API version 1.2: Added repository "tracks" metadata, declared with :pep:`708`.
+- API version 1.3: Added provenance metadata, declared with :pep:`740`.
+- API version 1.4: Added status markers, declared with :pep:`792`.
+
+Clients
+~~~~~~~
+
+Clients interacting with the simple API **SHOULD** introspect each
+response for the repository version, and if that data does not exist
+**MUST** assume that it is version 1.0.
+
+When encountering a major version greater than expected, clients
+**MUST** hard fail with an appropriate error message for the user.
+
+When encountering a minor version greater than expected, clients
+**SHOULD** warn users with an appropriate message.
+
+Clients **MAY** still continue to use feature detection in order to
+determine what features a repository uses.
+
+.. _simple-repository-html-serialization:
+
+HTML Serialization
+------------------
+
+.. _simple-repository-html-project-list:
+
+The following constraints apply to all HTML serialized responses described in
+this spec:
+
+* All HTML responses **MUST** be a valid HTML5 document.
+* HTML responses **MAY** contain one or more ``meta`` tags in the + ```` section. The semantics of these tags are defined below. + +Project List +~~~~~~~~~~~~ Within a repository, the root URL (``/`` for this spec which represents the base URL) **MUST** be a valid HTML5 page with a single anchor element per project in -the repository. The text of the anchor tag **MUST** be the name of -the project and the href attribute **MUST** link to the URL for that particular -project. As an example:: +the repository. + +The text of each anchor tag **MUST** be the name of +the project and the ``href`` attribute **MUST** link to the URL for that particular +project. As an example: + +.. code-block:: html @@ -38,14 +150,26 @@ project. As an example:: +.. _simple-repository-html-project-detail: + +Project Detail +~~~~~~~~~~~~~~ + Below the root URL is another URL for each individual project contained within -a repository. The format of this URL is ``//`` where the ```` -is replaced by the normalized name for that project, so a project named -"HolyGrail" would have a URL like ``/holygrail/``. This URL must respond with -a valid HTML5 page with a single anchor element per file for the project. The -href attribute **MUST** be a URL that links to the location of the file for -download, and the text of the anchor tag **MUST** match the final path -component (the filename) of the URL. The URL **SHOULD** include a hash in the +a repository. The format of this URL is ``//``, where the ```` +is replaced by the normalized name for that project. + +.. tip:: + + For example, a project named "HolyGrail" would have a URL like + ``/holygrail/``. + +The project detail URL must respond with a valid HTML5 page with a single +anchor element per file for the project. The ``href`` attribute **MUST** be a +URL that links to the location of the file for download, and the text of the +anchor tag **MUST** match the final path component (the filename) of the URL. + +Each file URL **SHOULD** include a hash in the form of a URL fragment with the following syntax: ``#=``, where ```` is the lowercase name of the hash function (such as ``sha256``) and ```` is the hex encoded digest. @@ -81,6 +205,26 @@ In addition to the above, the following constraints are placed on the API: associated signature, the signature would be located at ``/packages/HolyGrail-1.0.tar.gz.asc``. +* A repository **MAY** include a ``data-core-metadata`` attribute on a file + link. + + The repository **SHOULD** provide the hash of the Core Metadata file as the + ``data-core-metadata`` attribute's value using the syntax + ``=``, where ```` is the lower cased name of + the hash function used, and ```` is the hex encoded digest. The + repository **MAY** use ``true`` as the attribute's value if a hash is unavailable. + +* A repository **MAY** include a ``data-dist-info-metadata`` attribute on a + file link. + + Index clients **MAY** consume this key if present, as a legacy fallback + for ``data-core-metadata``. + + .. important:: + + ``data-dist-info-metadata`` was standardized with :pep:`658` and renamed to + ``data-core-metadata`` with :pep:`714`. + * A repository **MAY** include a ``data-gpg-sig`` attribute on a file link with a value of either ``true`` or ``false`` to indicate whether or not there is a GPG signature. Repositories that do this **SHOULD** include it on every link. @@ -89,165 +233,60 @@ In addition to the above, the following constraints are placed on the API: link. 
This exposes the :ref:`core-metadata-requires-python` metadata field for the corresponding release. Where this is present, installer tools **SHOULD** ignore the download when installing to a Python version that - doesn't satisfy the requirement. For example:: + doesn't satisfy the requirement. For example: + + .. code-block:: html ... In the attribute value, < and > have to be HTML encoded as ``<`` and ``>``, respectively. -Normalized Names ----------------- - -This spec references the concept of a "normalized" project name. As per -:ref:`the name normalization specification ` -the only valid characters in a name are the ASCII alphabet, ASCII numbers, -``.``, ``-``, and ``_``. The name should be lowercased with all runs of the -characters ``.``, ``-``, or ``_`` replaced with a single ``-`` character. This -can be implemented in Python with the ``re`` module:: - - import re - - def normalize(name): - return re.sub(r"[-_.]+", "-", name).lower() - -.. _simple-repository-api-yank: - -Adding "Yank" Support to the Simple API -======================================= - -Links in the simple repository **MAY** have a ``data-yanked`` attribute -which may have no value, or may have an arbitrary string as a value. The -presence of a ``data-yanked`` attribute **SHOULD** be interpreted as -indicating that the file pointed to by this particular link has been -"Yanked", and should not generally be selected by an installer, except -under specific scenarios. - -The value of the ``data-yanked`` attribute, if present, is an arbitrary -string that represents the reason for why the file has been yanked. Tools -that process the simple repository API **MAY** surface this string to -end users. +* A repository **MAY** include a ``data-yanked`` attribute on a file link. -The yanked attribute is not immutable once set, and may be rescinded in -the future (and once rescinded, may be reset as well). Thus API users -**MUST** be able to cope with a yanked file being "unyanked" (and even -yanked again). + The ``data-yanked`` attribute may have no value, or may have an + arbitrary string as a value. The presence of a ``data-yanked`` attribute + **SHOULD** be interpreted as indicating that the file pointed to by this + particular link has been "Yanked", and should not generally be selected by + an installer, except under specific scenarios. + The value of the ``data-yanked`` attribute, if present, is an arbitrary + string that represents the reason for why the file has been yanked. -Installers ----------- + .. note:: -The desirable experience for users is that once a file is yanked, when -a human being is currently trying to directly install a yanked file, that -it fails as if that file had been deleted. However, when a human did that -awhile ago, and now a computer is just continuing to mechanically follow -the original order to install the now yanked file, then it acts as if it -had not been yanked. - -An installer **MUST** ignore yanked releases, if the selection constraints -can be satisfied with a non-yanked version, and **MAY** refuse to use a -yanked release even if it means that the request cannot be satisfied at all. -An implementation **SHOULD** choose a policy that follows the spirit of the -intention above, and that prevents "new" dependencies on yanked -releases/files. - -What this means is left up to the specific installer, to decide how to best -fit into the overall usage of their installer. However, there are two -suggested approaches to take: - -1. 
Yanked files are always ignored, unless they are the only file that - matches a version specifier that "pins" to an exact version using - either ``==`` (without any modifiers that make it a range, such as - ``.*``) or ``===``. Matching this version specifier should otherwise - be done as per :ref:`the version specifiers specification - ` for things like local versions, zero padding, - etc. -2. Yanked files are always ignored, unless they are the only file that - matches what a lock file (such as ``Pipfile.lock`` or ``poetry.lock``) - specifies to be installed. In this case, a yanked file **SHOULD** not - be used when creating or updating a lock file from some input file or - command. - -Regardless of the specific strategy that an installer chooses for deciding -when to install yanked files, an installer **SHOULD** emit a warning when -it does decide to install a yanked file. That warning **MAY** utilize the -value of the ``data-yanked`` attribute (if it has a value) to provide more -specific feedback to the user about why that file had been yanked. - - -Mirrors -------- - -Mirrors can generally treat yanked files one of two ways: - -1. They may choose to omit them from their simple repository API completely, - providing a view over the repository that shows only "active", unyanked - files. -2. They may choose to include yanked files, and additionally mirror the - ``data-yanked`` attribute as well. - -Mirrors **MUST NOT** mirror a yanked file without also mirroring the -``data-yanked`` attribute for it. + The semantics of how tools should handle yanked files is + described in :ref:`file-yanking`. -.. _simple-repository-api-versioning: +* A repository **MAY** include a ``data-provenance`` attribute on a file link. + The value of this attribute **MUST** be a fully qualified URL, signaling that + the file's provenance can be found at that URL. This URL **MUST** represent + a `secure origin `_. -Versioning PyPI's Simple API -============================ + .. note:: -This spec proposes the inclusion of a meta tag on the responses of every -successful request to a simple API page, which contains a name attribute -of "pypi:repository-version", and a content that is a :ref:`version specifiers -specification ` compatible -version number, which is further constrained to ONLY be Major.Minor, and -none of the additional features supported by :ref:`the version specifiers -specification `. - -This would end up looking like:: + The ``data-provenance`` attribute was added with API version 1.3. - - -When interpreting the repository version: - -* Incrementing the major version is used to signal a backwards - incompatible change such that existing clients would no longer be - expected to be able to meaningfully use the API. -* Incrementing the minor version is used to signal a backwards - compatible change such that existing clients would still be - expected to be able to meaningfully use the API. - -It is left up to the discretion of any future specs as to what -specifically constitutes a backwards incompatible vs compatible change -beyond the broad suggestion that existing clients will be able to -"meaningfully" continue to use the API, and can include adding, -modifying, or removing existing features. - -It is expectation of this spec that the major version will never be -incremented, and any future major API evolutions would utilize a -different mechanism for API evolution. However the major version -is included to disambiguate with future versions (e.g. 
a hypothetical -simple api v2 that lived at /v2/, but which would be confusing if the -repository-version was set to a version >= 2). + .. note:: -This spec sets the current API version to "1.0", and expects that -future specs that further evolve the simple API will increment the -minor version number. + The format of the linked provenance is defined in :ref:`index-hosted-attestations`. +* A repository **MAY** include ``pypi:project-status`` and + ``pypi:project-status-reason`` meta tags on the response itself. -Clients -------- + The value of ``pypi:project-status`` **MUST** be a valid + project status marker, while the value of + ``pypi:project-status-reason`` **MUST** be an arbitrary string if present. -Clients interacting with the simple API **SHOULD** introspect each -response for the repository version, and if that data does not exist -**MUST** assume that it is version 1.0. + .. note:: -When encountering a major version greater than expected, clients -**MUST** hard fail with an appropriate error message for the user. + The set of valid project status markers and their semantics is described + in :ref:`project-status-markers`. -When encountering a minor version greater than expected, clients -**SHOULD** warn users with an appropriate message. + .. note:: -Clients **MAY** still continue to use feature detection in order to -determine what features a repository uses. + The ``pypi:project-status`` and ``pypi:project-status-reason`` meta tags + were added with API version 1.4. .. _simple-repository-api-metadata-file: @@ -351,8 +390,8 @@ JSON Serialization ------------------ The URL structure from :ref:`the base HTML API specification -` still applies, as this spec only adds an additional -serialization format for the already existing API. +` still applies, as this spec only adds +an additional serialization format for the already existing API. The following constraints apply to all JSON serialized responses described in this spec: @@ -360,7 +399,7 @@ spec: * All JSON responses will *always* be a JSON object rather than an array or other type. -* While JSON doesn't natively support an URL type, any value that represents an +* While JSON doesn't natively support a URL type, any value that represents an URL in this API may be either absolute or relative as long as they point to the correct location. If relative, they are relative to the current URL as if it were HTML. @@ -380,6 +419,10 @@ spec: * All requirements of :ref:`the base HTML API specification ` that are not HTML specific still apply. +* Keys (at any level) with a leading underscore are reserved as private for + index server use. No future standard will assign a meaning to any such key. + +.. _simple-repository-json-project-list: Project List ~~~~~~~~~~~~ @@ -396,7 +439,7 @@ As an example: { "meta": { - "api-version": "1.0" + "api-version": "1.4" }, "projects": [ {"name": "Frob"}, @@ -424,6 +467,7 @@ As an example: best thought of as a set, but both JSON and HTML lack the functionality to have sets. +.. _simple-repository-json-project-detail: Project Detail ~~~~~~~~~~~~~~ @@ -433,12 +477,57 @@ The format of this URL is ``//`` where the ```` is replaced by name for that project, so a project named "Silly_Walk" would have a URL like ``/silly-walk/``. -This URL must respond with a JSON encoded dictionary that has three keys: +This URL must respond with a JSON encoded dictionary that has four keys: - ``name``: The normalized name of the project. - ``files``: A list of dictionaries, each one representing an individual file. 
- ``meta``: The general response metadata as `described earlier `__. + In addition to the general response metadata, the project detail ``meta`` + dictionary **MAY** also include the following: + + - ``project-status``: If present, this **MUST** be a valid project status marker. + + .. note:: + + The set of valid project status markers and their semantics is described + in :ref:`project-status-markers`. + + .. note:: + + The ``project-status`` key was added with API version 1.4. + + - ``project-status-reason``: If present, this **MUST** be an arbitrary string + description of the project status. + + .. note:: + + The ``project-status-reason`` key was added with API version 1.4. + +- ``versions``: A list of version strings specifying all of the project versions + uploaded for this project. The value of ``versions`` is logically a set, + and as such may not contain duplicates, and the order of the versions is + not significant. + + .. note:: + + All of the files listed in the ``files`` key MUST be associated with one of the + versions in the ``versions`` key. The ``versions`` key MAY contain versions with + no associated files (to represent versions with no files uploaded, if the server + has such a concept). + + .. note:: + + Because servers may hold "legacy" data from before the adoption of + :ref:`the version specifiers specification (VSS) `, version + strings currently cannot be required to be valid VSS versions, and therefore + cannot be assumed to be orderable using the VSS rules. However, servers **SHOULD** + use normalized VSS versions where possible. + + .. note:: + + The ``versions`` key was added with API version 1.1. + Each individual file dictionary has the following keys: - ``filename``: The filename that is being represented. @@ -466,7 +555,7 @@ Each individual file dictionary has the following keys: Unlike ``data-requires-python`` in :ref:`the base HTML API specification `, the ``requires-python`` key does not require any special escaping other than anything JSON does naturally. -- ``dist-info-metadata``: An **optional** key that indicates +- ``core-metadata``: An **optional** key that indicates that metadata for this file is available, via the same location as specified in :ref:`the API metadata file specification ` (``{file_url}.metadata``). Where this @@ -484,17 +573,61 @@ Each individual file dictionary has the following keys: It is recommended that servers make the hashes of the metadata file available if possible. + +- ``dist-info-metadata``: An **optional**, deprecated alias for ``core-metadata``. + + Index clients **MAY** consume this key if present, as a legacy fallback + for ``core-metadata``. + + .. important:: + + ``dist-info-metadata`` was standardized with :pep:`658` and renamed to + ``core-metadata`` with :pep:`714`. + - ``gpg-sig``: An **optional** key that acts a boolean to indicate if the file has an associated GPG signature or not. The URL for the signature file follows what is specified in :ref:`the base HTML API specification ` (``{file_url}.asc``). If this key does not exist, then the signature may or may not exist. + - ``yanked``: An **optional** key which may be either a boolean to indicate if the file has been yanked, or a non empty, but otherwise arbitrary, string to indicate that a file has been yanked with a specific reason. If the ``yanked`` key is present and is a truthy value, then it **SHOULD** be interpreted as indicating that the - file pointed to by the ``url`` field has been "Yanked" as per :ref:`the API - yank specification `. 
+ file pointed to by the ``url`` field has been "Yanked". + + .. note:: + + The semantics of how tools should handle yanked files is + described in :ref:`file-yanking`. + +- ``size``: A **mandatory** key. It **MUST** contain an integer which is the file size in bytes. + + .. note:: + + The ``size`` key was added with API version 1.1. + +- ``upload-time``: An **optional** key that, if present, **MUST** contain a valid + ISO 8601 date/time string in the format ``yyyy-mm-ddThh:mm:ss.ffffffZ`` + which represents the time the file was uploaded to the index. + + As indicated by the ``Z`` suffix, the upload time **MUST** use the UTC timezone. + The fractional seconds part of the timestamp (the ``.ffffff`` part) is optional, + and if present may contain up to 6 digits of precision. If a server does not record + upload time information for a file, it **MAY** omit the ``upload-time`` key. + + .. note:: + + The ``upload-time`` key was added with API version 1.1. + +- ``provenance``: An **optional** key which, if present **MUST** be either a JSON + string or ``null``. If not ``null``, it **MUST** be a URL to the file's + associated provenance, with the same rules as ``data-provenance`` in the + :ref:`base HTML API specification `. + + .. note:: + + The ``provenance`` field was added with API version 1.3. As an example: @@ -502,7 +635,9 @@ As an example: { "meta": { - "api-version": "1.0" + "api-version": "1.4", + "project-status": "active", + "project-status-reason": "this project is not yet haunted" }, "name": "holygrail", "files": [ @@ -511,16 +646,20 @@ As an example: "url": "https://example.com/files/holygrail-1.0.tar.gz", "hashes": {"sha256": "...", "blake2b": "..."}, "requires-python": ">=3.7", - "yanked": "Had a vulnerability" + "yanked": "Had a vulnerability", + "size": 123456 }, { "filename": "holygrail-1.0-py3-none-any.whl", "url": "https://example.com/files/holygrail-1.0-py3-none-any.whl", "hashes": {"sha256": "...", "blake2b": "..."}, "requires-python": ">=3.7", - "dist-info-metadata": true + "dist-info-metadata": true, + "provenance": "https://example.com/files/holygrail-1.0-py3-none-any.whl.provenance", + "size": 1337 } - ] + ], + "versions": ["1.0"] } @@ -616,7 +755,7 @@ likely just be treated the same as a ``406 Not Acceptable`` error. This spec **does** require that if the meta version ``latest`` is being used, the server **MUST** respond with the content type for the actual version that is contained in the response -(i.e. A ``Accept: application/vnd.pypi.simple.latest+json`` request that returns +(i.e. an ``Accept: application/vnd.pypi.simple.latest+json`` request that returns a ``v1.x`` response should have a ``Content-Type`` of ``application/vnd.pypi.simple.v1+json``). @@ -725,7 +864,7 @@ may *optionally* be used instead. URL Parameter ^^^^^^^^^^^^^ -Servers that implement the Simple API may choose to support an URL parameter named +Servers that implement the Simple API may choose to support a URL parameter named ``format`` to allow the clients to request a specific version of the URL. The value of the ``format`` parameter should be **one** of the valid content types. @@ -771,46 +910,6 @@ which version+format a specific repository URL was configured for, and when maki a request to that server, emit an ``Accept`` header that *only* includes the correct content type. - -TUF Support - PEP 458 ---------------------- - -:pep:`458` requires that all API responses are hashable and that they can be uniquely -identified by a path relative to the repository root. 
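To make the JSON serialization and the per-file keys described above more concrete, the following is a minimal, non-normative client sketch. It assumes an index that implements the v1 JSON format (PyPI is used purely as an illustration, with ``sampleproject`` standing in for any project name) and relies only on the Python standard library:

.. code-block:: python

    import json
    import urllib.request
    from datetime import datetime

    # Illustrative only: fetch one project's detail page from an index that
    # implements the v1 JSON serialization. The URL and project name are
    # placeholders, not part of the specification.
    url = "https://pypi.org/simple/sampleproject/"
    accept = "application/vnd.pypi.simple.v1+json"

    request = urllib.request.Request(url, headers={"Accept": accept})
    with urllib.request.urlopen(request) as response:
        # Check that the server actually returned the serialization we asked for.
        content_type = response.headers.get("Content-Type", "")
        if not content_type.startswith(accept):
            raise RuntimeError(f"unexpected content type: {content_type!r}")
        detail = json.load(response)

    print("api-version:", detail["meta"]["api-version"])
    print("versions:", detail.get("versions", []))

    for file in detail["files"]:
        if file.get("yanked"):
            # A truthy ``yanked`` value means the file should normally be skipped.
            continue
        uploaded = file.get("upload-time")
        if uploaded is not None:
            # The trailing ``Z`` marks UTC; replacing it keeps the parsing
            # working on Python versions older than 3.11.
            uploaded = datetime.fromisoformat(uploaded.replace("Z", "+00:00"))
        print(file["filename"], file["size"], uploaded, file.get("provenance"))

A real installer would also apply the version-handling and content negotiation recommendations given elsewhere in this specification.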
For a Simple API repository, the -target path is the Root of our API (e.g. ``/simple/`` on PyPI). This creates -challenges when accessing the API using a TUF client instead of directly using a -standard HTTP client, as the TUF client cannot handle the fact that a target could -have multiple different representations that all hash differently. - -:pep:`458` does not specify what the target path should be for the Simple API, but -TUF requires that the target paths be "file-like", in other words, a path like -``simple/PROJECT/`` is not acceptable, because it technically points to a -directory. - -The saving grace is that the target path does not *have* to actually match the URL -being fetched from the Simple API, and it can just be a sigil that the fetching code -knows how to transform into the actual URL that needs to be fetched. This same thing -can hold true for other aspects of the actual HTTP request, such as the ``Accept`` -header. - -Ultimately figuring out how to map a directory to a filename is out of scope for this -spec (but it would be in scope for :pep:`458`), and this spec defers making a decision -about how exactly to represent this inside of :pep:`458` metadata. - -However, it appears that the current WIP branch against pip that attempts to implement -:pep:`458` is using a target path like ``simple/PROJECT/index.html``. This could be -modified to include the API version and serialization format using something like -``simple/PROJECT/vnd.pypi.simple.vN.FORMAT``. So the v1 HTML format would be -``simple/PROJECT/vnd.pypi.simple.v1.html`` and the v1 JSON format would be -``simple/PROJECT/vnd.pypi.simple.v1.json``. - -In this case, since ``text/html`` is an alias to ``application/vnd.pypi.simple.v1+html`` -when interacting through TUF, it likely will make the most sense to normalize to the -more explicit name. - -Likewise the ``latest`` metaversion should not be included in the targets, only -explicitly declared versions should be supported. - Recommendations --------------- @@ -867,108 +966,7 @@ It is recommended that clients: - Check the ``Content-Type`` of the response and ensure it matches something that you were expecting. -Additional Fields for the Simple API for Package Indexes -======================================================== - -This specification defines version 1.1 of the simple repository API. For the -HTML version of the API, there is no change from version 1.0. For the JSON -version of the API, the following changes are made: - -- The ``api-version`` must specify version 1.1 or later. -- A new ``versions`` key is added at the top level. -- Two new "file information" keys, ``size`` and ``upload-time``, are added to - the ``files`` data. -- Keys (at any level) with a leading underscore are reserved as private for - index server use. No future standard will assign a meaning to any such key. - -The ``versions`` and ``size`` keys are mandatory. The ``upload-time`` key is -optional. - -Versions --------- - -An additional key, ``versions`` MUST be present at the top level, in addition to -the keys ``name``, ``files`` and ``meta`` defined in :ref:`the JSON API -specification `. This key MUST -contain a list of version strings specifying all of the project versions uploaded -for this project. The value is logically a set, and as such may not contain -duplicates, and the order of the values is not significant. - -All of the files listed in the ``files`` key MUST be associated with one of the -versions in the ``versions`` key. 
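As a concrete, non-normative illustration of the repository version rules summarized earlier (a missing version is treated as ``1.0``, a newer major version is a hard failure, and a newer minor version only warrants a warning), a client-side check might look roughly like the sketch below. The function name and the "expected" version are placeholders:

.. code-block:: python

    import warnings

    # Highest simple API version this hypothetical client knows about.
    EXPECTED_MAJOR, EXPECTED_MINOR = 1, 4

    def check_api_version(meta: dict) -> None:
        raw = meta.get("api-version", "1.0")  # absent data is treated as 1.0
        major, minor = (int(part) for part in raw.split(".")[:2])
        if major > EXPECTED_MAJOR:
            # Clients MUST hard fail on a newer major version.
            raise RuntimeError(f"unsupported repository API version: {raw}")
        if major == EXPECTED_MAJOR and minor > EXPECTED_MINOR:
            # Clients SHOULD warn on a newer minor version.
            warnings.warn(f"repository API version {raw} is newer than expected")

    check_api_version({"api-version": "1.4"})  # accepted silently
    check_api_version({})                      # treated as version 1.0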
The ``versions`` key MAY contain versions with -no associated files (to represent versions with no files uploaded, if the server -has such a concept). - -Note that because servers may hold "legacy" data from before the adoption of -:ref:`the version specifiers specification (VSS) `, version -strings currently cannot be required to be valid VSS versions, and therefore -cannot be assumed to be orderable using the VSS rules. However, servers SHOULD -use normalised VSS versions where -possible. - - -Additional file information ---------------------------- - -Two new keys are added to the ``files`` key. - -- ``size``: This field is mandatory. It MUST contain an integer which is the - file size in bytes. -- ``upload-time``: This field is optional. If present, it MUST contain a valid - ISO 8601 date/time string, in the format ``yyyy-mm-ddThh:mm:ss.ffffffZ``, - which represents the time the file was uploaded to the index. As indicated by - the ``Z`` suffix, the upload time MUST use the UTC timezone. The fractional - seconds part of the timestamp (the ``.ffffff`` part) is optional, and if - present may contain up to 6 digits of precision. If a server does not record - upload time information for a file, it MAY omit the ``upload-time`` key. - -Rename dist-info-metadata in the Simple API -=========================================== - - -The keywords "**MUST**", "**MUST NOT**", "**REQUIRED**", "**SHALL**", -"**SHALL NOT**", "**SHOULD**", "**SHOULD NOT**", "**RECOMMENDED**", "**MAY**", -and "**OPTIONAL**"" in this document are to be interpreted as described in -:rfc:`RFC 2119 <2119>`. - - -Servers -------- - -The :ref:`the API metadata file specification -` metadata, when used in the HTML -representation of the Simple API, -**MUST** be emitted using the attribute name ``data-core-metadata``, with the -supported values remaining the same. - -The :ref:`the API metadata file specification -` metadata, when used in the :ref:`the -JSON API specification ` JSON representation of the -Simple API, **MUST** be emitted using the key ``core-metadata``, with the -supported values remaining the same. - -To support clients that used the previous key names, the HTML representation -**MAY** also be emitted using the ``data-dist-info-metadata``, and if it does -so it **MUST** match the value of ``data-core-metadata``. - - - -Clients -------- - -Clients consuming any of the HTML representations of the Simple API **MUST** -read the :ref:`the API metadata file specification -` metadata from the key -``data-core-metadata`` if it is -present. They **MAY** optionally use the legacy ``data-dist-info-metadata`` if -it is present but ``data-core-metadata`` is not. - -Clients consuming the JSON representation of the Simple API **MUST** read the -:ref:`the API metadata file specification -` metadata from the key ``core-metadata`` -if it is present. They -**MAY** optionally use the legacy ``dist-info-metadata`` key if it is present -but ``core-metadata`` is not. +.. 
_simple-repository-history: History ======= @@ -986,3 +984,6 @@ History format, in :pep:`700` * June 2023: renaming the field which provides package metadata independently from a package, in :pep:`714` +* November 2024: provenance metadata in the HTML and JSON formats, in :pep:`740` +* July 2025: project status markers in the HTML and JSON formats, in :pep:`792` +* July 2025: layout changes (dedicated page for file yanking, introduce concepts before API details) diff --git a/source/specifications/source-distribution-format.rst b/source/specifications/source-distribution-format.rst index bae618df1..9ac93be7b 100644 --- a/source/specifications/source-distribution-format.rst +++ b/source/specifications/source-distribution-format.rst @@ -5,7 +5,7 @@ Source distribution format ========================== -The current standard format of source distribution format is identified by the +The current standard source distribution format is identified by the presence of a :file:`pyproject.toml` file in the distribution archive. The layout of such a distribution was originally specified in :pep:`517` and is formally documented here. @@ -20,16 +20,20 @@ specification. Source distributions are also known as *sdists* for short. +.. _source-distribution-format-source-tree: + Source trees ============ A *source tree* is a collection of files and directories -- like a version control system checkout -- which contains a :file:`pyproject.toml` file that -can be use to build a source distribution from the contained files and +can be used to build a source distribution from the contained files and directories. :pep:`517` and :pep:`518` specify what is required to meet the definition of what :file:`pyproject.toml` must contain for something to be deemed a source tree. +.. _source-distribution-format-sdist: + Source distribution file name ============================= @@ -58,10 +62,15 @@ A ``.tar.gz`` source distribution (sdist) contains a single top-level directory called ``{name}-{version}`` (e.g. ``foo-1.0``), containing the source files of the package. The name and version MUST match the metadata stored in the file. This directory must also contain a :file:`pyproject.toml` in the format defined in -:ref:`pyproject-toml-spec`, and a ``PKG-INFO`` file containing +:ref:`pyproject-toml-spec`, and a :file:`PKG-INFO` file containing metadata in the format described in the :ref:`core-metadata` specification. The metadata MUST conform to at least version 2.2 of the metadata specification. +If the metadata version is 2.4 or greater, the source distribution MUST contain +any license files specified by the ``License-File`` field in the :file:`PKG-INFO` +at their respective paths relative to the root directory of the sdist +(containing the :file:`pyproject.toml` and the :file:`PKG-INFO` metadata). + No other content of a sdist is required or defined. Build systems can store whatever information they need in the sdist to build the project. @@ -150,3 +159,5 @@ History :pep:`625`. * August 2023: Source distribution archive features were standardized through :pep:`721`. +* December 2024: License files inclusion into source distribution was standardized + through :pep:`639`. diff --git a/source/specifications/version-specifiers.rst b/source/specifications/version-specifiers.rst index cde0bc49a..13015794f 100644 --- a/source/specifications/version-specifiers.rst +++ b/source/specifications/version-specifiers.rst @@ -226,7 +226,7 @@ part of the ``3.3`` release series. 
form to ``X.Y.0`` when comparing it to any release segment that includes three components. -Date based release segments are also permitted. An example of a date based +Date-based release segments are also permitted. An example of a date-based release scheme using the year and month of the release:: 2012.4 @@ -237,6 +237,8 @@ release scheme using the year and month of the release:: ... +.. _pre-release-versions: + Pre-releases ------------ @@ -341,6 +343,9 @@ post-releases:: X.YrcN.devM # Developmental release of a release candidate X.Y.postN.devM # Developmental release of a post-release +Do note that development releases are considered a type of pre-release when +handling them. + .. note:: While they may be useful for continuous integration purposes, publishing @@ -1011,8 +1016,9 @@ Arbitrary equality Arbitrary equality comparisons are simple string equality operations which do not take into account any of the semantic information such as zero padding or -local versions. This operator also does not support prefix matching as the -``==`` operator does. +local versions. The comparison MUST treat ASCII letters case-insensitively, e.g. +by lowercasing, and is unspecified for non-ASCII text. This operator also does +not support prefix matching as the ``==`` operator does. The primary use case for arbitrary equality is to allow for specifying a version which cannot otherwise be represented by this specification. This operator is @@ -1178,7 +1184,7 @@ more information on ``file://`` URLs on Windows see Summary of differences from pkg_resources.parse_version ======================================================= -* Note: this comparison is to ``pkg_resourses.parse_version`` as it existed at +* Note: this comparison is to ``pkg_resources.parse_version`` as it existed at the time :pep:`440` was written. After the PEP was accepted, setuptools 6.0 and later versions adopted the behaviour described here. @@ -1264,3 +1270,6 @@ History ======= - August 2014: This specification was approved through :pep:`440`. +- May 2025: Clarify that development releases are a form of pre-release when + they are handled. +- Nov 2025: Make arbitrary equality case insensitivity explicit. diff --git a/source/specifications/well-known-project-urls.rst b/source/specifications/well-known-project-urls.rst new file mode 100644 index 000000000..30fefd12b --- /dev/null +++ b/source/specifications/well-known-project-urls.rst @@ -0,0 +1,176 @@ +.. _`well-known-project-urls`: + +=================================== +Well-known Project URLs in Metadata +=================================== + +.. important:: + + This document is primarily of interest to metadata *consumers*, + who should use the normalization rules and well-known list below + to make their presentation of project URLs consistent across the + Python ecosystem. + + Metadata *producers* (such as build tools and individual package + maintainers) may continue to use any labels they please, within the + overall ``Project-URL`` length restrictions. However, when possible, users are + *encouraged* to pick meaningful labels that normalize to well-known + labels. + +.. note:: + + See :ref:`Writing your pyproject.toml - urls ` + for user-oriented guidance on choosing project URL labels in your package's + metadata. + +.. note:: This specification was originally defined in :pep:`753`. 
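Before the formal definitions below, here is an informal sketch of how a metadata consumer might put the pieces together. It uses the normalization procedure and a partial, illustrative subset of the well-known labels and aliases specified later in this document; the helper names are not part of the specification:

.. code-block:: python

    import string

    # Partial, illustrative subset of the well-known labels and aliases
    # listed later in this document; not an exhaustive mapping.
    WELL_KNOWN = {
        "homepage": "homepage",
        "source": "source", "repository": "source", "github": "source",
        "changelog": "changelog", "changes": "changelog", "whatsnew": "changelog",
        "documentation": "documentation", "docs": "documentation",
        "issues": "issues", "bugs": "issues", "tracker": "issues",
    }

    def normalize_label(label: str) -> str:
        # Delete ASCII punctuation and whitespace, then lowercase.
        removal_map = str.maketrans("", "", string.punctuation + string.whitespace)
        return label.translate(removal_map).lower()

    def well_known_label(raw_label: str):
        # Return the well-known label a raw label maps to, or None if it is custom.
        return WELL_KNOWN.get(normalize_label(raw_label))

    assert well_known_label("Home-page") == "homepage"
    assert well_known_label("What's New?") == "changelog"
    assert well_known_label("GitHub") == "source"
    assert well_known_label("Sponsor me!") is None  # custom label, shown as-is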
+ +:pep:`753` deprecates the :ref:`core-metadata-home-page` and +:ref:`core-metadata-download-url` metadata fields in favor of +:ref:`core-metadata-project-url`, and defines a normalization and +lookup procedure for determining whether a ``Project-URL`` is +"well-known," i.e. has the semantics assigned to ``Home-page``, +``Download-URL``, or other common project URLs. + +This allows indices (such as the Python Package Index) and other downstream +metadata consumers to present project URLs in a +consistent manner. + +.. _project-url-label-normalization: + +Label normalization +=================== + +.. note:: + + Label normalization is performed by metadata *consumers*, not metadata + producers. + +To determine whether a ``Project-URL`` label is "well-known," metadata +consumers should normalize the label before comparing it to the +:ref:`list of well-known labels `. + +The normalization procedure for ``Project-URL`` labels is defined +by the following Python function: + +.. code-block:: python + + import string + + def normalize_label(label: str) -> str: + chars_to_remove = string.punctuation + string.whitespace + removal_map = str.maketrans("", "", chars_to_remove) + return label.translate(removal_map).lower() + +In plain language: a label is *normalized* by deleting all ASCII punctuation +and whitespace, and then converting the result to lowercase. + +The following table shows examples of labels before (raw) and after +normalization: + +.. list-table:: + :header-rows: 1 + + * - Raw + - Normalized + * - ``Homepage`` + - ``homepage`` + * - ``Home-page`` + - ``homepage`` + * - ``Home page`` + - ``homepage`` + * - ``Change_Log`` + - ``changelog`` + * - ``What's New?`` + - ``whatsnew`` + * - ``github`` + - ``github`` + +.. _well-known-labels: + +Well-known labels +================= + +.. note:: + + The list of well-known labels is a living standard, maintained as part of + this document. + +The following table lists labels that are well-known for the purpose of +specializing the presentation of ``Project-URL`` metadata: + +.. list-table:: + :header-rows: 1 + + * - Label (Human-readable equivalent) + - Description + - Aliases + * - ``homepage`` (Homepage) + - The project's home page + - *(none)* + * - ``source`` (Source Code) + - The project's hosted source code or repository + - ``repository``, ``sourcecode``, ``github`` + * - ``download`` (Download) + - A download URL for the current distribution, equivalent to ``Download-URL`` + - *(none)* + * - ``changelog`` (Changelog) + - The project's comprehensive changelog + - ``changes``, ``whatsnew``, ``history`` + * - ``releasenotes`` (Release Notes) + - The project's curated release notes + - *(none)* + * - ``documentation`` (Documentation) + - The project's online documentation + - ``docs`` + * - ``issues`` (Issue Tracker) + - The project's bug tracker + - ``bugs``, ``issue``, ``tracker``, ``issuetracker``, ``bugtracker`` + * - ``funding`` (Funding) + - Funding Information + - ``sponsor``, ``donate``, ``donation`` + +Package metadata consumers may choose to render aliased labels the same as +their "parent" well known label, or further specialize them. + +Example behavior +================ + +The following shows the flow of project URL metadata from +``pyproject.toml`` to core metadata to a potential index presentation: + +.. 
code-block:: toml + :caption: Example project URLs in standard configuration + + [project.urls] + "Home Page" = "https://example.com" + DOCUMENTATION = "https://readthedocs.org" + Repository = "https://upstream.example.com/me/spam.git" + GitHub = "https://github.com/example/spam" + +.. code-block:: email + :caption: Core metadata representation + + Project-URL: Home page, https://example.com + Project-URL: DOCUMENTATION, https://readthedocs.org + Project-URL: Repository, https://upstream.example.com/me/spam.git + Project-URL: GitHub, https://github.com/example/spam + +.. code-block:: text + :caption: Potential rendering + + Homepage: https://example.com + Documentation: https://readthedocs.org + Source Code: https://upstream.example.com/me/spam.git + Source Code (GitHub): https://github.com/example/spam + +Observe that the core metadata appears in the form provided by the user +(since metadata *producers* do not perform normalization), but the +metadata *consumer* normalizes and identifies appropriate +human-readable equivalents based on the normalized form: + +* ``Home page`` becomes ``homepage``, which is rendered as ``Homepage`` +* ``DOCUMENTATION`` becomes ``documentation``, which is rendered as ``Documentation`` +* ``Repository`` becomes ``repository``, which is rendered as ``Source Code`` +* ``GitHub`` becomes ``github``, which is rendered as ``Source Code (GitHub)`` + (as a specialization of ``Source Code``) diff --git a/source/tutorials/installing-packages.rst b/source/tutorials/installing-packages.rst index 817148d06..3a9aa23bb 100644 --- a/source/tutorials/installing-packages.rst +++ b/source/tutorials/installing-packages.rst @@ -228,8 +228,8 @@ Currently, there are two common tools for creating Python virtual environments: (Python versions prior to 3.12 also installed :ref:`setuptools`). * :ref:`virtualenv` needs to be installed separately, but supports Python 2.7+ and Python 3.3+, and :ref:`pip`, :ref:`setuptools` and :ref:`wheel` are - always installed into created virtual environments by default (regardless of - Python version). + installed into created virtual environments by default. Note that ``setuptools`` is no longer + included by default starting with Python 3.12 (and ``virtualenv`` follows this behavior). The basic usage is like so: diff --git a/source/tutorials/managing-dependencies.rst b/source/tutorials/managing-dependencies.rst index db3b82533..bb67a60e3 100644 --- a/source/tutorials/managing-dependencies.rst +++ b/source/tutorials/managing-dependencies.rst @@ -177,3 +177,5 @@ and techniques, listed in alphabetical order, to see if one of them is a better structured as a distributable Python package with a valid ``pyproject.toml`` file. By contrast, Pipenv explicitly avoids making the assumption that the application being worked on will support distribution as a ``pip``-installable Python package. +* `uv `__ for a single tool that covers the entire project + management workflow, including dependency management, packaging, and publishing. diff --git a/source/tutorials/packaging-projects.rst b/source/tutorials/packaging-projects.rst index 4c205e28f..4f69de20b 100644 --- a/source/tutorials/packaging-projects.rst +++ b/source/tutorials/packaging-projects.rst @@ -136,38 +136,7 @@ The :file:`pyproject.toml` tells :term:`build frontend ` tools l examples for common build backends, but check your backend's own documentation for more details. -.. tab:: Hatchling - - .. code-block:: toml - - [build-system] - requires = ["hatchling"] - build-backend = "hatchling.build" - -.. 
tab:: setuptools - - .. code-block:: toml - - [build-system] - requires = ["setuptools>=61.0"] - build-backend = "setuptools.build_meta" - -.. tab:: Flit - - .. code-block:: toml - - [build-system] - requires = ["flit_core>=3.4"] - build-backend = "flit_core.buildapi" - -.. tab:: PDM - - .. code-block:: toml - - [build-system] - requires = ["pdm-backend"] - build-backend = "pdm.backend" - +.. include:: ../shared/build-backend-tabs.rst The ``requires`` key is a list of packages that are needed to build your package. The :term:`frontend ` should install them automatically when building your package. @@ -175,6 +144,8 @@ Frontends usually run builds in isolated environments, so omitting dependencies here may cause build-time errors. This should always include your backend's package, and might have other build-time dependencies. +The minimum version specified in the above code block is the one that introduced support +for :ref:`the new license metadata `. The ``build-backend`` key is the name of the Python object that frontends will use to perform the build. @@ -210,12 +181,13 @@ following this tutorial. ] description = "A small example package" readme = "README.md" - requires-python = ">=3.8" + requires-python = ">=3.9" classifiers = [ "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ] + license = "MIT" + license-files = ["LICEN[CS]E*"] [project.urls] Homepage = "https://github.com/pypa/sampleproject" @@ -242,11 +214,15 @@ following this tutorial. packages until it finds one that has a matching Python version. - ``classifiers`` gives the index and :ref:`pip` some additional metadata about your package. In this case, the package is only compatible with Python - 3, is licensed under the MIT license, and is OS-independent. You should - always include at least which version(s) of Python your package works on, - which license your package is available under, and which operating systems + 3 and is OS-independent. You should + always include at least which version(s) of Python your package works on + and which operating systems your package will work on. For a complete list of classifiers, see https://pypi.org/classifiers/. +- ``license`` is the :term:`SPDX license expression ` of + your :term:`Distribution Archive` files. +- ``license-files`` is the list of glob paths to the license files, + relative to the directory where :file:`pyproject.toml` is located. - ``urls`` lets you list any number of extra links to show on PyPI. Generally this could be to the source, documentation, issue trackers, etc. @@ -274,12 +250,12 @@ if you'd like. Creating a LICENSE ------------------ -It's important for every package uploaded to the Python Package Index to include -a license. This tells users who install your package the terms under which they -can use your package. For help picking a license, see -https://choosealicense.com/. Once you have chosen a license, open -:file:`LICENSE` and enter the license text. For example, if you had chosen the -MIT license: +It's important for every :term:`Distribution Archive` uploaded to the Python +Package Index to include a license. This tells users who install your +:term:`Distribution Archive` the terms under which they can use it. For help +picking a license, see https://choosealicense.com/. Once you have chosen a +license, open :file:`LICENSE` and enter the license text. For example, if you +had chosen the MIT license: .. 
code-block:: text

@@ -305,6 +281,9 @@ MIT license:

 Most build backends automatically include license files in packages. See your
 backend's documentation for more details.

+If you include the path to your license file in the ``license-files`` key of
+:file:`pyproject.toml`, and your build backend supports :pep:`639`,
+the file will be automatically included in the package.


 Including other files

@@ -421,16 +400,15 @@ Once installed, run Twine to upload all of the archives under :file:`dist`:

    py -m twine upload --repository testpypi dist/*

-You will be prompted for a username and password. For the username,
-use ``__token__``. For the password, use the token value, including
-the ``pypi-`` prefix.
+You will be prompted for an API token. Use the token value, including the ``pypi-``
+prefix. Note that the input will be hidden, so be sure to paste correctly.

 After the command completes, you should see output similar to this:

 .. code-block::

    Uploading distributions to https://test.pypi.org/legacy/
-   Enter your username: __token__
+   Enter your API token:
    Uploading example_package_YOUR_USERNAME_HERE-0.0.1-py3-none-any.whl
    100% ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 8.2/8.2 kB • 00:01 • ?
    Uploading example_package_YOUR_USERNAME_HERE-0.0.1.tar.gz