forked from github.com/pypiserver
Compare commits: elaborate- ... master
113 commits:
f65bc5bf6e, acff1bbab8, 5ca6004ebe, 31c9cf14d1, 2619c17602, d5886ae3d5,
6bfeddc1fc, 4ddfc3a077, 2f0a56c380, 84bf12cdd4, 946fbfe64e, d588913e75,
50c7a78f4f, a558dbcfb2, 2ab30ea2ec, 9109f61d66, e788785fa4, 904f50fa79,
ad9fb4a3b5, b4666c14a6, a49af99231, df99872921, c235ef44fd, 6417769207,
4645f7b10a, e54270207d, f14b92cfbf, 9edae77659, faf5ddfcc3, 9c2ebf9e60,
3f520cdc18, 85e065e1ad, 73dbe153e3, e0c9723ee7, a95f456a68, 8cc8e80a88,
754b0f40d7, 5fd640062c, abc4bfb418, 383c936fb8, d716d0faf3, ae3dcf2bbd,
2f3b9979c9, 4a0c6fbed0, 0ba44b54f1, 530852b279, 6ea316c4c1, 249cc6c7c4,
16280557c0, 784a9a134a, bbd2a47bae, 61e44871d4, 640a74872b, a67829eea6,
388658e624, c36fc51da2, 6502f031d4, 28959cfdbc, 5308fba405, c63b2b1337,
0e8a49dd1c, 4538014127, fd97b465dd, 1f26b20421, 04a1826b9a, e675ab71fd,
4485902b6d, 4d81ea7124, eb3d6941b1, cd1e17cc26, 3ba17777da, e039f4011f,
3950cdc4bc, f2330fa95e, e608e8645f, 3b1a0f828d, 22d47a53db, cd4bff5785,
a10cdcead3, 049ae42b1a, 050bc9e17c, ee912cf1d3, 64b4d21318, 057d3b469d,
bdbd839a1b, 652a7171a6, d34c99269a, a1d7264e03, 48688d7e4c, 2885ac2e6d,
be39eb2f05, a1002c5e99, 3713da9d66, 7693c03485, 4e189aec9c, fb81ab767a,
e7bc2bb75d, c205355253, a9414fb964, 1ae9f20e3f, d28fc966af, 43958e4548,
35fcdd0e06, 302b4a8a43, df300de33d, 8306de15db, d868005e1f, df7454ff20,
cf424c982d, 7688e1b2bd, d0694d9e15, 4b1bd1c9db, 4e1fd1eedc
.coveragerc (new file, 2 lines)

@@ -0,0 +1,2 @@
[run]
omit = pypiserver/bottle.py
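
The new .coveragerc keeps the vendored bottle module out of coverage reports.
A minimal local run that exercises it (an illustrative sketch, assuming
coverage and pytest are installed):

    pip install coverage pytest
    coverage run -m pytest tests   # picks up .coveragerc automatically
    coverage report                # pypiserver/bottle.py is omitted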
.dockerignore (modified)

@@ -1,8 +1,9 @@
*
!pypiserver
!requirements
!docker-requirements.txt
!entrypoint.sh
!README.rst
!docker/docker-requirements.txt
!docker/gunicorn.conf.py
!docker/entrypoint.sh
!README.md
!setup.cfg
!setup.py
.github/dependabot.yml (new file, 13 lines, vendored)

@@ -0,0 +1,13 @@
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/docker"
    schedule:
      interval: "monthly"
  - package-ecosystem: "pip"
    directory: "/requirements"
    schedule:
      interval: "monthly"
.github/workflows/ci.yml (new file, 272 lines, vendored)

@@ -0,0 +1,272 @@
# Run tests

name: CI

on:
  # This will run when any branch or tag is pushed
  push:
    branches:
      - "master"
    tags:
      - "v**"
  # Allowing to run on fork and other pull requests
  pull_request:

env:
  LAST_SUPPORTED_PYTHON: "3.12"

jobs:
  test-python:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # make sure to align the `python-version`s in the Matrix with env.LAST_SUPPORTED_PYTHON
        python-version: [
            "3.7",
            "3.8",
            "3.9",
            "3.10",
            "pypy3.9",
            "3.11",
            "3.12",
            "3.x", # make sure to test the current stable Python version
        ]

    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          pip install --upgrade setuptools
          pip install tox==3.27.*
      - name: Run tests
        run: tox -e py

  check:
    # These checks only need to be done once, not for every python version we
    # support
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Use the current version of Python
          python-version: ${{ env.LAST_SUPPORTED_PYTHON }}
      - name: Install dependencies
        run: |
          pip install -r "requirements/dev.pip"
          pip install types-pkg_resources # one of mypy required stubs
      - name: Check types
        # individual mypy files for now, until we get the rest
        # of the project typechecking
        run: >-
          mypy
          docker/test_docker.py
          pypiserver/config.py
          tests/test_init.py
      - name: Check formatting
        run: black --diff --check .
      - name: Validate README
        id: validate_readme
        run: mdformat --check README.md
        continue-on-error: true
      - name: check mdformat result
        run: |
          if [ "${{ steps.validate_readme.outcome }}" == "failure" ]; then
            echo "copy readme to /tmp/pypiserver"
            mkdir -p /tmp/pypiserver
            cp README.md /tmp/pypiserver
            echo "README.md is not formatted correctly. Please run 'mdformat README.md' and commit the result."
            mdformat /tmp/pypiserver/README.md
            diff -u README.md /tmp/pypiserver/README.md
            exit 1
          else
            echo "README.md is formatted correctly."
          fi

  # Full-flow docker tests, again not python version dependent
  # We _could_ test this on MacOS, but it takes forever to get docker
  # installed. I'm going to say for now probably 99% of people using
  # the docker image will be doing so from a linux system, e.g. for
  # a k8s deploy, and I've verified manually that things work on
  # MacOS, so /shrug.
  test-docker:
    runs-on: "ubuntu-latest"
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Use the current version of Python
          python-version: ${{ env.LAST_SUPPORTED_PYTHON }}
      - name: Install test dependencies
        run: pip install -r "requirements/test.pip"
      - name: Install package
        run: pip install -r "requirements/exe.pip"
      - name: Run tests
        run: "pytest docker/test_docker.py"

  tests:
    runs-on: "ubuntu-latest"
    needs:
      - "check"
      - "test-docker"
      - "test-python"
    steps:
      - name: "Everything is good!"
        run: "echo true"

  # RELEASES

  ## PYPI

  build-wheel-and-push-to-pypi:
    runs-on: ubuntu-latest
    needs:
      - "tests"
    steps:
      - uses: actions/checkout@master
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.LAST_SUPPORTED_PYTHON }}

      - name: Install dev dependencies
        run: pip install -r "requirements/dev.pip"

      - name: Build distribution _wheel_.
        run: |
          ./bin/package.sh

      - name: Publish distribution 📦 to PyPI.
        uses: pypa/gh-action-pypi-publish@release/v1
        # Push to PyPi only if a tag is pushed
        if: startsWith(github.event.ref, 'refs/tags/v')
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}
          print-hash: true

  ## DOCKER (DOCKER HUB & CONTAINER REGISTRY)

  # figure out which docker tags we need to push
  docker-determine-tags:
    runs-on: "ubuntu-latest"
    needs:
      - "tests"
    env:
      STABLE_IMAGES: '["pypiserver/pypiserver", "ghcr.io/pypiserver/pypiserver"]'
      FLEXIBLE_IMAGES: '["pypiserver/pypiserver"]'
    outputs:
      tags: "${{ steps.tags.outputs.tags }}"
      has_tags: "${{ steps.has_tags.outputs.has_tags }}"
      images: ${{ contains(steps.tags.outputs.tags, 'unstable') && env.FLEXIBLE_IMAGES || env.STABLE_IMAGES }}
    steps:
      - uses: "actions/checkout@v3"

      - uses: "actions/setup-python@v4"
        with:
          python-version: ${{ env.LAST_SUPPORTED_PYTHON }}

      # This script prints a JSON array of needed docker tags, depending on the
      # ref. That array is then used to construct the matrix of the
      # deploy-docker job
      - name: "Get expected docker tags"
        id: "tags"
        run: >-
          echo "::set-output name=tags::$(bin/ci_helper.py ${{ github.ref }} docker_tags)"

      # This is needed because GH actions will fail on an empty matrix, so
      # we need to be sure the `if` condition is false on the next job if
      # the matrix will be empty. The script prints 'true' if the array is
      # not empty, or 'false' otherwise.
      - name: "Determine whether any tags are needed"
        id: "has_tags"
        run: >-
          echo "::set-output name=has_tags::$(bin/ci_helper.py ${{ github.ref }} has_tags)"

  # Deploy any needed docker tags
  deploy-docker:
    runs-on: "ubuntu-latest"
    needs:
      - "docker-determine-tags"
    if: "${{ fromJson(needs.docker-determine-tags.outputs.has_tags) }}"
    strategy:
      matrix:
        tag: "${{ fromJson(needs.docker-determine-tags.outputs.tags) }}"
        image: "${{ fromJson(needs.docker-determine-tags.outputs.images) }}"
    steps:
      - uses: "actions/checkout@v3"

      - name: "Cache Docker layers"
        uses: "actions/cache@v3"
        with:
          path: "/tmp/.buildx-cache"
          key: "${{ runner.os }}-buildx-${{ github.sha }}"
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: "Login to Docker Hub"
        uses: "docker/login-action@v3"
        with:
          username: "${{ secrets.DOCKER_HUB_USER }}"
          password: "${{ secrets.DOCKER_HUB_TOKEN }}"

      - name: "Login to GitHub Container Registry"
        uses: "docker/login-action@v3"
        with:
          registry: "ghcr.io"
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: "Set up QEMU"
        uses: "docker/setup-qemu-action@v3"

      - name: "Set up Docker Buildx"
        id: "buildx"
        uses: "docker/setup-buildx-action@v3"

      - name: "Build and push"
        id: "docker_build"
        uses: "docker/build-push-action@v5"
        with:
          context: "./"
          platforms: linux/amd64,linux/arm64
          file: "./Dockerfile"
          builder: "${{ steps.buildx.outputs.name }}"
          push: true
          tags: "${{ matrix.image }}:${{ matrix.tag }}"
          cache-from: "type=local,src=/tmp/.buildx-cache"
          cache-to: "type=local,dest=/tmp/.buildx-cache"

      - name: "Image digest"
        run: "echo ${{ steps.docker_build.outputs.digest }}"

      - name: "Docker Hub Description"
        uses: peter-evans/dockerhub-description@v3
        with:
          username: ${{ secrets.DOCKER_HUB_USER }}
          password: ${{ secrets.DOCKER_HUB_TOKEN }}
          repository: pypiserver/pypiserver

  ## GITHUB RELEASE DRAFT

  create_release:
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    runs-on: "ubuntu-latest"
    needs:
      - "tests"
    steps:
      - uses: actions/checkout@v3

      - uses: softprops/action-gh-release@v1
        with:
          body: 👋 This is a draft release. Please update it manually.
          prerelease: false
          draft: true
          files: |
            CHANGES.rst
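
The release jobs above are all gated on version tags: the workflow only runs
for tags matching "v**", and the PyPI publish and draft-release steps check
for refs/tags/v*. An illustrative sketch of what triggers them (the tag name
here is a placeholder):

    git tag -a v2.1.1 -m "Automated release 2.1.1"
    git push --follow-tags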
.github/workflows/rc.yml (new file, 77 lines, vendored)

@@ -0,0 +1,77 @@
# Release Candidate GitHub Action

name: release_candidate

# Performed actions:
# - [x] create a new AUTO-RC-<DATE> branch
# - [x] prepare RC metadata and description
# - [x] update CHANGES.rst (+ rc-title, + date)
# - [x] create changes commit
# - [x] push to GH
# - [x] open a PR to `master`

on:
  schedule:
    - cron: "0 0 1 * *" # each 1st day of the month
  workflow_dispatch: # on manual trigger

jobs:
  new-rc:
    runs-on: ubuntu-latest
    env:
      CHANGES_FILE: CHANGES.rst
      PR_BODY_FILE: /tmp/pr-body.md
      RF_DOCS_FILE: ./docs/contents/repo-maintenance/release-work.md
    steps:
      - uses: actions/checkout@v3
        with:
          # Flag to fetch all history.
          # @see https://github.com/marketplace/actions/checkout#Fetch-all-history-for-all-tags-and-branches
          fetch-depth: 0

      - id: get-rc-date
        run: echo "RC_DATE=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"

      - id: make-pr-body-file
        run: |
          PR_BODY_FILE=${{ env.PR_BODY_FILE }}
          RC_DATE=${{ steps.get-rc-date.outputs.RC_DATE }}

          touch ${PR_BODY_FILE}
          echo "📦 Automated release candidate for ${RC_DATE}." >> ${PR_BODY_FILE}
          echo "" >> ${PR_BODY_FILE}
          echo "_TODO:_" >> ${PR_BODY_FILE}
          echo "- [ ] Manually adjust generated CHANGES lines" >> ${PR_BODY_FILE}
          echo "- [ ] Manually adjust generated CHANGES title" >> ${PR_BODY_FILE}
          echo "- [ ] Manually adjust generated CHANGES date" >> ${PR_BODY_FILE}
          echo "- [ ] Approve and merge this PR" >> ${PR_BODY_FILE}
          echo "- [ ] See \`${{ env.RF_DOCS_FILE }}\` to continue" >> ${PR_BODY_FILE}

          echo "${PR_BODY_FILE}:"
          cat ${PR_BODY_FILE}

      - id: propose-rc
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          CHANGES_FILE=${{ env.CHANGES_FILE }}
          PR_BODY_FILE=${{ env.PR_BODY_FILE }}
          RC_DATE=${{ steps.get-rc-date.outputs.RC_DATE }}
          git config user.name github-actions
          git config user.email github-actions@github.com
          git checkout -b auto-release-candidate-${RC_DATE}
          git push -u origin auto-release-candidate-${RC_DATE}

          git status
          git fetch

          ./bin/update_changelog.sh

          git add ${CHANGES_FILE}
          git commit -m "chore(rc-changes): update ${CHANGES_FILE}"
          git push

          gh pr create --title "chore(auto-release-candidate-${RC_DATE})" \
            --body-file ${PR_BODY_FILE} \
            --base master \
            --draft
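
Besides the monthly cron, the workflow_dispatch trigger means a maintainer can
start an RC on demand. A sketch using the GitHub CLI (assumes gh is
authenticated against the repo):

    gh workflow run release_candidate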
.github/workflows/rt.yml (new file, 56 lines, vendored)

@@ -0,0 +1,56 @@
# Release Tag workflow

name: release_tag

# Performed actions:
# - [x] infer the last RC version
# - [x] run bumpver.py with the new version
# - [x] push the commit and new tag
# - [x] support dry-run mode

on:
  workflow_dispatch: # on manual trigger
    inputs:
      dryrun:
        description: "Whether to run the release in a dry-run mode"
        default: true
        required: true
        type: boolean

jobs:
  new-tag:
    if: ${{ github.ref_name == 'master' }}
    runs-on: ubuntu-latest
    env:
      CHANGE_FILE: CHANGES.rst
      EXPECTED_DIFF_COUNT: 1
    steps:
      - uses: actions/checkout@v3

      - id: get-version
        run: |
          CHANGE_FILE=${{ env.CHANGE_FILE }}
          LAST_VERSION=$(grep -m1 -E ' \([0-9]+-[0-9]+-[0-9]+\)$' ${CHANGE_FILE} | awk '{ print $1 }')
          echo "👀 Version detected: ${LAST_VERSION}"
          echo "LAST_VERSION=${LAST_VERSION}" >> "$GITHUB_OUTPUT"

      - uses: actions/setup-python@v4
        with:
          python-version: "3.x"

      - id: install-requirements
        run: pip install -r "requirements/dev.pip"

      - name: run `bumpver`
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo ${{ inputs.dryrun && '💡 Running in dry-run mode' || 'Preparing release...' }}

          CHANGE_FILE=${{ env.CHANGE_FILE }}
          LAST_VERSION=${{ steps.get-version.outputs.LAST_VERSION }}
          git config user.name github-actions
          git config user.email github-actions@github.com

          python3 bin/bumpver.py ${{ inputs.dryrun && '-n' || '' }} -t "Automated release ${LAST_VERSION}" ${LAST_VERSION}
          git push --follow-tags
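
The get-version step scrapes the most recent dated entry out of CHANGES.rst.
Run against the changelog shown later in this diff, the pipeline resolves like
this (a sketch):

    grep -m1 -E ' \([0-9]+-[0-9]+-[0-9]+\)$' CHANGES.rst | awk '{ print $1 }'
    # "3.0.0 (tbd)" carries no date and is skipped; the first match is
    # "2.1.1 (2024-04-24)", so this prints: 2.1.1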
.gitignore (modified, 16 lines, vendored)

@@ -16,8 +16,8 @@
.ropeproject
ID
__pycache__/
/build/
/dist/
**/build/
**/dist/
/*.egg*
/MANIFEST
/README.html
@@ -26,9 +26,10 @@ __pycache__/
/.pydevproject
/.pytest_cache
/.tox/
/*.egg-info/
**/*.egg-info/
/.standalone
/.coverage*
!/.coveragerc
/htmlcov/
/.installed.cfg
/develop-eggs/
@@ -46,3 +47,12 @@ __pycache__/
.venv/
venv/

# Temporary / runtime generated files
traefik/
auth/

# Release Candidate Artifacts
rc/

# Various local temporary artifacts
.tmp/
.travis.yml (deleted, 28 lines)

@@ -1,28 +0,0 @@
sudo: required
language: python
services: docker

python:
  - 3.6
  - 3.7
  - 3.8
  - 3.9
  - pypy3

install:
  - pip install -U setuptools twine pip sphinx tox tox-travis

script:
  - ./bin/test-docker.sh
  - tox
  - ./bin/check_readme.sh

branches:
  except:
    - standalone

jobs:
  include:
    - python: 3.8
      install: pip install -U black
      script: black --check .
CHANGES.rst (modified, 104 lines)

@@ -1,9 +1,111 @@
Changelog
=========

2.0.0 (tbd)
3.0.0 (tbd)
-----------

2.1.1 (2024-04-24)
--------------------------

- 31c9cf1 FIX: deprecated `setuptools.py` when building in `package.sh` (#568)
- 2619c17 FIX: use the right env variables in `release-tag` workflow (#569)

2.1.0 (2024-04-24)
--------------------------

- d588913 ENH: Bump github action versions and add multiarch support (#553)
- a558dbc ENH: Handle tar.xz archives (#536)
- 2f0a56c FIX: support Python 3.12 (#539)
- 84bf12c MAINT: make the last supported python version explicit in `ci.yaml` (#558)
- 946fbfe MAINT: Update setuptools requirement from <62.0.0,>=40.0 to >=40.0,<70.0.0 in /requirements (#557)
- 50c7a78 MAINT: add tar xz test case (#538)

2.0.1 (2023-10-01)
--------------------------

- e788785 FIX: dockerhub description readme (#533)
- 904f50f FIX: specify long_description as MD type (#532)
- ad9fb4a MAINT: simpler release_tag action (#530)


2.0.0 (2023-10-01)
--------------------------

- df99872 FIX: Json Info for the same version (#511)
- c235ef4 ENH: Switch default hash-algo to sha256 (#459)
- 6417769 MAINT: add GitHub container registry for stable images (#521)
- 4645f7b MAINT: cleanup release process (#516)
- e542702 MAINT: TOC internal links (#520)
- f14b92c MAINT: readme output diff on format error (#512)
- 9edae77 ENH: Feature/markdown conversion (#503)

1.5.2 (2023-07-30)
--------------------------

- 3f520cd FIX: Add missing pip dependency (#500)
- 85e065e MAINT: Feat/dependabot (#493)
- 73dbe15 FIX: Health endpoint usage is missing. (#481)
- e0c9723 MAINT: Bump waitress from 1.4.4 to 2.1.2 in /docker (#454)
- a95f456 MAINT: update docs folder (#479)
- 8cc8e80 MAINT: Update README.rst and config.py (#470)
- 754b0f4 MAINT: add help output for `run` and `update` to README (#478)
- 5fd6400 MAINT: Update README to reflect run/update commands (#451)
- abc4bfb MAINT: Upgrade to psf/black stable style 2023 (#474)
- 383c936 MAINT: disable tests on Python3.6 (#471)
- d716d0f FIX: explicit optional types in `config.py` (#472)
- ae3dcf2 ENH: :stethoscope: allow customized health check endpoint (#442)
- 2f3b997 FIX: correct 1.5.1 tag date in CHANGES (#457)
- 4a0c6fb MAINT: from importlib import reload for Python 3 (#448)
- 0ba44b5 FIX: force setuptools update + no duplicate runs in GH Actions (#445)
- 530852b MAINT: Support current versions of CPython (#453)
- 6ea316c MAINT: Upgrade GitHub Actions (#447)

1.5.1 (2022-10-18)
--------------------------

- 61e4487 ENH: add extremely basic /health endpoint (#396)
- bbd2a47 FIX: docker tests in cicd (#444)
- 784a9a1 MAINT: Replace usage of deprecated inspect.getargspec (#436)
- 640a748 MAINT: Add traefik/ and auth/ dirs to gitignore (#398)
- a67829e MAINT: Fix typos in README (#431)

1.5.0 (2022-05-01)
------------------

- CHORE: pull CHANGES.rst from v1.4.x
- DOC: add call for maintainers to README
- DOC: Fixes in README examples (#380)
- DOC: start of empty contributor documentation directory (#383)
- ENH: added JSON Topic for use with micropython-upip (#395, thanks @awachtler)
- ENH: Backwards-compatible argparse config (not yet in use) (#339)
- ENH: Refactor storage operations into separate Backend classes (#348)
- ENH: Restore ability to drop hashing in new config (#347)
- ENH: Specify doctype for generated pages.
  Fixed violation of PEP 503 warning with pip>=22.
  (#413 and #414, thanks @domdfcoding and @luismsgomes)
- ENH: Use argparse config throughout app (#349)
- FIX: Fix silly typo in the readme :) (#362)
- FIX: fix small typing and lint errors in config.py (#421)
- FIX: fix version incompatibility that breaks twine in docker test (#356)
- FIX: fix(setuptools-in-tests) add `packages=[]` in test setup (#425)
- FIX: Gunicorn/gevent docker, log fixes, cache busting (#371)
- MAINT: Add aggregate "tests" job (#370)
- MAINT: Add release to PyPi job (#428)
- MAINT: add github action for release management (#417)
- MAINT: Add python 3.9 testing (#351, disabled in #407)
- MAINT: Cleanup code to python 3.6 (#342)
- MAINT: contributor update in README (#415)
- MAINT: disable unstable test for Python 3.9 (#407, planned temporary)
- MAINT: Docker improvements (#365)
- MAINT: drop standalone, drop py 2.7 and 3.5 (#338)
- MAINT: Merge branch 'v1.4.x'
- MAINT: Push to Docker Hub from CI (#375)
- MAINT: Refactor test_server to increase speed (#354)
- MAINT: Run black on codebase (#336)
- MAINT: run gh actions on PRs from forks #401
- MAINT: small code style fixes
- MAINT: Switch to GH actions (#361)

1.4.2 (2020-10-10)
------------------
Dockerfile (modified, 42 lines)

@@ -31,26 +31,37 @@ RUN apk add --no-cache --virtual .build-deps \

FROM base AS builder_dependencies

COPY pypiserver /code/pypiserver
COPY requirements /code/requirements
COPY docker-requirements.txt /code
COPY setup.cfg /code
COPY setup.py /code
COPY README.rst /code
WORKDIR /code

COPY docker/docker-requirements.txt .

# Install requirements
RUN apk add --no-cache --virtual .build-deps \
    build-base \
    libffi-dev \
    && mkdir /install \
    && python -m pip install --no-warn-script-location \
        --prefix=/install \
        /code --requirement /code/docker-requirements.txt
    && python -m pip install \
        --no-warn-script-location \
        --prefix=/install \
        --requirement docker-requirements.txt

# Install pypiserver
# - do this separately from deps so that when developing, every change does not
#   require reinstalling deps
COPY pypiserver pypiserver
COPY setup.cfg .
COPY setup.py .
COPY README.md .
RUN python -m pip install --no-warn-script-location --prefix=/install .

FROM base

WORKDIR /data
# Copy the libraries installed via pip
COPY --from=builder_dependencies /install /usr/local
COPY --from=builder_gosu /usr/local/bin/gosu /usr/local/bin/gosu
COPY entrypoint.sh /entrypoint.sh
COPY docker/entrypoint.sh /entrypoint.sh
COPY docker/gunicorn.conf.py /data

# Use a consistent user and group ID so that linux users
# can create a corresponding system user and set permissions
@@ -61,11 +72,14 @@ RUN apk add bash \
    && addgroup -S -g 9898 pypiserver \
    && adduser -S -u 9898 -G pypiserver pypiserver --home /data\
    && mkdir -p /data/packages \
    && chmod +x /entrypoint.sh
    && chmod +x /entrypoint.sh

VOLUME /data/packages
WORKDIR /data
ENV PORT=8080
EXPOSE $PORT
ENV PYPISERVER_PORT=8080
# PORT is deprecated. Please use PYPISERVER_PORT instead
ENV PORT=$PYPISERVER_PORT
# Flush logs immediately to stdout
ENV PYTHONUNBUFFERED=t
EXPOSE $PYPISERVER_PORT

ENTRYPOINT ["/entrypoint.sh"]
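
A quick way to sanity-check the image locally, mirroring what
docker/test_docker.py (below) does in CI (an illustrative sketch, run from the
repo root):

    docker build --file Dockerfile --tag pypiserver:test .
    docker run --rm pypiserver:test --help
    docker run --rm --publish 8080:8080 pypiserver:test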
Makefile (new file, 23 lines)

@@ -0,0 +1,23 @@
##
# pypiserver
#
# this makefile is used to help with building resources needed for testing
#
# @file
# @version 0.1

SHELL = /bin/sh

MYPKG_SRC = fixtures/mypkg/setup.py $(shell find fixtures/mypkg/mypkg -type f -name '*.py')

# Build the test fixture package.
mypkg: fixtures/mypkg/dist/pypiserver_mypkg-1.0.0.tar.gz
mypkg: fixtures/mypkg/dist/pypiserver_mypkg-1.0.0-py2.py3-none-any.whl
fixtures/mypkg/dist/pypiserver_mypkg-1.0.0.tar.gz: $(MYPKG_SRC)
	cd fixtures/mypkg; python setup.py sdist

fixtures/mypkg/dist/pypiserver_mypkg-1.0.0-py2.py3-none-any.whl: $(MYPKG_SRC)
	cd fixtures/mypkg; python setup.py bdist_wheel


# end
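
Typical usage (a sketch): make rebuilds the fixture only when the sources
under fixtures/mypkg change, thanks to the file targets above.

    make mypkg
    ls fixtures/mypkg/dist/
    # pypiserver_mypkg-1.0.0-py2.py3-none-any.whl
    # pypiserver_mypkg-1.0.0.tar.gz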
README.rst (1005 lines changed; file diff suppressed because it is too large)
bin/README.md (new file, 31 lines)

@@ -0,0 +1,31 @@
# Build scripts folder

## Highlight files

- `bumpver.py` : Bump, commit and tag new project versions
- `package.sh` : Build deployable artifact (wheel) in `/dist/` folder.

## Fully manual release check-list

1. Update `/CHANGES.rst` (+ Title + Date).

2. Push to GitHub to run all TCs once more.

3. Bump version: commit & tag it with `/bin/bumpver.py`. Use `--help`.
   > 💡 Read [PEP-440](https://www.python.org/dev/peps/pep-0440/) to decide the version.

4. Push it in GitHub with `--follow-tags`.

### Manually publishing a new package

1. Generate package *wheel* with `/bin/package.sh`.

2. Upload to PyPi with `twine upload -s -i <gpg-user> dist/*`

3. Ensure that the new tag is built on
   [`hub.docker.com`](https://hub.docker.com/r/pypiserver/pypiserver)
   as `latest` and as a direct tag reference.

4. Copy release notes from `/CHANGES.rst` in GitHub as new *"release"*
   page on the new tag.
   > 💡 Check syntactic differences between `.md` and `.rst` files.
bin/README.rst (deleted, 36 lines)

@@ -1,36 +0,0 @@
====================
Build scripts folder
====================


Files:
======

- ``bumpver.py`` : Bump, commit and tag new project versions
- ``check_readme.sh`` : Check that README has no RsT-syntactic errors.
- ``package.sh`` : Build deployable artifact (wheel) in ``/dist/`` folder.
- ``README.rst`` : This file.


Release check-list:
===================

1. Update ``/CHANGES.rst`` (+ Title + Date) & ``/README.rst`` (Date,
   not version).

2. Push to GitHub to run all TCs once more.

3. Bump version: commit & tag it with ``/bin/bumpver.py``.
   Use ``--help``.
   Read `PEP-440 <https://www.python.org/dev/peps/pep-0440/>`_ to decide the version.

4. Push it in GitHub with ``--follow-tags``.

5. Generate package *wheel* with ``/bin/package.sh``.

6. Upload to PyPi with ``twine upload -s -i <gpg-user> dist/*``

7. Ensure that the new tag is built on hub.docker.com as ``latest`` and as a
   direct tag reference.

8. Copy release notes from ``/CHANGES.rst`` in GitHub as new *"release"* page
   on the new tag. Check syntactic differences between ``.md`` and ``.rst`` files.
bin/bumpver.py (modified)

@@ -35,21 +35,22 @@ EXAMPLE:

"""

import os.path as osp
import sys
import re
import functools as fnt
import functools as fnt
import os.path as osp
import re
import sys
from datetime import datetime

import docopt


my_dir = osp.dirname(__file__)

VFILE = osp.join(my_dir, "..", "pypiserver", "__init__.py")
VFILE_regex_v = re.compile(r'version *= *__version__ *= *"([^"]+)"')
VFILE_regex_d = re.compile(r'__updated__ *= *"([^"]+)"')
VFILE_regex_version = re.compile(r'version *= *__version__ *= *"([^"]+)"')
VFILE_regex_datetime = re.compile(r'__updated__ *= *"([^"]+)"')
VFILE_regex_date = re.compile(r'__updated__ *= *"([^"\s]+)\s')

RFILE = osp.join(my_dir, "..", "README.rst")
RFILE = osp.join(my_dir, "..", "README.md")

PYTEST_ARGS = [osp.join("tests", "test_docs.py")]

@@ -58,6 +59,13 @@
class CmdException(Exception):
    pass


def get_current_date_info() -> (str, str):
    now = datetime.now()
    new_datetime = now.strftime("%Y-%m-%d %H:%M:%S%z")
    new_date = now.strftime("%Y-%m-%d")
    return (new_datetime, new_date)


@fnt.lru_cache()
def read_txtfile(fpath):
    with open(fpath, "rt", encoding="utf-8") as fp:

@@ -138,7 +146,6 @@
def do_commit(new_ver, old_ver, dry_run, amend, ver_files):
    import pathlib

    # new_ver = strip_ver2_commonprefix(old_ver, new_ver)
    cmt_msg = "chore(ver): bump %s-->%s" % (old_ver, new_ver)

    ver_files = [pathlib.Path(f).as_posix() for f in ver_files]

@@ -183,11 +190,12 @@ def bumpver(
        cmd.append(RFILE)
        exec_cmd(cmd)

    regexes = [VFILE_regex_v, VFILE_regex_d]
    old_ver, old_date = extract_file_regexes(VFILE, regexes)
    regexes = [VFILE_regex_version, VFILE_regex_datetime, VFILE_regex_date]
    old_ver, old_datetime, old_date = extract_file_regexes(VFILE, regexes)

    if not new_ver:
        yield old_ver
        yield old_datetime
        yield old_date
    else:
        if new_ver == old_ver:

@@ -199,12 +207,13 @@
            msg += "!\n Use of --force recommended."
            raise CmdException(msg % new_ver)

        from datetime import datetime

        new_date = datetime.now().strftime("%Y-%m-%d %H:%M:%S%z")

        new_datetime, new_date = get_current_date_info()
        ver_files = [osp.normpath(f) for f in [VFILE, RFILE]]
        subst_pairs = [(old_ver, new_ver), (old_date, new_date)]
        subst_pairs = [
            (old_ver, new_ver),
            (old_datetime, new_datetime),
            (old_date, new_date),
        ]

        for repl in replace_substrings(ver_files, subst_pairs):
            new_txt, fpath, replacements = repl

@@ -258,6 +267,7 @@ def main(*args):
    except CmdException as ex:
        sys.exit(str(ex))
    except Exception as ex:
        print("Unexpected error happened.")
        raise ex
bin/check_readme.sh (deleted, 37 lines)

@@ -1,37 +0,0 @@
#!/bin/bash
#-*- coding: utf-8 -*-
#
## Checks that README has no RsT-syntactic errors.
# Since it is used by `setup.py`'s `description` if it has any errors,
# PyPi would fail parsing them, ending up with an ugly landing page,
# when uploaded.

>&2 echo "+++ Checking README for PyPy...."
set +x ## Enable for debug

my_dir=`dirname "$0"`
cd $my_dir/..

py=""
rst="rst2html"
if [ ! -x "`which $rst 2>/dev/null`" ]; then
    ## In WinPython, only a python-script exist in PATH,
    # so execute it with python-interpreter.
    #
    exe="`which rst2html.py 2> /dev/null`"
    if [ $? -eq 0 ]; then
        py=python
        rst="$exe"
    else
        echo -e "Cannot find 'rst2html'! \n Sphinx installed? `pip show sphinx`" &&
        exit 1
    fi

    if [ -x "`which cygpath`" ]; then
        rst="`cygpath -w $rst`"
    fi
fi

export PYTHONPATH='$my_dir/..'
#python setup.py --long-description > t.rst ## Uncomment to inspect it.
python setup.py --long-description | $py "$rst" --halt=warning > /dev/null && echo OK
bin/ci_helper.py (new executable file, 88 lines)

@@ -0,0 +1,88 @@
#!/usr/bin/env python3

"""Output expected docker tags to build in CI."""

import json
import typing as t
import re
from argparse import ArgumentParser, Namespace


RELEASE_RE = re.compile(r"v[0-9]+\.[0-9]+\.[0-9]+(\.post[0-9]+)?")
PRE_RELEASE_RE = re.compile(r"v[0-9]+\.[0-9]+\.[0-9]+(a|b|c|\.?dev)[0-9]+")


def parse_args() -> Namespace:
    """Parse cmdline args."""
    parser = ArgumentParser()
    parser.add_argument(
        "ref",
        help=(
            "The github ref for which CI is running. This may be a full ref "
            "like refs/tags/v1.2.3 or refs/heads/master, or just a tag/branch "
            "name like v1.2.3 or master."
        ),
    )
    parser.add_argument(
        "action",
        help=("The action to perform"),
        choices=("docker_tags", "pypi_release", "has_tags"),
    )
    return parser.parse_args()


def strip_ref_to_name(ref: str) -> str:
    """Strip a full ref to a name."""
    strips = ("refs/heads/", "refs/tags/")
    for strip in strips:
        if ref.startswith(strip):
            return ref[len(strip) :]
    return ref


def name_to_array(name: str) -> t.Tuple[str, ...]:
    """Convert a ref name to an array of tags to build."""
    tags: t.Dict[str, t.Callable[[str], bool]] = {
        # unstable for any master build
        "unstable": lambda i: i == "master",
        # latest goes for full releases
        "latest": lambda i: RELEASE_RE.fullmatch(i) is not None,
        # the tag itself for any release or pre-release tag
        name: lambda i: (
            RELEASE_RE.fullmatch(i) is not None
            or PRE_RELEASE_RE.fullmatch(i) is not None
        ),
    }
    return tuple(tag for tag, test in tags.items() if test(name))


def ref_to_json(ref: str) -> str:
    """Convert a ref to a JSON array and return it as a string."""
    array = name_to_array(strip_ref_to_name(ref))
    return json.dumps(array)


def should_deploy_to_pypi(ref: str) -> str:
    """Return a JSON bool indicating whether we should deploy to PyPI."""
    name = strip_ref_to_name(ref)
    return json.dumps(
        RELEASE_RE.fullmatch(name) is not None
        or PRE_RELEASE_RE.fullmatch(name) is not None
    )


def main() -> None:
    """Parse args and print the JSON array."""
    args = parse_args()
    action_switch: t.Dict[str, t.Callable[[], None]] = {
        "docker_tags": lambda: print(ref_to_json(args.ref)),
        "has_tags": lambda: print(
            json.dumps(len(name_to_array(strip_ref_to_name(args.ref))) > 0)
        ),
        "pypi_release": lambda: print(should_deploy_to_pypi(args.ref)),
    }
    action_switch[args.action]()


if __name__ == "__main__":
    main()
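
A sketch of the script's behavior for a few refs, with outputs derived from
the regexes above:

    bin/ci_helper.py refs/tags/v1.2.3 docker_tags    # ["latest", "v1.2.3"]
    bin/ci_helper.py refs/heads/master docker_tags   # ["unstable"]
    bin/ci_helper.py refs/heads/my-feature has_tags  # false
    bin/ci_helper.py refs/tags/v1.2.3 pypi_release   # true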
bin/package.sh (modified)

@@ -5,5 +5,5 @@
my_dir=`dirname "$0"`
cd $my_dir/..

rm -r build/* dist/*
python setup.py bdist_wheel sdist
rm -r build/* dist/* || echo "no build/* or dist/* folder is found"
python3 -m build
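
The switch to `python3 -m build` relies on the PyPA "build" frontend, which
produces both the sdist and the wheel under ./dist/. A local sketch (assumes
build is installable via pip):

    pip install build
    ./bin/package.sh
    ls dist/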
Binary file not shown.
bin/test-docker.sh (deleted, 48 lines)

@@ -1,48 +0,0 @@
#!/usr/bin/env sh

# Perform some simple validation to make sure the Docker image works
# Should be run from the repo root.

set -xe # exit on any error, show debug output

DIR="$( cd "$( dirname "$0" )" >/dev/null 2>&1 && pwd )"

docker build . -t pypiserver:test

docker run pypiserver:test --help > /dev/null

# Mount our htpasswd file, which contains a test user with a bcrypt-encrypted
# "test" password
CONTAINER_ID=$(docker run \
    -d \
    -v "${DIR}/test.htpasswd:/data/.htpasswd" \
    -p 8080:8080 \
    pypiserver:test -a "list,update,download" -P /data/.htpasswd packages)

trap "docker container stop $CONTAINER_ID" EXIT

sleep 15 # give the container some time to get going

# Ensure we can authenticate locally
RET=$(curl localhost:8080)
echo $RET
echo $RET | grep -q "pypiserver"

RET=$(curl localhost:8080/packages/)
echo $RET
echo $RET | grep -q "401"


RET=$(curl test:test@localhost:8080/packages/)
echo $RET
echo $RET | grep -q "Index of packages"

twine upload \
    -u test \
    -p test \
    --repository-url http://localhost:8080 \
    "${DIR}/pypiserver-1.2.6-py2.py3-none-any.whl"

RET=$(curl test:test@localhost:8080/packages/)
echo $RET
echo $RET | grep -q "pypiserver-1.2.6"
bin/test.htpasswd (deleted, 2 lines)

@@ -1,2 +0,0 @@
test:$2y$05$0wU8vmgucWeyLyqxB.mm1OOPf660/exARXPN5uC.gHaWziv7C4t/m
bin/update_changelog.sh (new executable file, 116 lines)

@@ -0,0 +1,116 @@
#!/usr/bin/env bash

# Script to create a new RC entry
#
# Actions:
# 1. find latest published version
# 2. find the TBD planned big version
# 3. create new RC version entry
# 4. add change log entries

set -e # exit on errors

# TODO: provide that as parameters?

CHANGE_FILE='CHANGES.rst'
RC_DATE=$(date +'%m-%d-%Y')
WORKSPACE_DIR="${GITHUB_WORKSPACE:-.}/rc"
TMP_CHANGE_LOG="${WORKSPACE_DIR}/rc-${RC_DATE}.txt"


############
# CLEANUPS #
############

rm -rf $TMP_CHANGE_LOG
mkdir -p $WORKSPACE_DIR


##################
# INITIALIZATION #
##################

echo "Updating $CHANGE_FILE:"

# TODO(tech-debt): get `LAST_VERSION` with a separate bash script
LAST_VERSION=$(grep -m1 -E ' \([0-9]+-[0-9]+-[0-9]+\)$' $CHANGE_FILE | awk '{ print $1 }')

echo "Detected last release version: $LAST_VERSION"


###################
# VERSION BUMPING #
###################


echo "Bumping patch version..."
MAJOR_COLUMN=1
MINOR_COLUMN=2
PATCH_COLUMN=3

# `awk` is used to bump the PATCH version since the last public release.
# -F - gives a separator for splitting the original release into columns.
# -v - provides a value for variable to be used in the `awk` command.
# -v K=$PATCH_COLUMN - provides value for `K` - the version column to bump.
# This attempts to preserve the a standard syntax for GNU Awk.
# More can be found here: https://www.gnu.org/software/gawk/manual/gawk.html
BUMPED_VERSION=$(echo $LAST_VERSION | awk -F. -v K=$PATCH_COLUMN '{$K+=1; print $0}' OFS='.')

echo "Bumped to new candidate version: $BUMPED_VERSION"

RC_VERSION=${BUMPED_VERSION}rc${RC_DATE}

echo "Final RC version: $RC_VERSION"


###################
# CHANGELOG ENTRY #
###################


CHANGE_DIFF_TARGETS="v${LAST_VERSION}..HEAD"
VERSION_TITLE="${RC_VERSION} (__rc__)"
# Using GNU Awk syntax: -v LL specifies the title pattern variable.
TITLE_LINE=$(awk -v LL=${#VERSION_TITLE} 'BEGIN{for(c=0;c<LL;c++) printf "-"}')
VERSION_HEADER="$VERSION_TITLE\n${TITLE_LINE}"

# DEBUG INFO
echo -e "Comparing versions between: $CHANGE_DIFF_TARGETS\n"

# VERSION HEADER:
echo -e "$VERSION_HEADER\n" >> $TMP_CHANGE_LOG

# COLLECT ALL COMMITS:
git log --pretty=oneline --abbrev-commit $CHANGE_DIFF_TARGETS | sed 's/^/- /' >> $TMP_CHANGE_LOG
# DEBUG:
git log --pretty=oneline --abbrev-commit $CHANGE_DIFF_TARGETS | sed 's/^/- /'

# CHECK FINAL CONTENT
echo -e "\nCollected info:"
ls $WORKSPACE_DIR
cat $TMP_CHANGE_LOG

# APPEND INFO TO CHANGE FILE:
# 1. Finds the first (tbd) release
# 2. Populates space between (tbd) release and the latest one with RC changes
# NB: supporting macos and linux interoperability
# see https://stackoverflow.com/questions/43171648/sed-gives-sed-cant-read-no-such-file-or-directory
if [[ "$OSTYPE" == "darwin"* ]]; then
    # begin: mac os support
    sed -i '' "/^[0-9]\.0\.0.*\(tbd\)/{N;G;r\
\
$TMP_CHANGE_LOG
\
}" $CHANGE_FILE
    # end;
else
    # begin: linux support
    sed -i "/^[0-9]\.0\.0.*\(tbd\)/{N;G;r\
\
$TMP_CHANGE_LOG
\
}" $CHANGE_FILE
    # end;
fi

# CHANGE_LOG_CONTENTS=$(cat $TMP_CHANGE_LOG)
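
A sketch of the patch-bump awk one-liner in isolation:

    echo 2.1.1 | awk -F. -v K=3 '{$K+=1; print $0}' OFS='.'
    # 2.1.2
    # With RC_DATE appended (date format %m-%d-%Y, per RC_DATE above), the
    # resulting RC version would look like: 2.1.2rc04-24-2024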
@@ -88,6 +88,8 @@ except ImportError:
    ez["use_setuptools"](**setup_args)

    if to_reload:
        from importlib import reload

        reload(pkg_resources)
    import pkg_resources
docker-requirements.txt (deleted, 3 lines)

@@ -1,3 +0,0 @@
passlib==1.7.2
bcrypt==3.1.7
watchdog==0.10.3
docker/README.md (new file, 14 lines)

@@ -0,0 +1,14 @@
<!-- -*-GFM-*- -->

# Docker Resources and Tests

This directory contains resources and tests for the docker image.

Note that for these tests to run, the pytest process must be able to run
`docker`. If you are on a system where that requires `sudo`, you will need to
run the tests with `sudo`.

Tests are here rather than in `/tests` because there's no reason to run these
tests as part of the usual `tox` process, which is run in CI against every
supported Python version. We only need to run the Docker tests once.
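
The same invocation CI uses for these tests (a sketch; prefix the pytest call
with sudo if your user cannot run docker directly):

    pip install -r requirements/test.pip -r requirements/exe.pip
    pytest docker/test_docker.py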
docker/docker-requirements.txt (new file, 11 lines)

@@ -0,0 +1,11 @@
# We use gunicorn as the default server in the docker container, with gevent
# workers
gevent==21.12.0
gunicorn==20.0.4
passlib==1.7.4
bcrypt==3.2.0
# If a user overrides args but does not override the server arg, we fall back to
# whatever bottle chooses as a default. Since the wsgiref server is not
# production-ready, install waitress as a fallback for these cases.
waitress==2.1.2
watchdog==1.0.2
docker/entrypoint.sh (new executable file, 132 lines)

@@ -0,0 +1,132 @@
#!/usr/bin/env bash

set -euo pipefail

function run() {
    # we're not root. Run as who we are.
    if [[ "$EUID" -ne 0 ]]; then
        eval "$@"
    else
        gosu pypiserver "$@"
    fi
}

if [[ "$EUID" -ne 0 && "$EUID" -ne $(id -u pypiserver) ]]; then
    USER_ID="$EUID"
    WARN=(
        "The pypiserver container was run as a non-root, non-pypiserver user."
        "Pypiserver will be run as this user if possible, but this is not"
        "officially supported."
    )
    echo "" 1>&2
    echo "${WARN[@]}" 1>&2
    echo "" 1>&2
else
    USER_ID=$(id -u pypiserver)
fi


function print_permissions_help() {
    MSG1=(
        "If you are mounting a volume at /data or /data/packages and are running the"
        "container on a linux system, you may need to add add a pypiserver"
        "group to the host and give it permission to access the directories."
        "Please see https://github.com/pypiserver/pypiserver/issues/256 for more"
        "details."
    )
    MSG2=(
        "Please see https://github.com/pypiserver/pypiserver/issues/256 for more"
        "details."
    )
    echo "" 1>&2
    echo "${MSG1[@]}" 1>&2
    echo "" 1>&2
    echo "${MSG2[@]}" 1>&2
}


# the user must have read and execute access to the /data directory
# (execute to be able to cd into directory and list content metadata)
if ! run test -r /data -a -x /data; then

    chown -R "$USER_ID:pypiserver" /data || true

    if ! run test -r /data -a -x /data; then
        FAIL_MSG=(
            "Cannot start pypiserver:"
            "pypiserver user (UID $USER_ID)"
            "or pypiserver group (GID $(id -g pypiserver))"
            "must have read/execute access to /data"
        )
        echo "${FAIL_MSG[@]}" 1>&2
        echo "" 1>&2
        print_permissions_help
        exit 1
    fi

fi

# The /data/packages directory must exist
# It not existing is very unlikely, possibly impossible, because the VOLUME
# specification in the Dockerfile leads to its being created even if someone is
# mounting a volume at /data that does not contain a /packages subdirectory
if [[ ! -d "/data/packages" ]]; then
    if ! run test -w /data; then
        FAIL_MSG=(
            "Cannot start pypiserver:"
            "/data/packages does not exist and"
            "pypiserver user (UID $USER_ID)"
            "or pypiserver group (GID $(id -g pypiserver))"
            "does not have write access to /data to create it"
        )
        echo "" 1>&2
        echo "${FAIL_MSG[@]}" 1>&2
        print_permissions_help
        exit 1
    fi
    run mkdir /data/packages
fi

# The pypiserver user needs read/write/execute access to the packages directory
if ! run \
    test -w /data/packages \
    -a -r /data/packages \
    -a -x /data/packages; then

    # We'll try to chown as a last resort.
    # Don't complain if it fails, since we'll bomb on the next check anyway.
    chown -R "$USER_ID:pypiserver" /data/packages || true

    if ! run \
        test -w /data/packages \
        -a -r /data/packages \
        -a -x /data/packages; then
        FAIL_MSG=(
            "Cannot start pypiserver:"
            "pypiserver user (UID $USER_ID)"
            "or pypiserver group (GID $(id -g pypiserver))"
            "must have read/write/execute access to /data/packages"
        )
        echo "" 1>&2
        echo "${FAIL_MSG[@]}" 1>&2
        print_permissions_help
        exit 1
    fi

fi


if [[ "$*" == "" ]]; then
    # Use the gunicorn server by default, since it's more performant than
    # bottle's default server
    CMD=("run" "-p" "${PYPISERVER_PORT:-$PORT}" "--server" "gunicorn")
else
    # this reassigns the array to the CMD variable
    CMD=( "${@}" )
fi

if [[ "$EUID" -ne 0 ]]; then
    exec pypi-server "${CMD[@]}"
else
    exec gosu pypiserver pypi-server "${CMD[@]}"
fi
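
One way to satisfy these permission checks for a host-mounted packages
directory is to reuse the image's fixed GID (9898, set in the Dockerfile
above). A sketch with illustrative paths:

    sudo groupadd --gid 9898 pypiserver
    sudo chgrp -R 9898 ./packages
    sudo chmod -R g+rwx ./packages
    docker run --rm --publish 8080:8080 \
        --volume "$PWD/packages:/data/packages" pypiserver:test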
docker/gunicorn.conf.py (new file, 14 lines)

@@ -0,0 +1,14 @@
"""Default gunicorn config for the docker environment.

To override, mount a new gunicorn config at /data/gunicorn.conf.py in your
Docker container.
"""

# pylint: disable=invalid-name

# Enable to log every request
# accesslog = "-"
errorlog = "-"
preload_app = True
workers = 1
worker_class = "gevent"
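
Overriding per the docstring looks roughly like this (a sketch;
my-gunicorn.conf.py is a hypothetical local file):

    docker run --rm --publish 8080:8080 \
        --volume "$PWD/my-gunicorn.conf.py:/data/gunicorn.conf.py" \
        pypiserver:test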
docker/test_docker.py (new file, 598 lines; diff truncated below)

@@ -0,0 +1,598 @@
"""Tests for the Pypiserver Docker image."""

import contextlib
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import time
import typing as t
from pathlib import Path

import httpx
import pypiserver
import pytest


PYPISERVER_PROCESS_NAME = "pypi-server"
TEST_DEMO_PIP_PACKAGE = "pypiserver-mypkg"

THIS_DIR = Path(__file__).parent
ROOT_DIR = THIS_DIR.parent
DOCKERFILE = ROOT_DIR / "Dockerfile"
FIXTURES = ROOT_DIR / "fixtures"
MYPKG_ROOT = FIXTURES / "mypkg"
HTPASS_FILE = FIXTURES / "htpasswd.a.a"


# This rule is largely useless when using pytest because of the need to use the
# name of the fixture as an argument to the test function or fixture using it
# pylint: disable=redefined-outer-name
#
# Also useless rule for our test context, where we may want to group test
# functions in a class to share common fixtures, but where we don't care about
# the `self` instance.
# pylint: disable=no-self-use


@pytest.fixture(scope="session")
def image() -> str:
    """Build the docker image for pypiserver.

    Return the tag.
    """
    tag = "pypiserver:test"
    run(
        "docker",
        "build",
        "--file",
        str(DOCKERFILE),
        "--tag",
        tag,
        str(ROOT_DIR),
        cwd=ROOT_DIR,
    )
    return tag


@pytest.fixture(scope="session")
def mypkg_build() -> None:
    """Ensure the mypkg test fixture package is build."""
    # Use make for this so that it will skip the build step if it's not needed
    run("make", "mypkg", cwd=ROOT_DIR)


@pytest.fixture(scope="session")
def mypkg_paths(
    mypkg_build: None,  # pylint: disable=unused-argument
) -> t.Dict[str, Path]:
    """The path to the mypkg sdist file."""
    dist_dir = Path(MYPKG_ROOT) / "dist"
    assert dist_dir.exists()

    sdist = dist_dir / "pypiserver_mypkg-1.0.0.tar.gz"
    assert sdist.exists()

    wheel = dist_dir / "pypiserver_mypkg-1.0.0-py2.py3-none-any.whl"
    assert wheel.exists()

    return {
        "dist_dir": dist_dir,
        "sdist": sdist,
        "wheel": wheel,
    }


def wait_for_container(port: int) -> None:
    """Wait for the container to be available."""
    for _ in range(60):
        try:
            httpx.get(f"http://localhost:{port}").raise_for_status()
        except (httpx.RequestError, httpx.HTTPStatusError):
            time.sleep(1)
        else:
            return

    # If we reach here, we've tried 60 times without success, meaning either
    # the container is broken or it took more than about a minute to become
    # functional, either of which cases is something we will want to look into.
    raise RuntimeError("Could not connect to pypiserver container")


def get_socket() -> int:
    """Find a random, open socket and return it."""
    # Close the socket automatically upon exiting the block
    with contextlib.closing(
        socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ) as sock:
        # Bind to a random open socket >=1024
        sock.bind(("", 0))
        # Return the socket number
        return sock.getsockname()[1]


class RunReturn(t.NamedTuple):
    """Simple wrapper around a simple subprocess call's results."""

    returncode: int
    out: str
    err: str


def run(
    *cmd: str,
    capture: bool = False,
    raise_on_err: bool = True,
    check_code: t.Callable[[int], bool] = lambda c: c == 0,
    **popen_kwargs: t.Any,
) -> RunReturn:
    """Run a command to completion."""
    stdout = subprocess.PIPE if capture else None
    stderr = subprocess.PIPE if capture else None
    proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr, **popen_kwargs)
    out, err = proc.communicate()
    result = RunReturn(
        proc.returncode,
        "" if out is None else out.decode(),
        "" if err is None else err.decode(),
    )
    if raise_on_err and not check_code(result.returncode):
        raise RuntimeError(result)
    return result


def uninstall_pkgs() -> None:
    """Uninstall any packages we've installed."""
    res = run("pip", "freeze", capture=True)
    if any(
        ln.strip().startswith(TEST_DEMO_PIP_PACKAGE)
        for ln in res.out.splitlines()
    ):
        run("pip", "uninstall", "-y", TEST_DEMO_PIP_PACKAGE)


@pytest.fixture(scope="session", autouse=True)
def session_cleanup() -> t.Iterator[None]:
    """Deal with any pollution of the local env."""
    yield
    uninstall_pkgs()


@pytest.fixture()
def cleanup() -> t.Iterator[None]:
    """Clean up after tests that may have affected the env."""
    yield
    uninstall_pkgs()


class TestCommands:
    """Test commands other than `run`."""

    def test_help(self, image: str) -> None:
        """We can get help from the docker container."""
        res = run("docker", "run", image, "--help", capture=True)
        assert PYPISERVER_PROCESS_NAME in res.out

    def test_version(self, image: str) -> None:
        """We can get the version from the docker container."""
        res = run("docker", "run", image, "--version", capture=True)
        assert res.out.strip() == pypiserver.__version__


class TestPermissions:
    """Test permission validation, especially with mounted volumes."""

    @pytest.mark.parametrize("perms", (0o706, 0o701, 0o704))
    def test_needs_rx_on_data(self, image: str, perms: int) -> None:
        """Read and execute permissions are required on /data."""
        # Note we can't run this one as root because then we have to make a file
        # that even we can't delete.
        with tempfile.TemporaryDirectory() as tmpdir:
            # Make sure the directory is not readable for anyone other than
            # the owner
            data_dir = Path(tmpdir) / "data"
            data_dir.mkdir(mode=perms)

            res = run(
                "docker",
                "run",
                "--rm",
                "--user",
                # Run as a not us user ID, so access to /data will be
                # determined by the "all other users" setting
                str(os.getuid() + 1),
                "-v",
                # Mount the temporary directory as the /data directory
                f"{data_dir}:/data",
                image,
                capture=True,
                # This should error out, so we check that the code is non-zero
                check_code=lambda c: c != 0,
            )

            assert "must have read/execute access" in res.err

    @pytest.mark.parametrize(
        "extra_args",
        (("--user", str(os.getuid())), ("--user", str(os.getuid() + 1))),
    )
    def test_needs_rwx_on_packages(self, image: str, extra_args: tuple) -> None:
        """RWX permission is required on /data/packages."""
        with tempfile.TemporaryDirectory() as tmpdir:
            td_path = Path(tmpdir)
            # Make the /data directory read/writable by anyone
            td_path.chmod(0o777)
            # Add the /data/packages directory, and make it readable by anyone,
            # but writable only by the owner
            (td_path / "packages").mkdir(mode=0o444)

            res = run(
                "docker",
                "run",
                "--rm",
                *extra_args,
                "-v",
                # Mount the temporary directory as the /data directory
                f"{tmpdir}:/data",
                image,
                capture=True,
                # We should error out in this case
                check_code=lambda c: c != 0,
            )
            assert "must have read/write/execute access" in res.err

    def test_runs_as_pypiserver_user(self, image: str) -> None:
        """Test that the default run uses the pypiserver user."""
        host_port = get_socket()
        res = run(
            "docker",
            "run",
            "--rm",
            "--detach",
            "--publish",
            f"{host_port}:8080",
            image,
            capture=True,
        )
        container_id = res.out.strip()
        try:
            wait_for_container(host_port)
            res = run(
                "docker",
                "container",
                "exec",
                container_id,
                "ps",
                "a",
                capture=True,
            )
            proc_line = next(
                filter(
                    # grab the process line for the pypi-server process
                    lambda ln: PYPISERVER_PROCESS_NAME in ln,
                    res.out.splitlines(),
                )
            )
            user = proc_line.split()[1]
            # the ps command on these alpine containers doesn't always show the
            # full user name, so we only check for the first bit
            assert user.startswith("pypi")
        finally:
            run("docker", "container", "rm", "-f", container_id)


class ContainerInfo(t.NamedTuple):
    """Info about a running container"""

    container_id: str
    port: int
    args: tuple


class TestBasics:
    """Test basic pypiserver functionality in a simple unauthed container."""

    # We want to automatically parametrize this class' tests with a variety of
    # pypiserver args, since it should work the same in all of these cases
    @pytest.fixture(
        scope="class",
        params=[
            # default (gunicorn) server with cached backend
            (),
            # default (gunicorn) server with non-cached backend
            ("--backend", "simple-dir"),
            # explicit gunicorn server with a non-cached backend
            ("--server", "gunicorn", "--backend", "simple-dir"),
            # explicit gunicorn server
            ("--server", "gunicorn"),
            # explicit waitress server
            ("--server", "wsgiref"),
            # explicit wsgiref server
            ("--server", "wsgiref"),
        ],
    )
    def container(
        self, request: pytest.FixtureRequest, image: str
    ) -> t.Iterator[ContainerInfo]:
        """Run the pypiserver container.

        Returns the container ID.
        """
        port = get_socket()
        args = (
            "docker",
            "run",
            "--rm",
            "--publish",
            f"{port}:8080",
            "--detach",
            image,
            "run",
            "--passwords",
            ".",
            "--authenticate",
            ".",
            *request.param,
        )
        res = run(*args, capture=True)
        wait_for_container(port)
        container_id = res.out.strip()
        yield ContainerInfo(container_id, port, args)
        run("docker", "container", "rm", "-f", container_id)

    @pytest.fixture(scope="class")
    def upload_mypkg(
        self,
        container: ContainerInfo,
        mypkg_paths: t.Dict[str, Path],
    ) -> None:
        """Upload mypkg to the container."""
        run(
            sys.executable,
            "-m",
            "twine",
            "upload",
            "--repository-url",
            f"http://localhost:{container.port}",
            "--username",
            "",
            "--password",
            "",
            f"{mypkg_paths['dist_dir']}/*",
        )

    @pytest.mark.usefixtures("upload_mypkg")
    def test_download(self, container: ContainerInfo) -> None:
        """Download mypkg from the container."""
        with tempfile.TemporaryDirectory() as tmpdir:
            run(
                sys.executable,
                "-m",
                "pip",
                "download",
                "--index-url",
                f"http://localhost:{container.port}/simple",
                "--dest",
                tmpdir,
                "pypiserver_mypkg",
            )
            assert any(
                "pypiserver_mypkg" in path.name
                for path in Path(tmpdir).iterdir()
            )

    @pytest.mark.usefixtures("upload_mypkg", "cleanup")
    def test_install(self, container: ContainerInfo) -> None:
|
||||
"""Install mypkg from the container.
|
||||
|
||||
Note this also ensures that name normalization is working,
|
||||
since we are requesting the package name with a dash, rather
|
||||
than an underscore.
|
||||
"""
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"--force-reinstall",
|
||||
"--index-url",
|
||||
f"http://localhost:{container.port}/simple",
|
||||
TEST_DEMO_PIP_PACKAGE,
|
||||
)
|
||||
run("python", "-c", "'import pypiserver_mypkg; mypkg.pkg_name()'")
|
||||
|
||||
def test_expected_server(self, container: ContainerInfo) -> None:
|
||||
"""Ensure we run the server we think we're running."""
|
||||
resp = httpx.get(f"http://localhost:{container.port}")
|
||||
server = resp.headers["server"].lower()
|
||||
arg_pairs = tuple(zip(container.args, container.args[1:]))
|
||||
if (
|
||||
container.args[-1] == "pypiserver:test"
|
||||
or ("--server", "gunicorn") in arg_pairs
|
||||
):
|
||||
# We specified no overriding args, so we should run gunicorn, or
|
||||
# we specified gunicorn in overriding args.
|
||||
assert "gunicorn" in server
|
||||
elif ("--server", "wsgiref") in arg_pairs:
|
||||
# We explicitly specified the wsgiref server
|
||||
assert "wsgiserver" in server
|
||||
elif ("--server", "waitress") in arg_pairs:
|
||||
# We explicitly specified the wsgiref server
|
||||
assert "waitress" in server
|
||||
else:
|
||||
# We overrode args, so instead of using the gunicorn default,
|
||||
# we use the `auto` option. Bottle won't choose gunicorn as an
|
||||
# auto server, so we have waitress installed in the docker container
|
||||
# as a fallback for these scenarios, since wsgiref is not production
|
||||
# ready
|
||||
assert "waitress" in server
|
||||
|
||||
def test_welcome(self, container: ContainerInfo) -> None:
|
||||
"""View the welcome page."""
|
||||
resp = httpx.get(f"http://localhost:{container.port}")
|
||||
assert resp.status_code == 200
|
||||
assert "pypiserver" in resp.text
|
||||
|
||||
|
||||
class TestAuthed:
|
||||
"""Test basic pypiserver functionality in a simple unauthed container."""
|
||||
|
||||
HOST_PORT = get_socket()
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def container(self, image: str) -> t.Iterator[str]:
|
||||
"""Run the pypiserver container.
|
||||
|
||||
Returns the container ID.
|
||||
"""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
dirpath = Path(tmpdir)
|
||||
shutil.copy2(HTPASS_FILE, dirpath / "htpasswd")
|
||||
pkg_path = dirpath / "packages"
|
||||
pkg_path.mkdir(mode=0o777)
|
||||
|
||||
res = run(
|
||||
"docker",
|
||||
"run",
|
||||
"--rm",
|
||||
"--publish",
|
||||
f"{self.HOST_PORT}:8080",
|
||||
"-v",
|
||||
f"{dirpath / 'htpasswd'}:/data/htpasswd",
|
||||
"--detach",
|
||||
image,
|
||||
"run",
|
||||
"--passwords",
|
||||
"/data/htpasswd",
|
||||
"--authenticate",
|
||||
"download, update",
|
||||
capture=True,
|
||||
)
|
||||
wait_for_container(self.HOST_PORT)
|
||||
container_id = res.out.strip()
|
||||
yield container_id
|
||||
run("docker", "container", "rm", "-f", container_id)
|
||||
|
||||
@pytest.fixture(scope="class")
|
||||
def upload_mypkg(
|
||||
self,
|
||||
container: str, # pylint: disable=unused-argument
|
||||
mypkg_paths: t.Dict[str, Path],
|
||||
) -> None:
|
||||
"""Upload mypkg to the container."""
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"twine",
|
||||
"upload",
|
||||
"--repository-url",
|
||||
f"http://localhost:{self.HOST_PORT}",
|
||||
"--username",
|
||||
"a",
|
||||
"--password",
|
||||
"a",
|
||||
f"{mypkg_paths['dist_dir']}/*",
|
||||
)
|
||||
|
||||
def test_upload_failed_auth(
|
||||
self,
|
||||
container: str, # pylint: disable=unused-argument
|
||||
mypkg_paths: t.Dict[str, Path],
|
||||
) -> None:
|
||||
"""Upload mypkg to the container."""
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"twine",
|
||||
"upload",
|
||||
"--repository-url",
|
||||
f"http://localhost:{self.HOST_PORT}",
|
||||
f"{mypkg_paths['dist_dir']}/*",
|
||||
check_code=lambda c: c != 0,
|
||||
)
|
||||
|
||||
@pytest.mark.usefixtures("upload_mypkg")
|
||||
def test_download(self) -> None:
|
||||
"""Download mypkg from the container."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"download",
|
||||
"--index-url",
|
||||
f"http://a:a@localhost:{self.HOST_PORT}/simple",
|
||||
"--dest",
|
||||
tmpdir,
|
||||
"pypiserver_mypkg",
|
||||
)
|
||||
assert any(
|
||||
"pypiserver_mypkg" in path.name
|
||||
for path in Path(tmpdir).iterdir()
|
||||
)
|
||||
|
||||
@pytest.mark.usefixtures("upload_mypkg")
|
||||
def test_download_failed_auth(self) -> None:
|
||||
"""Download mypkg from the container."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"download",
|
||||
"--index-url",
|
||||
f"http://foo:bar@localhost:{self.HOST_PORT}/simple",
|
||||
"--dest",
|
||||
tmpdir,
|
||||
"pypiserver_mypkg",
|
||||
check_code=lambda c: c != 0,
|
||||
)
|
||||
|
||||
@pytest.mark.usefixtures("upload_mypkg", "cleanup")
|
||||
def test_install(self) -> None:
|
||||
"""Install mypkg from the container.
|
||||
|
||||
Note this also ensures that name normalization is working,
|
||||
since we are requesting the package name with a dash, rather
|
||||
than an underscore.
|
||||
"""
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"--force-reinstall",
|
||||
"--index-url",
|
||||
f"http://a:a@localhost:{self.HOST_PORT}/simple",
|
||||
TEST_DEMO_PIP_PACKAGE,
|
||||
)
|
||||
run("python", "-c", "'import pypiserver_mypkg; mypkg.pkg_name()'")
|
||||
|
||||
@pytest.mark.usefixtures("upload_mypkg", "cleanup")
|
||||
def test_install_failed_auth(self) -> None:
|
||||
"""Install mypkg from the container.
|
||||
|
||||
Note this also ensures that name normalization is working,
|
||||
since we are requesting the package name with a dash, rather
|
||||
than an underscore.
|
||||
"""
|
||||
run(
|
||||
sys.executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"--force-reinstall",
|
||||
"--no-cache",
|
||||
"--index-url",
|
||||
f"http://foo:bar@localhost:{self.HOST_PORT}/simple",
|
||||
TEST_DEMO_PIP_PACKAGE,
|
||||
check_code=lambda c: c != 0,
|
||||
)
|
||||
|
||||
def test_welcome(self) -> None:
|
||||
"""View the welcome page."""
|
||||
resp = httpx.get(f"http://localhost:{self.HOST_PORT}")
|
||||
assert resp.status_code == 200
|
||||
assert "pypiserver" in resp.text
|
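The install tests above rely on PEP 503 name normalization (requesting `pypiserver-mypkg` for a package uploaded as `pypiserver_mypkg`). A minimal sketch of the normalization rule itself, as defined by PEP 503 (not necessarily pypiserver's exact helper):

```python
import re


def normalize(name: str) -> str:
    # PEP 503: runs of "-", "_", and "." collapse to a single "-", lower-cased
    return re.sub(r"[-_.]+", "-", name).lower()


assert normalize("pypiserver_mypkg") == "pypiserver-mypkg"
```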
docs/README.md (new file, 33 lines)
@@ -0,0 +1,33 @@
# pypiserver docs

Welcome to the documentation section of `pypiserver`.

> :bulb: This documentation is still a work in progress
> and would benefit from your help.

## More information

If something is missing in the documentation here, maybe it is covered in
[README.md](../README.md).

## Documentation setup

> Some areas to describe are highlighted in
> [this issue](https://github.com/pypiserver/pypiserver/issues/368)
> ([#368](https://github.com/pypiserver/pypiserver/issues/368)),
> but feel free to add if you see something missing there.

The [`./contents`](./contents/) directory is designed to hold
all the documentation files.

### How to contribute

Everyone is very welcome to extend the `pypiserver` documentation.

1. :thought_balloon: If you think of a topic you would like to describe, this is great.
2. :deciduous_tree: Fork this project and clone it locally to start your contribution.
3. :page_facing_up: Create a new Markdown file (`<doc title>.md`), or a subdirectory with a file, inside [`./contents`](./contents/):
   - :file_folder: Use subdirectories to organize related documentation into topics.
4. :octocat: Push your changes to your fork and open a PR to this repository.
5. :bell: Tag someone from the maintainers if you'd like; in any case, we will try to review the PR as soon as possible.
6. :dizzy: Once reviewed and accepted, your documentation will be merged into this repository.
docs/contents/.gitkeep (new, empty file)
docs/contents/repo-maintenance/release-work.md (new file, 123 lines)
@@ -0,0 +1,123 @@
# `Pypi-server` Release Workflow Reference

The official `pypi-server` releases are handled using
[GitHub Actions workflows](../../../.github/workflows/).

## General release process

```mermaid
flowchart LR
    rc["release-candidate ⭐️"]
    rn["release-notes 📝"]
    rm["confirmed-tag ✅"]
    ci["code-checks 🧪"]
    pk["build-and-pack 📦"]
    py["pypi-index 🗃️"]
    do["docker-hub 🐳"]
    gh["gh-container-registry 🚀"]
    gr["github-release 📣"]

    subgraph "Preparation 🌱"
    rc-->rn-->rm
    end
    subgraph "Integration 🪴"
    rm-->ci-->pk
    end
    subgraph "Deploy 🌳"
    pk--> py & do & gh & gr
    end
```

## Process walkthrough

> 🗺️ ***This description approximates the real GitHub workflows and steps.***
> 👀 *For a more detailed view, do check out the linked resources as you read.*

### Preparation 🌱

> 🛠️ *These steps are applicable only for maintainers.*

#### Release candidate ⭐️

A new release candidate can be initiated ***manually** or **on a monthly schedule***.

This is done via the [`rc.yml`](../../../.github/workflows/rc.yml) GH
Workflow's `workflow_dispatch` or `schedule` trigger.

The workflow automatically prepares a list of changes for the `CHANGES.rst` and
creates a new Pull Request *(rc PR)* named
`chore(auto-release-candidate-YYYY-MM-DD)` including these draft change notes.

#### Release notes 📝

In the created rc PR, open the `CHANGES.rst` and:

1. ***adjust the suggested changelog items***
2. ***choose & set the next released version***
3. ***set the right release date***

Commit the changes and push them to the head branch of the rc PR.

#### Confirmed tag ✅

1. Once everything is looking good, ***approve and merge*** the rc PR.

   It will create the new *commit* with the updated `CHANGES.rst`
   on the default branch.

2. Next, to create a release tag, ***manually run*** the
   [`rt.yml`](../../../.github/workflows/rt.yml) GH Workflow.

   First, it executes all the [`bumpver`](../../../bin/README.md) procedures.

   Next, it commits and pushes the new **version tag** to the default branch.

### Integration 🪴

#### Code checks 🧪

Once any *commit* or *tag* is pushed to the default branch, the
[`ci.yml`](../../../.github/workflows/ci.yml) GH Workflow automatically
executes diverse code checks: e.g. *linting*, *formatting*, *tests*.

#### Build and pack 📦

If all the checks are successful, [`ci.yml`](../../../.github/workflows/ci.yml)
builds all the code artifacts: e.g. *wheels*, *docker images*.

### Deploy 🌳

#### Publish to PyPI 🗃️

> 🏷️ This happens only on new *version tags*.

Once everything is built, [`ci.yml`](../../../.github/workflows/ci.yml) uploads
the wheels to the [`pypiserver` PyPI project](https://pypi.org/project/pypiserver/).

#### Publish to Docker Hub 🐳

> 🏷️ Docker image *tags* are determined on the fly.

If all is successful so far, [`ci.yml`](../../../.github/workflows/ci.yml) tags
the built docker images and pushes them to the
[`pypiserver` Docker Hub repository](https://hub.docker.com/r/pypiserver/pypiserver).

#### Publish to GitHub Container Registry 🚀

> 🏷️ Docker image *tags* are determined on the fly.

For all `stable` (i.e. `latest`, tag, release ...) tags derived by
[`ci.yml`](../../../.github/workflows/ci.yml),
the built docker images are *also* pushed to the
[`pypiserver` GitHub Container Registry](https://github.com/orgs/pypiserver/packages?repo_name=pypiserver).

#### Publish a GitHub Release draft 📣

> 🛠️ *This step is applicable only for maintainers.*
> 🏷️ This happens only on new *version tags*.

To make the release noticeable, [`ci.yml`](../../../.github/workflows/ci.yml)
also creates a *draft*
[GitHub Release entry in the `pypiserver` repository](https://github.com/pypiserver/pypiserver/releases).

> 📝 Since it is a *draft*, the entry should be *manually* adjusted further.
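The manual triggers described above are normally fired from the Actions UI, but they can also be scripted. A hedged sketch using GitHub's generic workflow-dispatch REST endpoint (the repo path, branch name, and token handling here are assumptions, not part of the documented process):

```python
import os

import httpx

# Fire the rt.yml (release-tag) workflow's workflow_dispatch trigger.
resp = httpx.post(
    "https://api.github.com/repos/pypiserver/pypiserver"
    "/actions/workflows/rt.yml/dispatches",
    headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
    json={"ref": "master"},  # assumed default branch
)
resp.raise_for_status()  # GitHub answers 204 No Content on success
```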
(deleted shell entrypoint script)
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-set -euo pipefail
-
-# chown the VOLUME mount set in the dockerfile
-# If you're using an alternative directory for packages,
-# you'll need to ensure that pypiserver has read and
-# write access to that directory
-chown -R pypiserver:pypiserver /data/packages
-
-exec gosu pypiserver pypi-server -p "$PORT" $@
fixtures/mypkg/mypkg/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""A very simple python file to package for testing."""


def pkg_name() -> None:
    """Print the package name."""
    print("mypkg")
fixtures/mypkg/setup.cfg (new file, 7 lines)
@@ -0,0 +1,7 @@
[wheel]
universal=1

[mypy]
follow_imports = silent
ignore_missing_imports = True
fixtures/mypkg/setup.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""A simple setup file for this test package."""

from setuptools import setup

setup(
    name="pypiserver_mypkg",
    description="Test pkg",
    version="1.0.0",
    packages=["mypkg"],
)
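The docker tests above upload whatever sits in this fixture's `dist_dir`. One plausible way to produce those distributions, assuming the `build` package (pypa/build) is installed; the repo's own test harness may build them differently:

```python
import subprocess
import sys

# Build an sdist and wheel for the fixture package; outputs land in
# fixtures/mypkg/dist/, matching the dist_dir the tests upload from.
subprocess.run(
    [sys.executable, "-m", "build", "fixtures/mypkg"],
    check=True,
)
```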
pypiserver/__init__.py
@@ -7,9 +7,9 @@ import typing as t
 from pypiserver.bottle import Bottle
 from pypiserver.config import Config, RunConfig, strtobool

-version = __version__ = "2.0.0dev1"
+version = __version__ = "2.1.1"
 __version_info__ = tuple(_re.split("[.-]", __version__))
-__updated__ = "2020-10-11 11:23:15"
+__updated__ = "2024-04-25 01:23:25"

 __title__ = "pypiserver"
 __summary__ = "A minimal PyPI server for use with pip/easy_install."
@@ -121,7 +121,7 @@ def app(**kwargs: t.Any) -> Bottle:
     (or its base), defined in `pypiserver.config`, may be overridden.
     """
     config = Config.default_with_overrides(**backwards_compat_kwargs(kwargs))
-    return app_from_config(config)
+    return setup_routes_from_config(app_from_config(config), config)


 def app_from_config(config: RunConfig) -> Bottle:
@@ -141,6 +141,20 @@ def app_from_config(config: RunConfig) -> Bottle:
     return _app.app


+def setup_routes_from_config(app: Bottle, config: RunConfig) -> Bottle:
+    """Set up additional routes supplied from the config."""
+
+    def _setup_health_endpoint(app, config):
+        if config.health_endpoint in [route.rule for route in app.routes]:
+            raise RuntimeError(
+                "Provided health endpoint overlaps with existing routes"
+            )
+        app.route(config.health_endpoint, "GET", lambda: "Ok")
+
+    _setup_health_endpoint(app, config)
+    return app
+
+
 T = t.TypeVar("T")
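The new health route simply answers `Ok` on a GET, so a liveness probe is a plain HTTP request. A minimal sketch, assuming a locally running server on port 8080 and the default `/health` endpoint (see the `HEALTH_ENDPOINT` default in `pypiserver/config.py` further down):

```python
import httpx

# Probe the liveness endpoint added by setup_routes_from_config.
resp = httpx.get("http://localhost:8080/health")
assert resp.status_code == 200
assert resp.text == "Ok"
```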
pypiserver/__main__.py
@@ -1,26 +1,28 @@
-#! /usr/bin/env python
+#! /usr/bin/env python3
 """Entrypoint for pypiserver."""

 from __future__ import print_function

+import enum
+import importlib
 import logging
 import sys
 import typing as t
+from pathlib import Path
+from wsgiref.simple_server import WSGIRequestHandler

 import functools as ft
 from pypiserver.config import Config, UpdateConfig


 log = logging.getLogger("pypiserver.main")


 def init_logging(
-    level=logging.NOTSET,
-    frmt=None,
-    filename=None,
+    level: int = logging.NOTSET,
+    frmt: str = None,
+    filename: t.Union[str, Path] = None,
     stream: t.Optional[t.IO] = sys.stderr,
-    logger=None,
-):
+    logger: logging.Logger = None,
+) -> None:
     """Configure the specified logger, or the root logger otherwise."""
     logger = logger or logging.getLogger()
     logger.setLevel(level)

@@ -36,14 +38,90 @@ def init_logging(
     logger.addHandler(handler)


-def main(argv=None):
+class WsgiHandler(WSGIRequestHandler):
+    """A simple request handler to configure logging."""
+
+    # The default `FixedHandler` that bottle's `WSGIRefServer` uses does not
+    # log in a particularly predictable or configurable way. We'll pass this
+    # in to use instead.
+    def address_string(self) -> str:  # Prevent reverse DNS lookups please.
+        # This method copied directly from bottle's `FixedHandler` and
+        # maintained on the Chesterton's fence principle (i.e. I don't know
+        # why it's important, so I'm not going to get rid of it)
+        return self.client_address[0]
+
+    def log_message(
+        self, format: str, *args: t.Any  # pylint: disable=redefined-builtin
+    ) -> None:
+        """Log a message."""
+        # The log_message method on the `HttpRequestHandler` base class just
+        # writes directly to stderr. We'll use its same formatting, but pass
+        # it through the logger instead.
+        log.info(
+            "%s - - [%s] %s\n",
+            self.address_string(),
+            self.log_date_time_string(),
+            format % args,
+        )
+
+
+class AutoServer(enum.Enum):
+    """Expected servers that can be automatically selected by bottle."""
+
+    Waitress = enum.auto()
+    Paste = enum.auto()
+    Twisted = enum.auto()
+    CherryPy = enum.auto()
+    WsgiRef = enum.auto()
+
+
+# Possible automatically selected servers. This MUST match the available
+# auto servers in bottle.py
+AUTO_SERVER_IMPORTS = (
+    (AutoServer.Waitress, "waitress"),
+    (AutoServer.Paste, "paste"),
+    (AutoServer.Twisted, "twisted.web"),
+    (AutoServer.CherryPy, "cheroot.wsgi"),
+    (AutoServer.CherryPy, "cherrypy.wsgiserver"),
+    # this should always be available because it's part of the stdlib
+    (AutoServer.WsgiRef, "wsgiref"),
+)
+
+
+def _can_import(name: str) -> bool:
+    """Attempt to import a module. Return a bool indicating success."""
+    try:
+        importlib.import_module(name)
+        return True
+    except ImportError:
+        return False
+
+
+def guess_auto_server() -> AutoServer:
+    """Guess which server bottle will use for the auto setting."""
+    # Return the first server that can be imported.
+    server = next(
+        (s for s, i in AUTO_SERVER_IMPORTS if _can_import(i)),
+        None,
+    )
+    if server is None:
+        raise RuntimeError(
+            "Unexpected error determining bottle auto server. There may be an "
+            "issue with this python environment. Please report this bug at "
+            "https://github.com/pypiserver/pypiserver/issues"
+        )
+    return server
+
+
+def main(argv: t.Sequence[str] = None) -> None:
     """Application entrypoint for pypiserver.

     This function drives the application (as opposed to the library)
     implementation of pypiserver. Usage from the commandline will result in
     this function being called.
     """
-    import pypiserver
+    # pylint: disable=import-outside-toplevel
+    import pypiserver  # pylint: disable=redefined-outer-name

     if argv is None:
         # The first item in sys.argv is the name of the python file being
@@ -84,25 +162,56 @@ def main(argv=None):
     from pypiserver import bottle

     bottle.debug(config.verbosity > 1)
-    bottle._stderr = ft.partial(
+    bottle._stderr = ft.partial(  # pylint: disable=protected-access
        _logwrite, logging.getLogger(bottle.__name__), logging.INFO
    )

+    # Here `app` is a Bottle instance, which we pass to bottle.run() to run
+    # the server
     app = pypiserver.app_from_config(config)
+    app = pypiserver.setup_routes_from_config(app, config)

     if config.server_method == "gunicorn":
         # When bottle runs gunicorn, gunicorn tries to pull its arguments from
         # sys.argv. Because pypiserver's arguments don't match gunicorn's,
         # this leads to errors.
+        # Gunicorn can be configured by using a `gunicorn.conf.py` config file
+        # or by specifying the `GUNICORN_CMD_ARGS` env var. See gunicorn
+        # docs for more info.
         sys.argv = ["gunicorn"]

+    wsgi_kwargs = {"handler_class": WsgiHandler}
+
+    if config.server_method == "auto":
+        expected_server = guess_auto_server()
+        extra_kwargs = (
+            wsgi_kwargs if expected_server is AutoServer.WsgiRef else {}
+        )
+        log.debug(
+            "Server 'auto' selected. Expecting bottle to run '%s'. "
+            "Passing extra keyword args: %s",
+            expected_server.name,
+            extra_kwargs,
+        )
+    else:
+        extra_kwargs = wsgi_kwargs if config.server_method == "wsgiref" else {}
+        log.debug(
+            "Running bottle with selected server '%s'", config.server_method
+        )
+
     bottle.run(
         app=app,
         host=config.host,
         port=config.port,
         server=config.server_method,
+        **extra_kwargs,
     )


 def _logwrite(logger, level, msg):
     if msg:
         line_endings = ["\r\n", "\n\r", "\n"]
-        for le in line_endings:
+        for le in line_endings:  # pylint: disable=invalid-name
             if msg.endswith(le):
                 msg = msg[: -len(le)]
     if msg:
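`guess_auto_server` just probes the `AUTO_SERVER_IMPORTS` list in order. A standalone sketch of the same probe, useful for checking which server bottle's `auto` mode would pick in a given environment (run in the same environment pypiserver would use):

```python
import importlib

# Mirror the AUTO_SERVER_IMPORTS probe order from pypiserver/__main__.py.
for name, module in (
    ("Waitress", "waitress"),
    ("Paste", "paste"),
    ("Twisted", "twisted.web"),
    ("CherryPy", "cheroot.wsgi"),
    ("CherryPy (legacy)", "cherrypy.wsgiserver"),
    ("WsgiRef", "wsgiref"),  # stdlib, always importable
):
    try:
        importlib.import_module(module)
        print(f"bottle 'auto' should select: {name}")
        break
    except ImportError:
        continue
```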
pypiserver/_app.py
@@ -1,14 +1,20 @@
-from collections import namedtuple
 import logging
 import mimetypes
 import os
 import re
-import zipfile
 import xml.dom.minidom
+import xmlrpc.client as xmlrpclib
+import zipfile
+from collections import defaultdict
+from collections import namedtuple
+from io import BytesIO
+from json import dumps
+from urllib.parse import urljoin, urlparse

 from pypiserver.config import RunConfig
 from . import __version__
 from . import core
+from . import mirror_cache
 from .bottle import (
     static_file,
     redirect,
@@ -18,26 +24,10 @@ from .bottle import (
     Bottle,
     template,
 )

-try:
-    import xmlrpc.client as xmlrpclib  # py3
-except ImportError:
-    import xmlrpclib  # py2
-
-try:
-    from io import BytesIO
-except ImportError:
-    from StringIO import StringIO as BytesIO
-
-try:  # PY3
-    from urllib.parse import urljoin, urlparse
-except ImportError:  # PY2
-    from urlparse import urljoin, urlparse
-
+from .pkg_helpers import guess_pkgname_and_version, normalize_pkgname_for_url

 log = logging.getLogger(__name__)
 config: RunConfig

 app = Bottle()

@@ -103,19 +93,13 @@ def favicon():
 def root():
     fp = request.custom_fullpath

-    try:
-        numpkgs = len(list(config.iter_packages()))
-    except Exception as exc:
-        log.error(f"Could not list packages: {exc}")
-        numpkgs = 0
-
     # Ensure template() does not consider `msg` as filename!
     msg = config.welcome_msg + "\n"
     return template(
         msg,
         URL=request.url.rstrip("/") + "/",
         VERSION=__version__,
-        NUMPKGS=numpkgs,
+        NUMPKGS=config.backend.package_count(),
         PACKAGES=fp.rstrip("/") + "/packages/",
         SIMPLE=fp.rstrip("/") + "/simple/",
     )
@@ -148,16 +132,12 @@ def remove_pkg():
     if not name or not version:
         msg = f"Missing 'name'/'version' fields: name={name}, version={version}"
         raise HTTPError(400, msg)
-    pkgs = list(
-        filter(
-            lambda pkg: pkg.pkgname == name and pkg.version == version,
-            core.find_packages(config.iter_packages()),
-        )
-    )
-    if len(pkgs) == 0:
+
+    pkgs = list(config.backend.find_version(name, version))
+    if not pkgs:
         raise HTTPError(404, f"{name} ({version}) not found")
     for pkg in pkgs:
-        os.unlink(pkg.fn)
+        config.backend.remove_package(pkg)


 Upload = namedtuple("Upload", "pkg sig")
@@ -183,13 +163,11 @@ def file_upload():
             continue
         if (
             not is_valid_pkg_filename(uf.raw_filename)
-            or core.guess_pkgname_and_version(uf.raw_filename) is None
+            or guess_pkgname_and_version(uf.raw_filename) is None
         ):
             raise HTTPError(400, f"Bad filename: {uf.raw_filename}")

-        if not config.overwrite and core.exists(
-            config.package_root, uf.raw_filename
-        ):
+        if not config.overwrite and config.backend.exists(uf.raw_filename):
             log.warning(
                 f"Cannot upload {uf.raw_filename!r} since it already exists! \n"
                 "  You may start server with `--overwrite` option. "
@@ -200,7 +178,7 @@ def file_upload():
                 "  You may start server with `--overwrite` option.",
             )

-        core.store(config.package_root, uf.raw_filename, uf.save)
+        config.backend.add_package(uf.raw_filename, uf.file)
         if request.auth:
             user = request.auth[0]
         else:
@@ -231,10 +209,10 @@ def update():


 @app.route("/simple")
-@app.route("/simple/:prefix")
+@app.route("/simple/:project")
 @app.route("/packages")
 @auth("list")
-def pep_503_redirects(prefix=None):
+def pep_503_redirects(project=None):
     return redirect(request.custom_fullpath + "/", 301)


@@ -248,7 +226,7 @@ def handle_rpc():
         .childNodes[0]
         .wholeText.strip()
     )
-    log.info(f"Processing RPC2 request for '{methodname}'")
+    log.debug(f"Processing RPC2 request for '{methodname}'")
     if methodname == "search":
         value = (
             parser.getElementsByTagName("string")[0]
@@ -257,7 +235,7 @@ def handle_rpc():
         )
         response = []
         ordering = 0
-        for p in config.iter_packages():
+        for p in config.backend.get_all_packages():
             if p.pkgname.count(value) > 0:
                 # We do not presently have any description/summary, returning
                 # version instead
@@ -278,8 +256,9 @@ def handle_rpc():
 @app.route("/simple/")
 @auth("list")
 def simpleindex():
-    links = sorted(core.get_prefixes(config.iter_packages()))
+    links = sorted(config.backend.get_projects())
     tmpl = """\
 <!DOCTYPE html>
 <html>
     <head>
         <title>Simple Index</title>
@@ -295,60 +274,67 @@ def simpleindex():
     return template(tmpl, links=links)


-@app.route("/simple/:prefix/")
+@app.route("/simple/:project/")
 @auth("list")
-def simple(prefix=""):
-    # PEP 503: require normalized prefix
-    normalized = core.normalize_pkgname_for_url(prefix)
-    if prefix != normalized:
-        return redirect("/simple/{0}/".format(normalized), 301)
+def simple(project):
+    # PEP 503: require normalized project
+    normalized = normalize_pkgname_for_url(project)
+    if project != normalized:
+        return redirect(f"/simple/{normalized}/", 301)

-    files = sorted(
-        core.find_packages(config.iter_packages(), prefix=prefix),
+    packages = sorted(
+        config.backend.find_project_packages(project),
         key=lambda x: (x.parsed_version, x.relfn),
     )
-    if not files:
-        if not config.disable_fallback:
-            return redirect(f"{config.fallback_url.rstrip('/')}/{prefix}/")
+    if not packages:
+        if config.mirror:
+            return mirror_cache.MirrorCache.add(project=project, config=config)
+        elif not config.disable_fallback:
+            return redirect(f"{config.fallback_url.rstrip('/')}/{project}/")
         return HTTPError(404, f"Not Found ({normalized} does not exist)\n\n")

-    fp = request.custom_fullpath
-    links = [
+    current_uri = request.custom_fullpath
+
+    links = (
         (
-            os.path.basename(f.relfn),
-            urljoin(fp, f"../../packages/{f.fname_and_hash(config.hash_algo)}"),
+            os.path.basename(pkg.relfn),
+            urljoin(current_uri, f"../../packages/{pkg.fname_and_hash}"),
         )
-        for f in files
-    ]
+        for pkg in packages
+    )

     tmpl = """\
 <!DOCTYPE html>
 <html>
     <head>
-        <title>Links for {{prefix}}</title>
+        <title>Links for {{project}}</title>
     </head>
     <body>
-        <h1>Links for {{prefix}}</h1>
+        <h1>Links for {{project}}</h1>
         % for file, href in links:
              <a href="{{href}}">{{file}}</a><br>
         % end
     </body>
 </html>
 """
-    return template(tmpl, prefix=prefix, links=links)
+    return template(tmpl, project=project, links=links)


 @app.route("/packages/")
 @auth("list")
 def list_packages():
     fp = request.custom_fullpath
-    files = sorted(
-        core.find_packages(config.iter_packages()),
+    packages = sorted(
+        config.backend.get_all_packages(),
         key=lambda x: (os.path.dirname(x.relfn), x.pkgname, x.parsed_version),
     )
-    links = [
-        (f.relfn_unix, urljoin(fp, f.fname_and_hash(config.hash_algo)))
-        for f in files
-    ]
+
+    links = (
+        (pkg.relfn_unix, urljoin(fp, pkg.fname_and_hash)) for pkg in packages
+    )

     tmpl = """\
 <!DOCTYPE html>
 <html>
     <head>
         <title>Index of packages</title>
@@ -367,7 +353,7 @@ def list_packages():
 @app.route("/packages/:filename#.*#")
 @auth("download")
 def server_static(filename):
-    entries = core.find_packages(config.iter_packages())
+    entries = config.backend.get_all_packages()
     for x in entries:
         f = x.relfn_unix
         if f == filename:
@@ -381,12 +367,43 @@ def server_static(filename):
                 "Cache-Control", f"public, max-age={config.cache_control}"
             )
             return response

+    if config.mirror and mirror_cache.MirrorCache.has_project(filename):
+        return mirror_cache.MirrorCache.get_static_file(filename=filename, config=config)
     return HTTPError(404, f"Not Found ({filename} does not exist)\n\n")


-@app.route("/:prefix")
-@app.route("/:prefix/")
-def bad_url(prefix):
+@app.route("/:project/json")
+@auth("list")
+def json_info(project):
+    # PEP 503: require normalized project
+    normalized = normalize_pkgname_for_url(project)
+    if project != normalized:
+        return redirect(f"/{normalized}/json", 301)
+
+    packages = sorted(
+        config.backend.find_project_packages(project),
+        key=lambda x: x.parsed_version,
+        reverse=True,
+    )
+
+    if not packages:
+        raise HTTPError(404, f"package {project} not found")
+
+    latest_version = packages[0].version
+    releases = defaultdict(list)
+    req_url = request.url
+    for x in packages:
+        releases[x.version].append(
+            {"url": urljoin(req_url, "../../packages/" + x.relfn)}
+        )
+
+    rv = {"info": {"version": latest_version}, "releases": releases}
+    response.content_type = "application/json"
+    return dumps(rv)
+
+
+@app.route("/:project")
+@app.route("/:project/")
+def bad_url(project):
    """Redirect unknown root URLs to /simple/."""
-    return redirect(core.get_bad_url_redirect_path(request, prefix))
+    return redirect(core.get_bad_url_redirect_path(request, project))
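The new `/:project/json` route returns a small JSON document shaped as `{"info": {"version": ...}, "releases": {version: [{"url": ...}, ...]}}`. A sketch of querying it, assuming a locally running server and a hypothetical project name:

```python
import httpx

# Host, port, and project name are assumptions for a local server.
data = httpx.get("http://localhost:8080/pypiserver-mypkg/json").json()
print(data["info"]["version"])        # the latest available version
for version, files in data["releases"].items():
    for f in files:
        print(version, f["url"])      # download URL per uploaded file
```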
pypiserver/backend.py (new file, 325 lines)
@@ -0,0 +1,325 @@
import abc
import functools
import hashlib
import itertools
import logging
import os
import typing as t
from pathlib import Path

from .cache import CacheManager, ENABLE_CACHING
from .core import PkgFile
from .pkg_helpers import (
    normalize_pkgname,
    is_listed_path,
    guess_pkgname_and_version,
)

if t.TYPE_CHECKING:
    from .config import _ConfigCommon as Configuration


log = logging.getLogger(__name__)


PathLike = t.Union[str, os.PathLike]


class IBackend(abc.ABC):
    @abc.abstractmethod
    def get_all_packages(self) -> t.Iterable[PkgFile]:
        pass

    @abc.abstractmethod
    def find_project_packages(self, project: str) -> t.Iterable[PkgFile]:
        pass

    @abc.abstractmethod
    def find_version(self, name: str, version: str) -> t.Iterable[PkgFile]:
        pass

    @abc.abstractmethod
    def get_projects(self) -> t.Iterable[str]:
        pass

    @abc.abstractmethod
    def exists(self, filename: str) -> bool:
        pass

    @abc.abstractmethod
    def digest(self, pkg: PkgFile) -> t.Optional[str]:
        pass

    @abc.abstractmethod
    def package_count(self) -> int:
        pass

    @abc.abstractmethod
    def add_package(self, filename: str, stream: t.BinaryIO) -> None:
        pass

    @abc.abstractmethod
    def remove_package(self, pkg: PkgFile) -> None:
        pass


class Backend(IBackend, abc.ABC):
    def __init__(self, config: "Configuration"):
        self.hash_algo = config.hash_algo

    @abc.abstractmethod
    def get_all_packages(self) -> t.Iterable[PkgFile]:
        """Implement this method to return an Iterable of all packages (as
        PkgFile objects) that are available in the Backend.
        """
        pass

    @abc.abstractmethod
    def add_package(self, filename: str, stream: t.BinaryIO) -> None:
        """Add a package to the Backend. `filename` is the package's filename
        (without any directory parts). It is just a name, there is no file by
        that name (yet). `stream` is an open file-like object that can be used
        to read the file's content. To convert the package into an actual file
        on disk, run `write_file(filename, stream)`.
        """
        pass

    @abc.abstractmethod
    def remove_package(self, pkg: PkgFile) -> None:
        """Remove a package from the Backend."""
        pass

    @abc.abstractmethod
    def exists(self, filename: str) -> bool:
        """Does a package by the given name exist?"""
        pass

    def digest(self, pkg: PkgFile) -> t.Optional[str]:
        if self.hash_algo is None or pkg.fn is None:
            return None
        return digest_file(pkg.fn, self.hash_algo)

    def package_count(self) -> int:
        """Return a count of all available packages. When implementing a
        Backend class, either use this method as is, or override it with a
        more performant version.
        """
        return sum(1 for _ in self.get_all_packages())

    def get_projects(self) -> t.Iterable[str]:
        """Return an iterable of all (unique) projects available in the store
        in their PEP503 normalized form. When implementing a Backend class,
        either use this method as is, or override it with a more performant
        version.
        """
        return set(package.pkgname_norm for package in self.get_all_packages())

    def find_project_packages(self, project: str) -> t.Iterable[PkgFile]:
        """Find all packages from a given project. The project may be given
        as either the normalized or canonical name. When implementing a
        Backend class, either use this method as is, or override it with a
        more performant version.
        """
        return (
            x
            for x in self.get_all_packages()
            if normalize_pkgname(project) == x.pkgname_norm
        )

    def find_version(self, name: str, version: str) -> t.Iterable[PkgFile]:
        """Return all packages that match PkgFile.pkgname == name and
        PkgFile.version == version. When implementing a Backend class,
        either use this method as is, or override it with a more performant
        version.
        """
        return filter(
            lambda pkg: pkg.pkgname == name and pkg.version == version,
            self.get_all_packages(),
        )


class SimpleFileBackend(Backend):
    def __init__(self, config: "Configuration"):
        super().__init__(config)
        self.roots = [Path(root).resolve() for root in config.roots]

    def get_all_packages(self) -> t.Iterable[PkgFile]:
        return itertools.chain.from_iterable(listdir(r) for r in self.roots)

    def add_package(self, filename: str, stream: t.BinaryIO) -> None:
        write_file(stream, self.roots[0].joinpath(filename))

    def remove_package(self, pkg: PkgFile) -> None:
        if pkg.fn is not None:
            try:
                os.remove(pkg.fn)
            except FileNotFoundError:
                log.warning(
                    "Tried to remove %s, but it is already gone", pkg.fn
                )
            except OSError:
                log.exception("Unexpected error removing package: %s", pkg.fn)
                raise

    def exists(self, filename: str) -> bool:
        return any(
            filename == existing_file.name
            for root in self.roots
            for existing_file in all_listed_files(root)
        )


class CachingFileBackend(SimpleFileBackend):
    def __init__(
        self,
        config: "Configuration",
        cache_manager: t.Optional[CacheManager] = None,
    ):
        super().__init__(config)

        self.cache_manager = cache_manager or CacheManager()  # type: ignore

    def add_package(self, filename: str, stream: t.BinaryIO) -> None:
        super().add_package(filename, stream)
        self.cache_manager.invalidate_root_cache(self.roots[0])

    def remove_package(self, pkg: PkgFile) -> None:
        super().remove_package(pkg)
        self.cache_manager.invalidate_root_cache(pkg.root)

    def get_all_packages(self) -> t.Iterable[PkgFile]:
        return itertools.chain.from_iterable(
            self.cache_manager.listdir(r, listdir) for r in self.roots
        )

    def digest(self, pkg: PkgFile) -> t.Optional[str]:
        if self.hash_algo is None or pkg.fn is None:
            return None
        return self.cache_manager.digest_file(
            pkg.fn, self.hash_algo, digest_file
        )


def write_file(fh: t.BinaryIO, destination: PathLike) -> None:
    """Write a byte stream into a destination file. Writes are chunked to
    reduce the memory footprint.
    """
    chunk_size = 2**20  # 1 MB
    offset = fh.tell()
    try:
        with open(destination, "wb") as dest:
            for chunk in iter(lambda: fh.read(chunk_size), b""):
                dest.write(chunk)
    finally:
        fh.seek(offset)


def listdir(root: Path) -> t.Iterator[PkgFile]:
    root = root.resolve()
    files = all_listed_files(root)
    yield from valid_packages(root, files)


def all_listed_files(root: Path) -> t.Iterator[Path]:
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = (
            dirname for dirname in dirnames if is_listed_path(Path(dirname))
        )
        for filename in filenames:
            if not is_listed_path(Path(filename)):
                continue
            filepath = root / dirpath / filename
            if Path(filepath).is_file():
                yield filepath


def valid_packages(root: Path, files: t.Iterable[Path]) -> t.Iterator[PkgFile]:
    for file in files:
        res = guess_pkgname_and_version(str(file.name))
        if res is not None:
            pkgname, version = res
            fn = str(file)
            root_name = str(root)
            yield PkgFile(
                pkgname=pkgname,
                version=version,
                fn=fn,
                root=root_name,
                relfn=fn[len(root_name) + 1 :],
            )


def digest_file(file_path: PathLike, hash_algo: str) -> str:
    """
    Reads and digests a file according to the specified hashing algorithm.

    :param file_path: path to a file on disk
    :param hash_algo: any algo contained in :mod:`hashlib`
    :return: <hash_algo>=<hex_digest>

    From http://stackoverflow.com/a/21565932/548792
    """
    blocksize = 2**16
    digester = hashlib.new(hash_algo)
    with open(file_path, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digester.update(block)
    return f"{hash_algo}={digester.hexdigest()}"


def get_file_backend(config: "Configuration") -> Backend:
    if ENABLE_CACHING:
        return CachingFileBackend(config)
    return SimpleFileBackend(config)


PkgFunc = t.TypeVar("PkgFunc", bound=t.Callable[..., t.Iterable[PkgFile]])


def with_digester(func: PkgFunc) -> PkgFunc:
    @functools.wraps(func)
    def add_digester_method(
        self: "BackendProxy", *args: t.Any, **kwargs: t.Any
    ) -> t.Iterable[PkgFile]:
        packages = func(self, *args, **kwargs)
        for package in packages:
            package.digester = self.backend.digest
            yield package

    return t.cast(PkgFunc, add_digester_method)


class BackendProxy(IBackend):
    def __init__(self, wraps: Backend):
        self.backend = wraps

    @with_digester
    def get_all_packages(self) -> t.Iterable[PkgFile]:
        return self.backend.get_all_packages()

    @with_digester
    def find_project_packages(self, project: str) -> t.Iterable[PkgFile]:
        return self.backend.find_project_packages(project)

    def find_version(self, name: str, version: str) -> t.Iterable[PkgFile]:
        return self.backend.find_version(name, version)

    def get_projects(self) -> t.Iterable[str]:
        return self.backend.get_projects()

    def exists(self, filename: str) -> bool:
        assert "/" not in filename
        return self.backend.exists(filename)

    def package_count(self) -> int:
        return self.backend.package_count()

    def add_package(self, filename: str, fh: t.BinaryIO) -> None:
        assert "/" not in filename
        return self.backend.add_package(filename, fh)

    def remove_package(self, pkg: PkgFile) -> None:
        return self.backend.remove_package(pkg)

    def digest(self, pkg: PkgFile) -> t.Optional[str]:
        return self.backend.digest(pkg)
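The `Backend` base class above documents exactly which methods a storage implementation must supply. A minimal in-memory sketch against that interface — an illustration only, not a backend shipped with pypiserver, and the `PkgFile` keyword arguments mirror those used by `valid_packages` above:

```python
import typing as t

from pypiserver.backend import Backend
from pypiserver.core import PkgFile
from pypiserver.pkg_helpers import guess_pkgname_and_version


class MemoryBackend(Backend):
    """Keep uploaded distributions in a dict instead of on disk."""

    def __init__(self, config):
        super().__init__(config)
        self._files: t.Dict[str, bytes] = {}

    def get_all_packages(self) -> t.Iterable[PkgFile]:
        for filename in self._files:
            res = guess_pkgname_and_version(filename)
            if res is not None:
                pkgname, version = res
                yield PkgFile(pkgname=pkgname, version=version, relfn=filename)

    def add_package(self, filename: str, stream: t.BinaryIO) -> None:
        self._files[filename] = stream.read()

    def remove_package(self, pkg: PkgFile) -> None:
        self._files.pop(pkg.relfn, None)

    def exists(self, filename: str) -> bool:
        return filename in self._files
```

The inherited `package_count`, `get_projects`, `find_project_packages`, and `find_version` defaults all work unchanged on top of `get_all_packages`, which is exactly the design the docstrings describe.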
pypiserver/bottle.py
@@ -41,7 +41,6 @@ import base64, cgi, email.utils, functools, hmac, itertools, mimetypes,\
 from datetime import date as datedate, datetime, timedelta
 from tempfile import TemporaryFile
 from traceback import format_exc, print_exc
-from inspect import getargspec
 from unicodedata import normalize


@@ -79,6 +78,7 @@ except IOError:
 # Lots of stdlib and builtin differences.
 if py3k:
     import http.client as httplib
+    from inspect import getfullargspec as getargspec
     import _thread as thread
     from urllib.parse import urljoin, SplitResult as UrlSplitResult
     from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
@@ -101,6 +101,7 @@ if py3k:
     def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
 else:  # 2.x
     import httplib
+    from inspect import getargspec
     import thread
     from urlparse import urljoin, SplitResult as UrlSplitResult
     from urllib import urlencode, quote as urlquote, unquote as urlunquote
@@ -1752,7 +1753,7 @@ class JSONPlugin(object):
         if isinstance(rv, dict):
             #Attempt to serialize, raises exception on failure
             json_response = dumps(rv)
-            #Set content type only if serialization succesful
+            #Set content type only if serialization successful
             response.content_type = 'application/json'
             return json_response
         elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
@@ -2327,7 +2328,7 @@ class ResourceManager(object):
         ''' Search for a resource and return an absolute file path, or `None`.

             The :attr:`path` list is searched in order. The first match is
-            returend. Symlinks are followed. The result is cached to speed up
+            returned. Symlinks are followed. The result is cached to speed up
             future lookups. '''
         if name not in self.cache or DEBUG:
             for path in self.path:
pypiserver/cache.py
@@ -4,10 +4,23 @@
 #

 from os.path import dirname

-from watchdog.observers import Observer
+from pathlib import Path
+import typing as t
 import threading

+try:
+    from watchdog.observers import Observer
+
+    ENABLE_CACHING = True
+
+except ImportError:
+    Observer = None
+
+    ENABLE_CACHING = False
+
+if t.TYPE_CHECKING:
+    from pypiserver.core import PkgFile
+

 class CacheManager:
     """
@@ -26,6 +39,11 @@ class CacheManager:
     """

     def __init__(self):
+        if not ENABLE_CACHING:
+            raise RuntimeError(
+                "Please install the extra cache requirements by running 'pip "
+                "install pypiserver[cache]' to use the CachingFileBackend"
+            )
+
         # Cache for listdir output
         self.listdir_cache = {}
@@ -46,7 +64,12 @@ class CacheManager:
         self.digest_lock = threading.Lock()
         self.listdir_lock = threading.Lock()

-    def listdir(self, root, impl_fn):
+    def listdir(
+        self,
+        root: t.Union[Path, str],
+        impl_fn: t.Callable[[Path], t.Iterable["PkgFile"]],
+    ) -> t.Iterable["PkgFile"]:
+        root = str(root)
         with self.listdir_lock:
             try:
                 return self.listdir_cache[root]
@@ -56,11 +79,13 @@ class CacheManager:
             if root not in self.watched:
                 self._watch(root)

-            v = list(impl_fn(root))
+            v = list(impl_fn(Path(root)))
             self.listdir_cache[root] = v
             return v

-    def digest_file(self, fpath, hash_algo, impl_fn):
+    def digest_file(
+        self, fpath: str, hash_algo: str, impl_fn: t.Callable[[str, str], str]
+    ) -> str:
         with self.digest_lock:
             try:
                 cache = self.digest_cache[hash_algo]
@@ -82,13 +107,17 @@ class CacheManager:
             cache[fpath] = v
             return v

-    def _watch(self, root):
+    def _watch(self, root: str):
         self.watched.add(root)
         self.observer.schedule(_EventHandler(self, root), root, recursive=True)

+    def invalidate_root_cache(self, root: t.Union[Path, str]):
+        with self.listdir_lock:
+            self.listdir_cache.pop(str(root), None)
+

 class _EventHandler:
-    def __init__(self, cache, root):
+    def __init__(self, cache: CacheManager, root: str):
         self.cache = cache
         self.root = root

@@ -101,8 +130,7 @@ class _EventHandler:
             return

         # Lazy: just invalidate the whole cache
-        with cache.listdir_lock:
-            cache.listdir_cache.pop(self.root, None)
+        cache.invalidate_root_cache(self.root)

         # Digests are more expensive: invalidate specific paths
         paths = []
@@ -117,6 +145,3 @@ class _EventHandler:
         for _, subcache in cache.digest_cache.items():
             for path in paths:
                 subcache.pop(path, None)
-
-
-cache_manager = CacheManager()
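With the module-level `cache_manager` singleton gone, callers construct `CacheManager` themselves and pass in the uncached implementations as `impl_fn`, as `CachingFileBackend` does above. A usage sketch under the assumption that the `pypiserver[cache]` extras (watchdog) are installed and `~/packages` exists:

```python
from pathlib import Path

from pypiserver.backend import digest_file, listdir
from pypiserver.cache import CacheManager

cm = CacheManager()  # raises RuntimeError if watchdog is not installed
root = Path("~/packages").expanduser()
# First call scans the directory and starts watching it; later calls are
# served from the cache until a filesystem event invalidates it.
for pkg in cm.listdir(root, listdir):
    print(cm.digest_file(pkg.fn, "sha256", digest_file))
```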
pypiserver/config.py
@@ -37,30 +37,72 @@ import argparse
 import contextlib
 import hashlib
 import io
 import itertools
 import logging
 import pathlib
-import pkg_resources
 import re
 import sys
 import textwrap
 import typing as t
-from distutils.util import strtobool as strtoint

+try:
+    # `importlib_resources` is required for Python versions below 3.12
+    # See more in the package docs: https://pypi.org/project/importlib-resources/
+    try:
+        from importlib_resources import files as import_files
+    except ImportError:
+        from importlib.resources import files as import_files
+
+    def get_resource_bytes(package: str, resource: str) -> bytes:
+        ref = import_files(package).joinpath(resource)
+        return ref.read_bytes()
+
+except ImportError:
+    # The `pkg_resources` is deprecated in Python 3.12
+    import pkg_resources
+
+    def get_resource_bytes(package: str, resource: str) -> bytes:
+        return pkg_resources.resource_string(package, resource)
+
+
+from pypiserver.backend import (
+    SimpleFileBackend,
+    CachingFileBackend,
+    Backend,
+    IBackend,
+    get_file_backend,
+    BackendProxy,
+)
+
+# The `passlib` requirement is optional, so we need to verify its import here.

 try:
     from passlib.apache import HtpasswdFile
 except ImportError:
     HtpasswdFile = None

-from pypiserver import core
-
+
+def legacy_strtoint(val: str) -> int:
+    """Convert a string representation of truth to true (1) or false (0).
+
+    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+    'val' is anything else.
+
+    The "strtobool" function in distutils does a nice job at parsing strings,
+    but returns an integer. This just wraps it in a boolean call so that we
+    get a bool.
+
+    Borrowed from deprecated distutils.
+    """
+    val = val.lower()
+    if val in ("y", "yes", "t", "true", "on", "1"):
+        return 1
+    elif val in ("n", "no", "f", "false", "off", "0"):
+        return 0
+    else:
+        raise ValueError("invalid truth value {!r}".format(val))
+
+
-# The "strtobool" function in distutils does a nice job at parsing strings,
-# but returns an integer. This just wraps it in a boolean call so that we
-# get a bool.
-strtobool: t.Callable[[str], bool] = lambda val: bool(strtoint(val))
+strtobool: t.Callable[[str], bool] = lambda val: bool(legacy_strtoint(val))


 # Specify defaults here so that we can use them in tests &c. and not need
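A quick check of the truth-value parsing above, assuming this module is importable as `pypiserver.config`:

```python
from pypiserver.config import strtobool

assert strtobool("yes") is True
assert strtobool("OFF") is False  # values are lower-cased before matching

try:
    strtobool("maybe")
except ValueError as exc:
    print(exc)  # invalid truth value 'maybe'
```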
@@ -70,7 +112,8 @@ class DEFAULTS:

     AUTHENTICATE = ["update"]
     FALLBACK_URL = "https://pypi.org/simple/"
-    HASH_ALGO = "md5"
+    HEALTH_ENDPOINT = "/health"
+    HASH_ALGO = "sha256"
     INTERFACE = "0.0.0.0"
     LOG_FRMT = "%(asctime)s|%(name)s|%(levelname)s|%(thread)d|%(message)s"
     LOG_ERR_FRMT = "%(body)s: %(exception)s \n%(traceback)s"
@@ -80,6 +123,7 @@ class DEFAULTS:
     PACKAGE_DIRECTORIES = [pathlib.Path("~/packages").expanduser().resolve()]
     PORT = 8080
     SERVER_METHOD = "auto"
+    BACKEND = "auto"


 def auth_arg(arg: str) -> t.List[str]:
@@ -127,12 +171,23 @@ def hash_algo_arg(arg: str) -> t.Optional[str]:
     )


+def health_endpoint_arg(arg: str) -> str:
+    """Verify the health_endpoint and raise if invalid."""
+    rule_regex = r"^/[a-z0-9/_-]+$"
+    if re.fullmatch(rule_regex, arg, re.I) is not None:
+        return arg
+
+    raise argparse.ArgumentTypeError(
+        "Invalid path for the health endpoint. Make sure that it contains only "
+        "alphanumeric characters, hyphens, forward slashes and underscores. "
+        f"In other words, make sure to match the following regex: {rule_regex}"
+    )
+
+
 def html_file_arg(arg: t.Optional[str]) -> str:
     """Parse the provided HTML file and return its contents."""
     if arg is None or arg == "pypiserver/welcome.html":
-        return pkg_resources.resource_string(__name__, "welcome.html").decode(
-            "utf-8"
-        )
+        return get_resource_bytes(__name__, "welcome.html").decode("utf-8")
     with open(arg, "r", encoding="utf-8") as f:
         msg = f.read()
     return msg
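`health_endpoint_arg` accepts any path matching `^/[a-z0-9/_-]+$` case-insensitively. A small demonstration of which endpoint paths pass that validation:

```python
import re

RULE_REGEX = r"^/[a-z0-9/_-]+$"  # same pattern as health_endpoint_arg above

for candidate in ("/health", "/healthz", "/api/health", "health", "/Health!"):
    ok = re.fullmatch(RULE_REGEX, candidate, re.I) is not None
    print(f"{candidate!r}: {'accepted' if ok else 'rejected'}")
# "health" is rejected (no leading slash); "/Health!" is rejected ("!").
```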
@ -236,6 +291,28 @@ def add_common_args(parser: argparse.ArgumentParser) -> None:
|
||||
"standard python library)"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--hash-algo",
|
||||
default=DEFAULTS.HASH_ALGO,
|
||||
type=hash_algo_arg,
|
||||
help=(
|
||||
"Any `hashlib` available algorithm to use for generating fragments "
|
||||
"on package links. Can be disabled with one of (0, no, off, false)."
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--backend",
|
||||
default=DEFAULTS.BACKEND,
|
||||
choices=("auto", "simple-dir", "cached-dir"),
|
||||
dest="backend_arg",
|
||||
help=(
|
||||
"A backend implementation. Keep the default 'auto' to automatically"
|
||||
" determine whether to activate caching or not"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
action="version",
|
||||
@ -254,7 +331,6 @@ def get_parser() -> argparse.ArgumentParser:
|
||||
"directories starting with a dot. Multiple package directories "
|
||||
"may be specified."
|
||||
),
|
||||
# formatter_class=argparse.RawTextHelpFormatter,
|
||||
formatter_class=PreserveWhitespaceRawTextHelpFormatter,
|
||||
epilog=(
|
||||
"Visit https://github.com/pypiserver/pypiserver "
|
||||
@ -354,6 +430,15 @@ def get_parser() -> argparse.ArgumentParser:
|
||||
"index."
|
||||
),
|
||||
)
|
||||
run_parser.add_argument(
|
||||
"--health-endpoint",
|
||||
default=DEFAULTS.HEALTH_ENDPOINT,
|
||||
type=health_endpoint_arg,
|
||||
help=(
|
||||
"Configure a custom liveness endpoint. It always returns 200 Ok if "
|
||||
"the service is up. Otherwise, it means that the service is not responsive."
|
||||
),
|
||||
)
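As an aside (editor's note, not part of the diff): with the option above, the liveness route can be remapped at startup, e.g. `pypi-server run --health-endpoint /healthz`; "/healthz" is the same value exercised by the config and app tests further down in this diff.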
    run_parser.add_argument(
        "--server",
        metavar="METHOD",
@@ -381,15 +466,6 @@ def get_parser() -> argparse.ArgumentParser:
        action="store_true",
        help="Allow overwriting existing package files during upload.",
    )
    run_parser.add_argument(
        "--hash-algo",
        default=DEFAULTS.HASH_ALGO,
        type=hash_algo_arg,
        help=(
            "Any `hashlib` available algorithm to use for generating fragments "
            "on package links. Can be disabled with one of (0, no, off, false)."
        ),
    )
    run_parser.add_argument(
        "--welcome",
        metavar="HTML_FILE",
@@ -411,6 +487,7 @@ def get_parser() -> argparse.ArgumentParser:
        help=(
            'Add "Cache-Control: max-age=AGE" header to package downloads. '
            "Pip 6+ requires this for caching."
            "AGE is specified in seconds."
        ),
    )
    run_parser.add_argument(
@@ -440,6 +517,14 @@ def get_parser() -> argparse.ArgumentParser:
            "to '%%s' to see them all."
        ),
    )
    run_parser.add_argument(
        "--mirror",
        default=0,
        action="count",
        help=(
            "Mirror packages to local disk"
        ),
    )

    update_parser = subparsers.add_parser(
        "update",
@@ -504,9 +589,12 @@ def get_parser() -> argparse.ArgumentParser:


TConf = t.TypeVar("TConf", bound="_ConfigCommon")
BackendFactory = t.Callable[["_ConfigCommon"], Backend]


class _ConfigCommon:
    hash_algo: t.Optional[str] = None

    def __init__(
        self,
        roots: t.List[pathlib.Path],
@@ -514,6 +602,8 @@ class _ConfigCommon:
        log_frmt: str,
        log_file: t.Optional[str],
        log_stream: t.Optional[t.IO],
        hash_algo: t.Optional[str],
        backend_arg: str,
    ) -> None:
        """Construct a RuntimeConfig."""
        # Global arguments
@@ -521,18 +611,24 @@ class _ConfigCommon:
        self.log_file = log_file
        self.log_stream = log_stream
        self.log_frmt = log_frmt

        self.roots = roots
        self.hash_algo = hash_algo
        self.backend_arg = backend_arg

        # Derived properties are directly based on other properties and are not
        # included in equality checks.
        self._derived_properties: t.Tuple[str, ...] = (
            "iter_packages",
            "package_root",
            "backend",
        )
        # The first package directory is considered the root. This is used
        # for uploads.
        self.package_root = self.roots[0]

        self.backend = self.get_backend(backend_arg)

    @classmethod
    def from_namespace(
        cls: t.Type[TConf], namespace: argparse.Namespace
@@ -551,6 +647,8 @@ class _ConfigCommon:
            log_stream=namespace.log_stream,
            log_frmt=namespace.log_frmt,
            roots=namespace.package_directory,
            hash_algo=namespace.hash_algo,
            backend_arg=namespace.backend_arg,
        )

    @property
@@ -565,13 +663,16 @@ class _ConfigCommon:
        # If we've specified 3 or more levels of verbosity, just return not set.
        return levels.get(self.verbosity, logging.NOTSET)

    def iter_packages(self) -> t.Iterator[core.PkgFile]:
        """Iterate over packages in root directories."""
        yield from (
            itertools.chain.from_iterable(
                core.listdir(str(r)) for r in self.roots
            )
        )
    def get_backend(self, arg: str) -> IBackend:
        available_backends: t.Dict[str, BackendFactory] = {
            "auto": get_file_backend,
            "simple-dir": SimpleFileBackend,
            "cached-dir": CachingFileBackend,
        }

        backend = available_backends[arg]

        return BackendProxy(backend(self))

    def with_updates(self: TConf, **kwargs: t.Any) -> TConf:
        """Create a new config with the specified updates.
@@ -579,7 +680,7 @@ class _ConfigCommon:
        The current config is used as a base. Any properties not specified in
        keyword arguments will remain unchanged.
        """
        return self.__class__(**{**dict(self), **kwargs})  # type: ignore
        return self.__class__(**{**dict(self), **kwargs})
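A short usage sketch of with_updates (editor's illustration; `config` is an assumed existing RunConfig instance, and `fallback_url` is one of the fields shown in this diff):

    updated = config.with_updates(fallback_url="https://example.com/simple/")
    # `updated` is a brand-new config object: unspecified fields are copied
    # from `config` via dict(self), and derived values such as the backend
    # are rebuilt by __init__.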

    def __repr__(self) -> str:
        """A string representation indicating the class and its properties."""
@@ -599,7 +700,7 @@ class _ConfigCommon:
        return all(
            getattr(other, k) == v
            for k, v in self
            if not k in self._derived_properties
            if k not in self._derived_properties
        )

    def __iter__(self) -> t.Iterator[t.Tuple[str, t.Any]]:
@@ -622,15 +723,16 @@ class RunConfig(_ConfigCommon):
        password_file: t.Optional[str],
        disable_fallback: bool,
        fallback_url: str,
        health_endpoint: str,
        server_method: str,
        overwrite: bool,
        hash_algo: t.Optional[str],
        welcome_msg: str,
        cache_control: t.Optional[int],
        mirror: bool,
        log_req_frmt: str,
        log_res_frmt: str,
        log_err_frmt: str,
        auther: t.Callable[[str, str], bool] = None,
        auther: t.Optional[t.Callable[[str, str], bool]] = None,
        **kwargs: t.Any,
    ) -> None:
        """Construct a RuntimeConfig."""
@@ -641,18 +743,18 @@ class RunConfig(_ConfigCommon):
        self.password_file = password_file
        self.disable_fallback = disable_fallback
        self.fallback_url = fallback_url
        self.health_endpoint = health_endpoint
        self.server_method = server_method
        self.overwrite = overwrite
        self.hash_algo = hash_algo
        self.welcome_msg = welcome_msg
        self.cache_control = cache_control
        self.log_req_frmt = log_req_frmt
        self.log_res_frmt = log_res_frmt
        self.log_err_frmt = log_err_frmt

        # Derived properties
        self._derived_properties = self._derived_properties + ("auther",)
        self.auther = self.get_auther(auther)
        self.mirror = mirror

    @classmethod
    def kwargs_from_namespace(
@@ -667,11 +769,12 @@ class RunConfig(_ConfigCommon):
            "password_file": namespace.passwords,
            "disable_fallback": namespace.disable_fallback,
            "fallback_url": namespace.fallback_url,
            "health_endpoint": namespace.health_endpoint,
            "server_method": namespace.server,
            "overwrite": namespace.overwrite,
            "hash_algo": namespace.hash_algo,
            "welcome_msg": namespace.welcome,
            "cache_control": namespace.cache_control,
            "mirror": namespace.mirror,
            "log_req_frmt": namespace.log_req_frmt,
            "log_res_frmt": namespace.log_res_frmt,
            "log_err_frmt": namespace.log_err_frmt,
@@ -752,6 +855,9 @@ class UpdateConfig(_ConfigCommon):
        }


Configuration = t.Union[RunConfig, UpdateConfig]


class Config:
    """Config constructor for building a config from args."""

@@ -768,8 +874,8 @@ class Config:

    @classmethod
    def from_args(
        cls, args: t.Sequence[str] = None
    ) -> t.Union[RunConfig, UpdateConfig]:
        cls, args: t.Optional[t.Sequence[str]] = None
    ) -> Configuration:
        """Construct a Config from the passed args or sys.argv."""
        # If pulling args from sys.argv (commandline arguments), argv[0] will
        # be the program name, (i.e. pypi-server), so we don't need to
@@ -846,7 +952,7 @@ class Config:
    def _adjust_old_args(args: t.Sequence[str]) -> t.List[str]:
        """Adjust args for backwards compatibility.

        Should only be called once args have been verified to be unparseable.
        Should only be called once args have been verified to be unparsable.
        """
        # Backwards compatibility hack: for most of pypiserver's life, "run"
        # and "update" were not separate subcommands. The `-U` flag being

@@ -1,154 +1,69 @@
#! /usr/bin/env python
#! /usr/bin/env python3
"""minimal PyPI like server for use with pip/easy_install"""

import hashlib
import logging
import mimetypes
import os
import re
import typing as t
from urllib.parse import quote


log = logging.getLogger(__name__)

from pypiserver.pkg_helpers import normalize_pkgname, parse_version

mimetypes.add_type("application/octet-stream", ".egg")
mimetypes.add_type("application/octet-stream", ".whl")
mimetypes.add_type("text/plain", ".asc")


# ### Next 2 functions adapted from :mod:`distribute.pkg_resources`.
#
component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.I | re.VERBOSE)
replace = {"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@"}.get


def _parse_version_parts(s):
    for part in component_re.split(s):
        part = replace(part, part)
        if part in ["", "."]:
            continue
        if part[:1] in "0123456789":
            yield part.zfill(8)  # pad for numeric comparison
        else:
            yield "*" + part

    yield "*final"  # ensure that alpha/beta/candidate are before final


def parse_version(s):
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith("*"):
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()
        parts.append(part)
    return tuple(parts)


#
#### -- End of distribute's code.


_archive_suffix_rx = re.compile(
    r"(\.zip|\.tar\.gz|\.tgz|\.tar\.bz2|-py[23]\.\d-.*|"
    r"\.win-amd64-py[23]\.\d\..*|\.win32-py[23]\.\d\..*|\.egg)$",
    re.I,
)
wheel_file_re = re.compile(
    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
    ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
    \.whl|\.dist-info)$""",
    re.VERBOSE,
)
_pkgname_re = re.compile(r"-\d+[a-z_.!+]", re.I)
_pkgname_parts_re = re.compile(
    r"[\.\-](?=cp\d|py\d|macosx|linux|sunos|solaris|irix|aix|cygwin|win)", re.I
)


def _guess_pkgname_and_version_wheel(basename):
    m = wheel_file_re.match(basename)
    if not m:
        return None, None
    name = m.group("name")
    ver = m.group("ver")
    build = m.group("build")
    if build:
        return name, ver + "-" + build
    else:
        return name, ver


def guess_pkgname_and_version(path):
    path = os.path.basename(path)
    if path.endswith(".asc"):
        path = path.rstrip(".asc")
    if path.endswith(".whl"):
        return _guess_pkgname_and_version_wheel(path)
    if not _archive_suffix_rx.search(path):
        return
    path = _archive_suffix_rx.sub("", path)
    if "-" not in path:
        pkgname, version = path, ""
    elif path.count("-") == 1:
        pkgname, version = path.split("-", 1)
    elif "." not in path:
        pkgname, version = path.rsplit("-", 1)
    else:
        pkgname = _pkgname_re.split(path)[0]
        ver_spec = path[len(pkgname) + 1 :]
        parts = _pkgname_parts_re.split(ver_spec)
        version = parts[0]
    return pkgname, version


def normalize_pkgname(name):
    """Perform PEP 503 normalization"""
    return re.sub(r"[-_.]+", "-", name).lower()


def normalize_pkgname_for_url(name):
    """Perform PEP 503 normalization and ensure the value is safe for URLs."""
    return quote(re.sub(r"[-_.]+", "-", name).lower())


def is_allowed_path(path_part):
    p = path_part.replace("\\", "/")
    return not (p.startswith(".") or "/." in p)
def get_bad_url_redirect_path(request, project):
    """Get the path for a bad root url."""
    uri = request.custom_fullpath
    if uri.endswith("/"):
        uri = uri[:-1]
    uri = uri.rsplit("/", 1)[0]
    project = quote(project)
    uri += f"/simple/{project}/"
    return uri


class PkgFile:

    __slots__ = [
        "fn",
        "root",
        "_fname_and_hash",
        "relfn",
        "relfn_unix",
        "pkgname_norm",
        "pkgname",
        "version",
        "parsed_version",
        "replaces",
        "pkgname",  # The projects/package name with possible capitalization
        "version",  # The package version as a string
        "fn",  # The full file path
        "root",  # An optional root directory of the file
        "relfn",  # The file path relative to the root
        "replaces",  # The previous version of the package (used by manage.py)
        "pkgname_norm",  # The PEP503 normalized project name
        "digest",  # The file digest in the form of <algo>=<hash>
        "relfn_unix",  # The relative file path in unix notation
        "parsed_version",  # The package version as a tuple of parts
        "digester",  # a function that calculates the digest for the package
    ]
    digest: t.Optional[str]
    digester: t.Optional[t.Callable[["PkgFile"], t.Optional[str]]]
    parsed_version: tuple
    relfn_unix: t.Optional[str]

    def __init__(
        self, pkgname, version, fn=None, root=None, relfn=None, replaces=None
        self,
        pkgname: str,
        version: str,
        fn: t.Optional[str] = None,
        root: t.Optional[str] = None,
        relfn: t.Optional[str] = None,
        replaces: t.Optional["PkgFile"] = None,
    ):
        self.pkgname = pkgname
        self.pkgname_norm = normalize_pkgname(pkgname)
        self.version = version
        self.parsed_version = parse_version(version)
        self.parsed_version: tuple = parse_version(version)
        self.fn = fn
        self.root = root
        self.relfn = relfn
        self.relfn_unix = None if relfn is None else relfn.replace("\\", "/")
        self.replaces = replaces
        self.digest = None
        self.digester = None

    def __repr__(self):
    def __repr__(self) -> str:
        return "{}({})".format(
            self.__class__.__name__,
            ", ".join(
@@ -159,109 +74,9 @@ class PkgFile:
            ),
        )

    def fname_and_hash(self, hash_algo):
        if not hasattr(self, "_fname_and_hash"):
            if hash_algo:
                self._fname_and_hash = (
                    f"{self.relfn_unix}#{hash_algo}="
                    f"{digest_file(self.fn, hash_algo)}"
                )
            else:
                self._fname_and_hash = self.relfn_unix
        return self._fname_and_hash


def _listdir(root: str) -> t.Iterable[PkgFile]:
    root = os.path.abspath(root)
    for dirpath, dirnames, filenames in os.walk(root):
        dirnames[:] = [x for x in dirnames if is_allowed_path(x)]
        for x in filenames:
            fn = os.path.join(root, dirpath, x)
            if not is_allowed_path(x) or not os.path.isfile(fn):
                continue
            res = guess_pkgname_and_version(x)
            if not res:
                # #Seems the current file isn't a proper package
                continue
            pkgname, version = res
            if pkgname:
                yield PkgFile(
                    pkgname=pkgname,
                    version=version,
                    fn=fn,
                    root=root,
                    relfn=fn[len(root) + 1 :],
                )


def find_packages(pkgs, prefix=""):
    prefix = normalize_pkgname(prefix)
    for x in pkgs:
        if prefix and x.pkgname_norm != prefix:
            continue
        yield x


def get_prefixes(pkgs):
    normalized_pkgnames = set()
    for x in pkgs:
        if x.pkgname:
            normalized_pkgnames.add(x.pkgname_norm)
    return normalized_pkgnames


def exists(root, filename):
    assert "/" not in filename
    dest_fn = os.path.join(root, filename)
    return os.path.exists(dest_fn)


def store(root, filename, save_method):
    assert "/" not in filename
    dest_fn = os.path.join(root, filename)
    save_method(dest_fn, overwrite=True)  # Overwite check earlier.


def get_bad_url_redirect_path(request, prefix):
    """Get the path for a bad root url."""
    p = request.custom_fullpath
    if p.endswith("/"):
        p = p[:-1]
    p = p.rsplit("/", 1)[0]
    prefix = quote(prefix)
    p += "/simple/{}/".format(prefix)
    return p


def _digest_file(fpath, hash_algo):
    """
    Reads and digests a file according to specified hashing-algorith.

    :param str sha256: any algo contained in :mod:`hashlib`
    :return: <hash_algo>=<hex_digest>

    From http://stackoverflow.com/a/21565932/548792
    """
    blocksize = 2 ** 16
    digester = hashlib.new(hash_algo)
    with open(fpath, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digester.update(block)
    return digester.hexdigest()


try:
    from .cache import cache_manager

    def listdir(root: str) -> t.Iterable[PkgFile]:
        # root must be absolute path
        return cache_manager.listdir(root, _listdir)

    def digest_file(fpath, hash_algo):
        # fpath must be absolute path
        return cache_manager.digest_file(fpath, hash_algo, _digest_file)


except ImportError:
    listdir = _listdir
    digest_file = _digest_file
    @property
    def fname_and_hash(self) -> str:
        if self.digest is None and self.digester is not None:
            self.digest = self.digester(self)
        hashpart = f"#{self.digest}" if self.digest else ""
        return self.relfn_unix + hashpart  # type: ignore
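A sketch of the new lazy-digest flow (editor's illustration, modeled on the usage in tests/test_core.py further down; the file paths are made up, and digest_file stands for the backend helper that returns "<algo>=<hexdigest>"):

    pkg = PkgFile("tmp", "1.0.0", fn="/srv/pkgs/tmp-1.0.0.zip",
                  root="/srv/pkgs", relfn="tmp-1.0.0.zip")
    pkg.digester = lambda p: digest_file(p.fn, "sha256")  # set by the backend
    pkg.fname_and_hash  # -> "tmp-1.0.0.zip#sha256=..."; computed once,
                        #    then cached on pkg.digest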

@@ -5,14 +5,17 @@ from __future__ import absolute_import, print_function, unicode_literals
import itertools
import os
import sys
from distutils.version import LooseVersion

from packaging.version import parse as packaging_parse
from pathlib import Path
from subprocess import call
from xmlrpc.client import Server

import pip

from . import core

from xmlrpc.client import Server
from .backend import listdir
from .core import PkgFile
from .pkg_helpers import normalize_pkgname, parse_version


def make_pypi_client(url):
@@ -41,7 +44,7 @@ def filter_latest_pkgs(pkgs):
    pkgname2latest = {}

    for x in pkgs:
        pkgname = core.normalize_pkgname(x.pkgname)
        pkgname = normalize_pkgname(x.pkgname)

        if pkgname not in pkgname2latest:
            pkgname2latest[pkgname] = x
@@ -53,9 +56,9 @@ def filter_latest_pkgs(pkgs):

def build_releases(pkg, versions):
    for x in versions:
        parsed_version = core.parse_version(x)
        parsed_version = parse_version(x)
        if parsed_version > pkg.parsed_version:
            yield core.PkgFile(pkgname=pkg.pkgname, version=x, replaces=pkg)
            yield PkgFile(pkgname=pkg.pkgname, version=x, replaces=pkg)


def find_updates(pkgset, stable_only=True):
@@ -98,7 +101,8 @@ def find_updates(pkgset, stable_only=True):

    if no_releases:
        sys.stdout.write(
            f"no releases found on pypi for {', '.join(sorted(no_releases))}\n\n"
            f"no releases found on pypi for"
            f" {', '.join(sorted(no_releases))}\n\n"
        )

    return need_update
@@ -109,12 +113,14 @@ class PipCmd:

    @staticmethod
    def update_root(pip_version):
        """Yield an appropriate root command depending on pip version."""
        # legacy_pip = StrictVersion(pip_version) < StrictVersion('10.0')
        legacy_pip = LooseVersion(pip_version) < LooseVersion("10.0")
        for part in ("pip", "-q"):
        """Yield an appropriate root command depending on pip version.

        Use `pip install` for `pip` 9 or lower, and `pip download` otherwise.
        """
        legacy_pip = packaging_parse(pip_version).major < 10
        pip_command = "install" if legacy_pip else "download"
        for part in ("pip", "-q", pip_command):
            yield part
        yield "install" if legacy_pip else "download"
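The net effect of the rewrite above (editor's worked example with illustrative pip versions):

    tuple(PipCmd.update_root("9.0.3"))  # -> ("pip", "-q", "install")
    tuple(PipCmd.update_root("24.0"))   # -> ("pip", "-q", "download")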

    @staticmethod
    def update(
@@ -135,8 +141,7 @@ class PipCmd:
def update_package(pkg, destdir, dry_run=False):
    """Print and optionally execute a package update."""
    print(
        "# update {0.pkgname} from {0.replaces.version} to "
        "{0.version}".format(pkg)
        f"# update {pkg.pkgname} from {pkg.replaces.version} to {pkg.version}"
    )

    cmd = tuple(
@@ -148,7 +153,7 @@ def update_package(pkg, destdir, dry_run=False):
        )
    )

    print("{}\n".format(" ".join(cmd)))
    print(" ".join(cmd), end="\n\n")
    if not dry_run:
        call(cmd)

@@ -171,7 +176,9 @@ def update(pkgset, destdir=None, dry_run=False, stable_only=True):
def update_all_packages(
    roots, destdir=None, dry_run=False, stable_only=True, ignorelist=None
):
    all_packages = itertools.chain(*[core.listdir(r) for r in roots])
    all_packages = itertools.chain.from_iterable(
        listdir(Path(r)) for r in roots
    )

    skip_packages = set(ignorelist or ())


91 pypiserver/mirror_cache.py Normal file
@@ -0,0 +1,91 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import logging
from collections import OrderedDict
from pypiserver.bottle import HTTPError, redirect
from pypiserver.config import RunConfig
log = logging.getLogger(__name__)
try:
    import requests
    from bs4 import BeautifulSoup
    import_ok = True
except ImportError:
    import_ok = False
    logging.error("mirror_cache import dependencies error")


class CacheElement:
    def __init__(self, project: str):
        self.project = project
        self.html = ""
        self.cache = dict()

    def add(self, href: str):
        targz = href.split("/")[-1]
        pkg_name = targz.split("#")[0]
        self.cache[f"{self.project}/{pkg_name}"] = href
        return f"/packages/{self.project}/{targz}"

class MirrorCache:
    # An OrderedDict so that popitem(last=False) below can evict the
    # oldest entry once cache_limit is exceeded.
    cache: OrderedDict[str, CacheElement] = OrderedDict()
    cache_limit = 10

    @classmethod
    def add(cls, project: str, config: RunConfig) -> str:
        if not import_ok:
            return redirect(f"{config.fallback_url.rstrip('/')}/{project}/")

        if project in cls.cache:
            log.info(f"mirror_cache serve html from cache {project}")
            return cls.cache[project].html

        element = CacheElement(project=project)

        resp = requests.get(f"{config.fallback_url.rstrip('/')}/{project}/")
        soup = BeautifulSoup(resp.content, "html.parser")
        links = soup.find_all("a")
        for link in links:
            # new href with mapping to old href for later
            new_href = element.add(href=link["href"])
            # create new link
            new_link = soup.new_tag("a")
            new_link.string = link.text.strip()
            new_link["href"] = new_href
            link.replace_with(new_link)
        element.html = str(soup)
        cls.cache[project] = element
        log.info(f"mirror_cache add project '{project}' to cache")
        # purge the oldest cached project once the limit is exceeded
        if len(cls.cache) > cls.cache_limit:
            item = cls.cache.popitem(last=False)
            log.info(
                f"mirror_cache limit '{cls.cache_limit}' exceeded,"
                f" purged oldest item - {item}"
            )
        return element.html

    @classmethod
    def has_project(cls, filename):
        project = filename.split("/")[0]
        return project in cls.cache

    @classmethod
    def get_static_file(cls, filename, config: RunConfig):
        if not import_ok:
            return HTTPError(404, f"Not Found ({filename} does not exist)\n\n")
        project = filename.split("/")[0]
        element = cls.cache[project]
        if filename in element.cache:
            href = element.cache[filename]
            resp = requests.get(href)
            cls.add_to_cache(filename=filename, resp=resp, config=config)
            return resp
        log.info(f"mirror_cache not found in cache {filename} ")
        return HTTPError(404, f"Not Found ({filename} does not exist)\n\n")

    @classmethod
    def add_to_cache(cls, filename: str, resp: requests.Response, config: RunConfig):
        project = filename.split("/")[0]
        os.makedirs(os.path.join(config.package_root, project), exist_ok=True)
        log.info(f"mirror_cache add file '{filename}' to cache")
        with open(f"{config.package_root}/{filename}", "wb+") as f:
            f.write(resp.content)
112
pypiserver/pkg_helpers.py
Normal file
112
pypiserver/pkg_helpers.py
Normal file
@ -0,0 +1,112 @@
|
||||
import os
|
||||
import re
|
||||
import typing as t
|
||||
from pathlib import PurePath, Path
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
def normalize_pkgname(name: str) -> str:
|
||||
"""Perform PEP 503 normalization"""
|
||||
return re.sub(r"[-_.]+", "-", name).lower()
|
||||
|
||||
|
||||
def normalize_pkgname_for_url(name: str) -> str:
|
||||
"""Perform PEP 503 normalization and ensure the value is safe for URLs."""
|
||||
return quote(normalize_pkgname(name))
|
||||
|
||||
|
||||
# ### Next 2 functions adapted from :mod:`distribute.pkg_resources`.
|
||||
#
|
||||
|
||||
|
||||
component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.I | re.VERBOSE)
|
||||
replace = {"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@"}.get
|
||||
|
||||
|
||||
def _parse_version_parts(s):
|
||||
for part in component_re.split(s):
|
||||
part = replace(part, part)
|
||||
if part in ["", "."]:
|
||||
continue
|
||||
if part[:1] in "0123456789":
|
||||
yield part.zfill(8) # pad for numeric comparison
|
||||
else:
|
||||
yield "*" + part
|
||||
|
||||
yield "*final" # ensure that alpha/beta/candidate are before final
|
||||
|
||||
|
||||
def parse_version(s: str) -> tuple:
|
||||
parts = []
|
||||
for part in _parse_version_parts(s.lower()):
|
||||
if part.startswith("*"):
|
||||
# remove trailing zeros from each series of numeric parts
|
||||
while parts and parts[-1] == "00000000":
|
||||
parts.pop()
|
||||
parts.append(part)
|
||||
return tuple(parts)
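A worked example of the scheme above (editor's illustration): numeric parts are zero-padded for string comparison and pre-release markers sort before "*final", so

    parse_version("1.0rc1")  # -> ("00000001", "*c", "00000001", "*final")
    parse_version("1.0")     # -> ("00000001", "*final")
    # tuple comparison then yields parse_version("1.0rc1") < parse_version("1.0")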

#
# ### -- End of distribute's code.


def is_listed_path(path_part: t.Union[PurePath, str]) -> bool:
    if isinstance(path_part, str):
        path_part = PurePath(path_part)
    return not any(part.startswith(".") for part in path_part.parts)


_archive_suffix_rx = re.compile(
    r"(\.zip|\.tar\.gz|\.tgz|\.tar\.bz2|\.tar\.xz|-py[23]\.\d-.*|"
    r"\.win-amd64-py[23]\.\d\..*|\.win32-py[23]\.\d\..*|\.egg)$",
    re.I,
)
wheel_file_re = re.compile(
    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
    ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
    \.whl|\.dist-info)$""",
    re.VERBOSE,
)
_pkgname_re = re.compile(r"-\d+[a-z_.!+]", re.I)
_pkgname_parts_re = re.compile(
    r"[\.\-](?=cp\d|py\d|macosx|linux|sunos|solaris|irix|aix|cygwin|win)", re.I
)


def _guess_pkgname_and_version_wheel(
    basename: str,
) -> t.Optional[t.Tuple[str, str]]:
    m = wheel_file_re.match(basename)
    if not m:
        return None
    name = m.group("name")
    ver = m.group("ver")
    build = m.group("build")
    if build:
        return name, ver + "-" + build
    else:
        return name, ver


def guess_pkgname_and_version(path: str) -> t.Optional[t.Tuple[str, str]]:
    path = os.path.basename(path)
    if path.endswith(".asc"):
        path = path.rstrip(".asc")
    if path.endswith(".whl"):
        return _guess_pkgname_and_version_wheel(path)
    if not _archive_suffix_rx.search(path):
        return None
    path = _archive_suffix_rx.sub("", path)
    if "-" not in path:
        pkgname, version = path, ""
    elif path.count("-") == 1:
        pkgname, version = path.split("-", 1)
    elif "." not in path:
        pkgname, version = path.rsplit("-", 1)
    else:
        pkgname = _pkgname_re.split(path)[0]
        ver_spec = path[len(pkgname) + 1 :]
        parts = _pkgname_parts_re.split(ver_spec)
        version = parts[0]
    return pkgname, version
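Editor's illustration of the heuristics above, mirroring cases from the test data later in this diff:

    guess_pkgname_and_version("pytz-2012b.tar.bz2")               # -> ("pytz", "2012b")
    guess_pkgname_and_version("pywin32-217-cp27-none-win32.whl")  # -> ("pywin32", "217")
    guess_pkgname_and_version("not-an-archive.txt")               # -> None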
42 pypiserver/plugin.py Normal file
@@ -0,0 +1,42 @@
""" NOT YET IMPLEMENTED

Plugins are callable setuptools entrypoints that are invoked at startup and
that a developer may use to extend the behaviour of pypiserver. A plugin, for
example, may add an additional Backend to the system. A plugin will be called
with the following keyword arguments:

* app: the Bottle App object
* add_argument: A callable for registering command line arguments for your
  plugin using the argparse cli library
* backends: A Dict[str, callable] object that you may register a backend to.
  The key is the identifier for the backend in the `--backend` command line
  argument.
  The callable must take a single argument `config` as a Configuration object
  and return a Backend instance. It may be the class constructor or a factory
  function to construct a Backend object

In the future, the plugin callable may be called with additional keyword
arguments, so a plugin should accept a **kwargs variadic keyword argument.
"""

from pypiserver.backend import SimpleFileBackend, CachingFileBackend
from pypiserver import get_file_backend

DEFAULT_PACKAGE_DIRECTORIES = ["~/packages"]


# register this as a setuptools entrypoint under the 'pypiserver.plugin' key
def my_plugin(add_argument, backends, **_):
    add_argument(
        "package_directory",
        default=DEFAULT_PACKAGE_DIRECTORIES,
        nargs="*",
        help="The directory from which to serve packages.",
    )
    backends.update(
        {
            "auto": get_file_backend,
            "simple-dir": SimpleFileBackend,
            "cached-dir": CachingFileBackend,
        }
    )
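A sketch of how such a plugin might be wired up by a third-party package (editor's illustration; the distribution name and module path are hypothetical, and the 'pypiserver.plugin' entrypoint key is taken from the comment above, since the feature is marked NOT YET IMPLEMENTED):

    # in the third-party package's setup.py:
    setup(
        name="my-pypiserver-plugin",  # hypothetical distribution name
        entry_points={
            "pypiserver.plugin": ["my_plugin = my_pkg.plugin:my_plugin"],
        },
    )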

@@ -4,17 +4,8 @@
# pip install -r requirements/dev.pip

-r exe.pip
-r test.pip

black
docopt  # For `/bin/bumpver.py`.
gevent>=1.1b4; python_version >= '3'
mypy; implementation_name == 'cpython'
pip>=7
passlib>=1.6
pytest>=2.3
pytest-cov
setuptools
setuptools-git>=0.3
tox
twine
webtest
wheel>=0.25.0

2 requirements/mirror-cache-requirements.txt Normal file
@@ -0,0 +1,2 @@
beautifulsoup4==4.12.3
requests==2.31.0

15 requirements/test-requirements.txt Normal file
@@ -0,0 +1,15 @@
gevent>=1.1b4; python_version >= '3'
httpx
pip
passlib>=1.6
pytest>=6.2.2
pytest-cov
setuptools>=40.0,<70.0.0
tox
twine
webtest
wheel>=0.25.0
build>=1.2.0; python_version >= '3.8'
mdformat-gfm
mdformat-frontmatter
mdformat-footnote

3 requirements/test.pip Normal file
@@ -0,0 +1,3 @@
# Just the absolutely necessary extra requirements for
# running tests
-r test-requirements.txt

@@ -27,3 +27,6 @@ warn_unused_ignores = True

[mypy-tests.*]
disallow_untyped_decorators = False

[mypy-test_docker.*]
disallow_untyped_decorators = False

19 setup.py
@@ -9,12 +9,21 @@ tests_require = [
    "pytest>=2.3",
    "tox",
    "twine",
    "pip>=7",
    "passlib>=1.6",
    "webtest",
    "build>=1.2.0;python_version>='3.8'",
]

setup_requires = ["setuptools", "setuptools-git >= 0.3", "wheel >= 0.25.0"]
setup_requires = [
    "setuptools",
    "setuptools-git>=0.3",
    "wheel>=0.25.0",
]
install_requires = [
    "pip>=7",
    "packaging>=23.2",
    "importlib_resources;python_version>'3.8' and python_version<'3.12'",
]


def read_file(rel_path: str):
@@ -40,11 +49,13 @@ def get_version():
setup(
    name="pypiserver",
    description="A minimal PyPI server for use with pip/easy_install.",
    long_description=read_file("README.rst"),
    long_description=read_file("README.md"),
    long_description_content_type="text/markdown",
    version=get_version(),
    packages=["pypiserver"],
    package_data={"pypiserver": ["welcome.html"]},
    python_requires=">=3.6",
    install_requires=install_requires,
    setup_requires=setup_requires,
    extras_require={"passlib": ["passlib>=1.6"], "cache": ["watchdog"]},
    tests_require=tests_require,
@@ -69,6 +80,8 @@ setup(
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: Build Tools",

@@ -1,16 +0,0 @@
## A test-distribution to check if
# bottle supports uploading 100's of packages,
# see: https://github.com/pypiserver/pypiserver/issues/82
#
# Has been run once `pip wheel .`, just to generate:
# ./wheelhouse/centodeps-0.0.0-cp34-none-win_amd64.whl
#
from setuptools import setup

setup(
    name="centodeps",
    install_requires=["a==1.0"] * 200,
    options={
        "bdist_wheel": {"universal": True},
    },
)

@@ -4,34 +4,21 @@
import logging
import os
import pathlib


try:  # python 3
    from html.parser import HTMLParser
    from html import unescape
except ImportError:
    from HTMLParser import HTMLParser

    unescape = HTMLParser().unescape

try:
    import xmlrpc.client as xmlrpclib
except ImportError:
    import xmlrpclib  # legacy Python
import xmlrpc.client as xmlrpclib
from html import unescape

# Third party imports
import pytest
import webtest


# Local Imports
from pypiserver import __main__, bottle

import tests.test_core as test_core

from tests.test_pkg_helpers import files, invalid_files
from pypiserver import __main__, bottle, core, Bottle
from pypiserver.backend import CachingFileBackend, SimpleFileBackend

# Enable logging to detect any problems with it
##

__main__.init_logging()


@@ -43,12 +30,14 @@ def app(tmpdir):
        roots=[pathlib.Path(tmpdir.strpath)],
        authenticate=[],
        password_file=".",
        backend_arg="simple-dir",
    )


@pytest.fixture
def testapp(app):
    """Return a webtest TestApp initiated with pypiserver app"""
    bottle.debug(True)
    return webtest.TestApp(app)


@@ -111,7 +100,18 @@ def welcome_file_all_vars(request, root):
    return wfile


def test_root_count(root, testapp):
@pytest.fixture
def add_file_to_root(app):
    def file_adder(root, filename, content=""):
        root.join(filename).write(content)
        backend = app.config.backend
        if isinstance(backend, CachingFileBackend):
            backend.cache_manager.invalidate_root_cache(root)

    return file_adder


def test_root_count(root, testapp, add_file_to_root):
    """Test that the welcome page count updates with added packages

    :param root: root temporary directory fixture
@@ -119,7 +119,7 @@ def test_root_count(root, testapp):
    """
    resp = testapp.get("/")
    resp.mustcontain("PyPI compatible package index serving 0 packages")
    root.join("Twisted-11.0.0.tar.bz2").write("")
    add_file_to_root(root, "Twisted-11.0.0.tar.bz2")
    resp = testapp.get("/")
    resp.mustcontain("PyPI compatible package index serving 1 packages")

@@ -188,6 +188,29 @@ def test_packages_empty(testapp):
    assert len(resp.html("a")) == 0


def test_health_default_endpoint(testapp):
    resp = testapp.get("/health")
    assert resp.status_int == 200
    assert "Ok" in resp


def test_health_customized_endpoint(root):
    from pypiserver import app

    _app = app(root=root.strpath, health_endpoint="/healthz")
    testapp = webtest.TestApp(_app)
    resp = testapp.get("/healthz")
    assert resp.status_int == 200
    assert "Ok" in resp


def test_health_invalid_customized_endpoint(root):
    from pypiserver import app

    with pytest.raises(RuntimeError, match="overlaps with existing routes"):
        app(root=root.strpath, health_endpoint="/simple")


def test_favicon(testapp):
    testapp.get("/favicon.ico", status=404)

@@ -330,16 +353,19 @@ def test_nonroot_root_with_x_forwarded_host_without_trailing_slash(testapp):
    resp.mustcontain("""<a href="/priv/packages/">here</a>""")


def test_nonroot_simple_index(root, testpriv):
    root.join("foobar-1.0.zip").write("")
def test_nonroot_simple_index(root, testpriv, add_file_to_root):
    add_file_to_root(root, "foobar-1.0.zip", "123")
    resp = testpriv.get("/priv/simple/foobar/")
    links = resp.html("a")
    assert len(links) == 1
    assert links[0]["href"].startswith("/priv/packages/foobar-1.0.zip#")


def test_nonroot_simple_index_with_x_forwarded_host(root, testapp):
    root.join("foobar-1.0.zip").write("")
def test_nonroot_simple_index_with_x_forwarded_host(
    root, testapp, add_file_to_root
):
    add_file_to_root(root, "foobar-1.0.zip", "123")

    resp = testapp.get(
        "/simple/foobar/", headers={"X-Forwarded-Host": "forwarded.ed/priv/"}
    )
@@ -348,22 +374,25 @@ def test_nonroot_simple_index_with_x_forwarded_host(root, testapp):
    assert links[0]["href"].startswith("/priv/packages/foobar-1.0.zip#")


def test_nonroot_simple_packages(root, testpriv):
    root.join("foobar-1.0.zip").write("123")
def test_nonroot_simple_packages(root, testpriv, add_file_to_root):
    add_file_to_root(root, "foobar-1.0.zip", "123")
    resp = testpriv.get("/priv/packages/")
    links = resp.html("a")
    assert len(links) == 1
    assert links[0]["href"].startswith("/priv/packages/foobar-1.0.zip#")
    assert "/priv/packages/foobar-1.0.zip#" in links[0]["href"]


def test_nonroot_simple_packages_with_x_forwarded_host(root, testapp):
    root.join("foobar-1.0.zip").write("123")
def test_nonroot_simple_packages_with_x_forwarded_host(
    root, testapp, add_file_to_root
):
    add_file_to_root(root, "foobar-1.0.zip", "123")

    resp = testapp.get(
        "/packages/", headers={"X-Forwarded-Host": "forwarded/priv/"}
    )
    links = resp.html("a")
    assert len(links) == 1
    assert links[0]["href"].startswith("/priv/packages/foobar-1.0.zip#")
    assert "/priv/packages/foobar-1.0.zip#" in links[0]["href"]


def test_root_no_relative_paths(testpriv):
@@ -413,6 +442,42 @@ def test_simple_index_list_name_with_underscore_no_egg(root, testapp):
    assert hrefs == {"foo-bar/"}


def test_json_info(root, testapp):
    root.join("foobar-1.0.zip").write("")
    root.join("foobar-1.1.zip").write("")
    root.join("foobar-1.1-linux.zip").write("")
    root.join("foobarX-1.1.zip").write("")

    resp = testapp.get("/foobar/json")
    assert "info" in resp.json
    assert "releases" in resp.json
    assert len(resp.json["info"]) == 1
    assert len(resp.json["releases"]) == 2
    assert len(resp.json["releases"]["1.0"]) == 1
    assert len(resp.json["releases"]["1.1"]) == 2


def test_json_info_package_not_existing(root, testapp):
    resp = testapp.get("/foobar/json", status=404)


@pytest.mark.parametrize(
    "package,normalized",
    [
        ("FooBar", "foobar"),
        ("Foo.Bar", "foo-bar"),
        ("foo_bar", "foo-bar"),
        ("Foo-Bar", "foo-bar"),
        ("foo--_.bar", "foo-bar"),
    ],
)
def test_json_info_normalized_name_redirect(testapp, package, normalized):
    resp = testapp.get("/{0}/json".format(package))
    assert resp.status_code >= 300
    assert resp.status_code < 400
    assert resp.location.endswith("/{0}/json".format(normalized))


def test_no_cache_control_set(root, testapp):
    assert not testapp.app._pypiserver_config.cache_control
    root.join("foo_bar-1.0.tar.gz").write("")
@@ -444,7 +509,7 @@ def test_upload_badAction(testapp):


@pytest.mark.parametrize(
    "package", [f[0] for f in test_core.files if f[1] and "/" not in f[0]]
    "package", [f[0] for f in files if f[1] and "/" not in f[0]]
)
def test_upload(package, root, testapp):
    resp = testapp.post(
@@ -458,8 +523,23 @@ def test_upload(package, root, testapp):
    assert uploaded_pkgs[0].lower() == package.lower()


def test_upload_conflict_on_existing(root, testapp):
    package = "foo_bar-1.0.tar.gz"
    root.join("foo_bar-1.0.tar.gz").write("")

    resp = testapp.post(
        "/",
        params={":action": "file_upload"},
        upload_files=[("content", package, b"")],
        status=409,
    )

    assert resp.status_int == 409
    assert "Package 'foo_bar-1.0.tar.gz' already exists!" in unescape(resp.text)


@pytest.mark.parametrize(
    "package", [f[0] for f in test_core.files if f[1] and "/" not in f[0]]
    "package", [f[0] for f in files if f[1] and "/" not in f[0]]
)
def test_upload_with_signature(package, root, testapp):
    resp = testapp.post(
@@ -477,9 +557,7 @@ def test_upload_with_signature(package, root, testapp):
    assert f"{package.lower()}.asc" in uploaded_pkgs


@pytest.mark.parametrize(
    "package", [f[0] for f in test_core.files if f[1] is None]
)
@pytest.mark.parametrize("package", invalid_files)
def test_upload_badFilename(package, root, testapp):
    resp = testapp.post(
        "/",
@@ -526,7 +604,7 @@ def test_search(root, testapp, search_xml, pkgs, matches):
    expected name and version matches for a search for the "test"
    package as specified by the search_xml fixture.

    :param root: root temporry directory fixture; used as packages dir
    :param root: root temporary directory fixture; used as packages dir
        for testapp
    :param testapp: webtest TestApp
    :param str search_xml: XML string roughly equivalent to a pip search

42 tests/test_backend.py Normal file
@@ -0,0 +1,42 @@
from pathlib import Path

import pytest

from pypiserver.backend import listdir


def create_path(root: Path, path: Path):
    if path.is_absolute():
        raise ValueError(
            "Only test using relative paths"
            " to prevent leaking outside test environment"
        )
    fullpath = root / path
    if not fullpath.parent.exists():
        fullpath.parent.mkdir(parents=True)
    fullpath.touch()


valid_paths = ["direct-in-root.zip", "some/nested/pkg.zip"]


@pytest.mark.parametrize("path_name", valid_paths)
def test_listdir_generates_pkgfile_for_valid_package(tmp_path, path_name):
    path = Path(path_name)
    create_path(tmp_path, path)
    assert len(list(listdir(tmp_path))) == 1


invalid_paths = [
    ".hidden-pkg.zip",
    ".hidden/dir/pkg.zip",
    "in/between/.hidden/pkg.zip",
    "invalid-wheel.whl",
]


@pytest.mark.parametrize("path_name", invalid_paths)
def test_listdir_doesnt_generate_pkgfile_for_invalid_file(tmp_path, path_name):
    path = Path(path_name)
    create_path(tmp_path, path)
    assert not list(listdir(tmp_path))

@@ -8,12 +8,13 @@ import sys

import pytest

from pypiserver.backend import SimpleFileBackend, BackendProxy
from pypiserver.config import DEFAULTS, Config, RunConfig, UpdateConfig

FILE_DIR = pathlib.Path(__file__).parent.resolve()

# Username and password stored in the htpasswd.a.a test file.
HTPASS_TEST_FILE = str(FILE_DIR / "htpasswd.a.a")
HTPASS_TEST_FILE = str(FILE_DIR / "../fixtures/htpasswd.a.a")
HTPASS_TEST_USER = "a"
HTPASS_TEST_PASS = "a"

@@ -368,6 +369,21 @@ _CONFIG_TEST_PARAMS: t.Tuple[ConfigTestCase, ...] = (
        exp_config_type=RunConfig,
        exp_config_values={"fallback_url": "foobar.com"},
    ),
    # health-endpoint
    ConfigTestCase(
        case="Run: health-endpoint unspecified",
        args=["run"],
        legacy_args=[],
        exp_config_type=RunConfig,
        exp_config_values={"health_endpoint": DEFAULTS.HEALTH_ENDPOINT},
    ),
    ConfigTestCase(
        case="Run: health-endpoint specified",
        args=["run", "--health-endpoint", "/healthz"],
        legacy_args=["--health-endpoint", "/healthz"],
        exp_config_type=RunConfig,
        exp_config_values={"health_endpoint": "/healthz"},
    ),
    # server method
    ConfigTestCase(
        case="Run: server method unspecified",
@@ -530,10 +546,39 @@ _CONFIG_TEST_PARAMS: t.Tuple[ConfigTestCase, ...] = (
        exp_config_type=RunConfig,
        exp_config_values={"log_err_frmt": "foo"},
    ),
    # backend
    ConfigTestCase(
        "Run: backend unspecified",
        args=["run"],
        legacy_args=[],
        exp_config_type=RunConfig,
        exp_config_values={
            "backend_arg": "auto",
            "_test": (
                lambda conf: (
                    isinstance(conf.backend, BackendProxy)
                    and isinstance(conf.backend.backend, SimpleFileBackend)
                )
            ),
        },
    ),
    ConfigTestCase(
        "Run: simple backend specified",
        args=["run", "--backend", "simple-dir"],
        legacy_args=["--backend", "simple-dir"],
        exp_config_type=RunConfig,
        exp_config_values={
            "_test": (
                lambda conf: (
                    isinstance(conf.backend.backend, SimpleFileBackend)
                )
            ),
        },
    ),
    # ******************************************************************
    # Update subcommand args
    # ******************************************************************
    # exeucte
    # execute
    ConfigTestCase(
        case="Update: execute not specified",
        args=["update"],
@@ -653,6 +698,14 @@ _CONFIG_ERROR_CASES = (
        )
        for val in ("true", "foo", "1", "md6")
    ),
    *(
        ConfigErrorCase(
            case=f"Invalid health endpoint: {val}",
            args=["run", "--health-endpoint", val],
            exp_txt="Invalid path for the health endpoint",
        )
        for val in ("/", "health", "/health!", "/:health", "/health?check=True")
    ),
)
# pylint: disable=unsubscriptable-object
CONFIG_ERROR_PARAMS = (i[1:] for i in _CONFIG_ERROR_CASES)
@@ -695,7 +748,7 @@ def test_config(
@pytest.mark.parametrize(
    "args, exp_txt",
    CONFIG_ERROR_PARAMS,
    ids=CONFIG_TEST_IDS,
    ids=CONFIG_ERROR_IDS,
)
def test_config_error(
    args: t.List[str],
@@ -705,7 +758,7 @@ def test_config_error(
    """Validate error cases."""
    with pytest.raises(SystemExit):
        Config.from_args(args)
    # Unfortunatley the error text is printed before the SystemExit is
    # Unfortunately the error text is printed before the SystemExit is
    # raised, rather than being raised _with_ the systemexit, so we
    # need to capture stderr and check it for our expected text, if
    # any was specified in the test case.

@@ -6,7 +6,10 @@ import os

import pytest

from pypiserver import __main__, core
from pypiserver import __main__, core, backend
from pypiserver.pkg_helpers import (
    normalize_pkgname_for_url,
)
from tests.doubles import Namespace


@@ -15,98 +18,9 @@ from tests.doubles import Namespace
__main__.init_logging()


files = [
    ("pytz-2012b.tar.bz2", "pytz", "2012b"),
    ("pytz-2012b.tgz", "pytz", "2012b"),
    ("pytz-2012b.ZIP", "pytz", "2012b"),
    ("pytz-2012a.zip", "pytz", "2012a"),
    ("gevent-1.0b1.win32-py2.6.exe", "gevent", "1.0b1"),
    ("gevent-1.0b1.win32-py2.7.msi", "gevent", "1.0b1"),
    ("greenlet-0.3.4-py3.1-win-amd64.egg", "greenlet", "0.3.4"),
    ("greenlet-0.3.4.win-amd64-py3.2.exe", "greenlet", "0.3.4"),
    ("greenlet-0.3.4-py3.2-win32.egg", "greenlet", "0.3.4"),
    ("greenlet-0.3.4-py2.7-linux-x86_64.egg", "greenlet", "0.3.4"),
    ("pep8-0.6.0.zip", "pep8", "0.6.0"),
    ("ABC12-34_V1X-1.2.3.zip", "ABC12", "34_V1X-1.2.3"),
    ("A100-200-XYZ-1.2.3.zip", "A100-200-XYZ", "1.2.3"),
    ("flup-1.0.3.dev-20110405.tar.gz", "flup", "1.0.3.dev-20110405"),
    ("package-1.0.0-alpha.1.zip", "package", "1.0.0-alpha.1"),
    ("package-1.3.7+build.11.e0f985a.zip", "package", "1.3.7+build.11.e0f985a"),
    ("package-v1-8.1.301.ga0df26f.zip", "package-v1", "8.1.301.ga0df26f"),
    ("package-v1.1-8.1.301.ga0df26f.zip", "package-v1.1", "8.1.301.ga0df26f"),
    ("package-2013.02.17.dev123.zip", "package", "2013.02.17.dev123"),
    ("package-20000101.zip", "package", "20000101"),
    ("flup-123-1.0.3.dev-20110405.tar.gz", "flup-123", "1.0.3.dev-20110405"),
    ("package-123-1.0.0-alpha.1.zip", "package-123", "1.0.0-alpha.1"),
    (
        "package-123-1.3.7+build.11.e0f985a.zip",
        "package-123",
        "1.3.7+build.11.e0f985a",
    ),
    ("package-123-v1.1_3-8.1.zip", "package-123-v1.1_3", "8.1"),
    ("package-123-2013.02.17.dev123.zip", "package-123", "2013.02.17.dev123"),
    ("package-123-20000101.zip", "package-123", "20000101"),
    (
        "pyelasticsearch-0.5-brainbot-1-20130712.zip",
        "pyelasticsearch",
        "0.5-brainbot-1-20130712",
    ),
    ("pywin32-217-cp27-none-win32.whl", "pywin32", "217"),
    ("pywin32-217-55-cp27-none-win32.whl", "pywin32", "217-55"),
    ("pywin32-217.1-cp27-none-win32.whl", "pywin32", "217.1"),
    ("package.zip", "package", ""),
    (
        "package-name-0.0.1.dev0.linux-x86_64.tar.gz",
        "package-name",
        "0.0.1.dev0",
    ),
    (
        "package-name-0.0.1.dev0.macosx-10.10-intel.tar.gz",
        "package-name",
        "0.0.1.dev0",
    ),
    (
        "package-name-0.0.1.alpha.1.win-amd64-py3.2.exe",
        "package-name",
        "0.0.1.alpha.1",
    ),
    ("pkg-3!1.0-0.1.tgz", "pkg", "3!1.0-0.1"),  # TO BE FIXED
    ("pkg-3!1+.0-0.1.tgz", "pkg", "3!1+.0-0.1"),  # TO BE FIXED
    ("pkg.zip", "pkg", ""),
    ("foo/pkg.zip", "pkg", ""),
    ("foo/pkg-1b.zip", "pkg", "1b"),
    (
        "package-name-0.0.1.alpha.1.win-amd64-py3.2.exe",
        "package-name",
        "0.0.1.alpha.1",
    ),
]


def _capitalize_ext(fpath):
    f, e = os.path.splitext(fpath)
    if e != ".whl":
        e = e.upper()
    return f + e


@pytest.mark.parametrize(("filename", "pkgname", "version"), files)
def test_guess_pkgname_and_version(filename, pkgname, version):
    exp = (pkgname, version)
    assert core.guess_pkgname_and_version(filename) == exp
    assert core.guess_pkgname_and_version(_capitalize_ext(filename)) == exp


@pytest.mark.parametrize(("filename", "pkgname", "version"), files)
def test_guess_pkgname_and_version_asc(filename, pkgname, version):
    exp = (pkgname, version)
    filename = f"{filename}.asc"
    assert core.guess_pkgname_and_version(filename) == exp


def test_listdir_bad_name(tmpdir):
    tmpdir.join("foo.whl").ensure()
    res = list(core.listdir(tmpdir.strpath))
def test_listdir_bad_name(tmp_path):
    tmp_path.joinpath("foo.whl").touch()
    res = list(backend.listdir(tmp_path))
    assert res == []


@@ -122,33 +36,37 @@ hashes = (


@pytest.mark.parametrize(("algo", "digest"), hashes)
def test_hashfile(tmpdir, algo, digest):
    f = tmpdir.join("empty")
    f.ensure()
    assert core.digest_file(f.strpath, algo) == digest
def test_hashfile(tmp_path, algo, digest):
    f = tmp_path.joinpath("empty")
    f.touch()
    assert backend.digest_file(str(f), algo) == f"{algo}={digest}"


@pytest.mark.parametrize("hash_algo", ("md5", "sha256", "sha512"))
def test_fname_and_hash(tmpdir, hash_algo):
def test_fname_and_hash(tmp_path, hash_algo):
    """Ensure we are returning the expected hashes for files."""
    f = tmpdir.join("tmpfile")
    f.ensure()
    pkgfile = core.PkgFile("tmp", "1.0.0", f.strpath, f.dirname, f.basename)
    assert pkgfile.fname_and_hash(hash_algo) == "{}#{}={}".format(
        f.basename, hash_algo, str(f.computehash(hashtype=hash_algo))
    )
    def digester(pkg):
        digest = backend.digest_file(pkg.fn, hash_algo)
        pkg.digest = digest
        return digest

    f = tmp_path.joinpath("tmpfile")
    f.touch()
    pkgfile = core.PkgFile("tmp", "1.0.0", str(f), f.parent, f.name)
    pkgfile.digester = digester

    assert pkgfile.fname_and_hash == f"{f.name}#{digester(pkgfile)}"


def test_redirect_prefix_encodes_newlines():
def test_redirect_project_encodes_newlines():
    """Ensure raw newlines are url encoded in the generated redirect."""
    request = Namespace(custom_fullpath="/\nSet-Cookie:malicious=1;")
    prefix = "\nSet-Cookie:malicious=1;"
    newpath = core.get_bad_url_redirect_path(request, prefix)
    project = "\nSet-Cookie:malicious=1;"
    newpath = core.get_bad_url_redirect_path(request, project)
    assert "\n" not in newpath


def test_normalize_pkgname_for_url_encodes_newlines():
    """Ensure newlines are url encoded in package names for urls."""
    assert "\n" not in core.normalize_pkgname_for_url(
        "/\nSet-Cookie:malicious=1;"
    )
    assert "\n" not in normalize_pkgname_for_url("/\nSet-Cookie:malicious=1;")
||||
|
@@ -8,10 +8,12 @@ from pypiserver import version as my_ver

 @pytest.fixture()
 def readme():
-    return Path(__file__).parents[1].joinpath("README.rst").read_text()
+    return Path(__file__).parents[1].joinpath("README.md").read_text()


 def test_READMEversion(readme):
-    m = re.compile(r"^\s*:Version:\s*(.+)\s*$", re.MULTILINE).search(readme)
+    m = re.compile(
+        r"^\|\s*Version\s*\|\s*(\d+\.\d+\.\d+)\s*\|$", re.MULTILINE
+    ).search(readme)
     assert m, "Could not find version on README!"
     assert m.group(1) == my_ver, f"Updated version({m.group(1)}) on README!"
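The README moved from reStructuredText to Markdown, so the version is no longer a `:Version:` field but a row in a Markdown table; the new regex reflects that. A quick self-contained check of the new pattern:

    import re

    readme = "| Version | 2.0.1 |"
    m = re.search(
        r"^\|\s*Version\s*\|\s*(\d+\.\d+\.\d+)\s*\|$", readme, re.MULTILINE
    )
    assert m and m.group(1) == "2.0.1"
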
@@ -1,6 +1,7 @@
 """
 Test module for app initialization
 """

 # Standard library imports
 import logging
+import pathlib
@@ -12,11 +13,12 @@ import pytest

 # Local imports
 import pypiserver
+from pypiserver.config import Config

 logger = logging.getLogger(__name__)

 TEST_DIR = pathlib.Path(__file__).parent
-HTPASS_FILE = TEST_DIR / "htpasswd.a.a"
+HTPASS_FILE = TEST_DIR / "../fixtures/htpasswd.a.a"
 WELCOME_FILE = TEST_DIR / "sample_msg.html"
@@ -103,3 +105,19 @@ def test_backwards_compat_kwargs_duplicate_check(
     with pytest.raises(ValueError) as err:
         pypiserver.backwards_compat_kwargs(kwargs)
     assert "('redirect_to_fallback', 'disable_fallback')" in str(err.value)
+
+
+def test_setup_routes_from_config_customized_endpoint() -> None:
+    _app = pypiserver.setup_routes_from_config(
+        pypiserver.app(),
+        Config.default_with_overrides(health_endpoint="/healthz"),
+    )
+    assert "/healthz" in (route.rule for route in _app.routes)
+
+
+def test_setup_routes_from_config_invalid_customized_endpoint() -> None:
+    with pytest.raises(RuntimeError, match="overlaps with existing routes"):
+        pypiserver.setup_routes_from_config(
+            pypiserver.app(),
+            Config.default_with_overrides(health_endpoint="/simple"),
+        )
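The second new test expects `setup_routes_from_config` to refuse a health endpoint that collides with an existing route such as `/simple`. A hedged sketch of what such a guard could look like (the actual pypiserver implementation may differ):

    def ensure_no_overlap(app, endpoint):
        # Reject a configured endpoint that is already a registered rule.
        if endpoint in (route.rule for route in app.routes):
            raise RuntimeError(f"{endpoint!r} overlaps with existing routes")
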
@@ -7,12 +7,13 @@ from unittest import mock

 import pytest

+import pypiserver.bottle
 from pypiserver import __main__
 from pypiserver.bottle import Bottle


 THIS_DIR = pathlib.Path(__file__).parent
-HTPASS_FILE = THIS_DIR / "htpasswd.a.a"
+HTPASS_FILE = THIS_DIR / "../fixtures/htpasswd.a.a"
 IGNORELIST_FILE = THIS_DIR / "test-ignorelist"
@@ -38,7 +39,6 @@ class main_wrapper:

 @pytest.fixture()
 def main(monkeypatch):
-
     main = main_wrapper()

     def run(**kwargs):
@ -66,14 +66,18 @@ def test_noargs(main):
|
||||
# Assert we're calling with the default host, port, and server, and
|
||||
# assume that we've popped `app` off of the bottle args in our `main`
|
||||
# fixture.
|
||||
assert main([]) == {"host": "0.0.0.0", "port": 8080, "server": "auto"}
|
||||
exp_kwargs = {"host": "0.0.0.0", "port": 8080, "server": "auto"}
|
||||
actual_kwargs = main([])
|
||||
# Only assert our expected are are present. We may pass extra kwargs
|
||||
# for particular servers, depending on what is available in the python
|
||||
# path.
|
||||
assert all(map(lambda k: exp_kwargs[k] == actual_kwargs[k], exp_kwargs))
|
||||
|
||||
|
||||
def test_port(main):
|
||||
expected = dict(host="0.0.0.0", port=8081, server="auto")
|
||||
assert main(["--port=8081"]) == expected
|
||||
assert main(["--port", "8081"]) == expected
|
||||
assert main(["-p", "8081"]) == expected
|
||||
assert main(["--port=8081"])["port"] == 8081
|
||||
assert main(["--port", "8081"])["port"] == 8081
|
||||
assert main(["-p", "8081"])["port"] == 8081
|
||||
|
||||
|
||||
def test_server(main):
|
||||
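`test_noargs` switches from exact-dict equality to a subset check because the chosen server may add extra kwargs (e.g. `handler_class` below). The same check can be written more directly with dict-view comparison, an illustrative alternative:

    exp = {"host": "0.0.0.0", "port": 8080, "server": "auto"}
    actual = {**exp, "handler_class": object()}  # extra kwargs are fine
    assert exp.items() <= actual.items()         # subset comparison
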
@@ -81,6 +85,26 @@ def test_server(main):
     assert main(["--server", "cherrypy"])["server"] == "cherrypy"


+def test_wsgiserver_extra_args_present(monkeypatch, main):
+    """The wsgi server gets extra keyword arguments."""
+    monkeypatch.setattr(
+        __main__,
+        "guess_auto_server",
+        lambda: __main__.AutoServer.WsgiRef,
+    )
+    assert main([])["handler_class"] is __main__.WsgiHandler
+
+
+def test_wsgiserver_extra_kwargs_absent(monkeypatch, main):
+    """Other servers don't get wsgiserver args."""
+    monkeypatch.setattr(
+        __main__,
+        "guess_auto_server",
+        lambda: __main__.AutoServer.Waitress,
+    )
+    assert "handler_class" not in main([])
+
+
 def test_root_multiple(main):
     # Remember we're already setting THIS_DIR as a root in the `main` fixture
     main([str(THIS_DIR.parent)])
@@ -106,12 +130,12 @@ def test_fallback_url_default(main):

 def test_hash_algo_default(main):
     main([])
-    assert main.app._pypiserver_config.hash_algo == "md5"
+    assert main.app._pypiserver_config.hash_algo == "sha256"


 def test_hash_algo(main):
-    main(["--hash-algo=sha256"])
-    assert main.app._pypiserver_config.hash_algo == "sha256"
+    main(["--hash-algo=md5"])
+    assert main.app._pypiserver_config.hash_algo == "md5"


 def test_hash_algo_off(main):
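The default `hash_algo` flips from md5 to sha256, which changes the length of the hash fragment appended to download links on the simple index. A small illustration (the wheel bytes here are made up):

    import hashlib

    data = b"example wheel bytes"
    print(hashlib.md5(data).hexdigest())     # 32 hex chars -> "#md5=..."
    print(hashlib.sha256(data).hexdigest())  # 64 hex chars -> "#sha256=..."
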
@@ -232,3 +256,57 @@ def test_blacklist_file(main):
     """
     main(["-U", "--blacklist-file", str(IGNORELIST_FILE)])
     assert main.update_kwargs["ignorelist"] == ["mypiserver", "something"]
+
+
+def test_auto_servers() -> None:
+    """Test auto servers."""
+    # A list of bottle ServerAdapters
+    bottle_adapters = tuple(
+        a.__name__.lower() for a in pypiserver.bottle.AutoServer.adapters
+    )
+    # We are going to expect that our AutoServer enum names must match those
+    # at least closely enough to be recognizable.
+    our_mappings = tuple(map(str.lower, __main__.AutoServer.__members__))
+
+    # Assert that all of our mappings are represented in bottle adapters
+    assert all(
+        any(mapping in a for a in bottle_adapters) for mapping in our_mappings
+    )
+
+    # Assert that our import checking order matches the order in which the
+    # adapters are defined in the AutoServer
+    our_check_order = tuple(i[0] for i in __main__.AUTO_SERVER_IMPORTS)
+
+    # Some of the servers have more than one check, so we need to remove
+    # duplicates before we check for identity with the AutoServer definition.
+    seen: t.Dict[__main__.AutoServer, __main__.AutoServer] = {}
+    our_check_order = tuple(
+        seen.setdefault(i, i) for i in our_check_order if i not in seen
+    )
+
+    # We should have the same number of deduped checkers as there are bottle
+    # adapters
+    assert len(our_check_order) == len(bottle_adapters)
+
+    # And the order should be the same
+    assert all(
+        us.name.lower() in them
+        for us, them in zip(our_check_order, bottle_adapters)
+    )
+
+
+def test_health_endpoint_default(main):
+    main([])
+    assert main.app._pypiserver_config.health_endpoint == "/health"
+    assert "/health" in (route.rule for route in main.app.routes)
+
+
+def test_health_endpoint_customized(main):
+    main(["--health-endpoint", "/healthz"])
+    assert main.app._pypiserver_config.health_endpoint == "/healthz"
+    assert "/healthz" in (route.rule for route in main.app.routes)
+
+
+def test_health_endpoint_invalid_customized(main):
+    with pytest.raises(SystemExit):
+        main(["--health-endpoint", "/health!"])
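`test_auto_servers` deduplicates the import-check order while preserving first-seen position via `dict.setdefault`. Since dicts preserve insertion order on Python 3.7+, the same idiom can be written more compactly:

    items = ["waitress", "paste", "waitress", "wsgiref"]
    deduped = tuple(dict.fromkeys(items))  # order-preserving dedup
    assert deduped == ("waitress", "paste", "wsgiref")
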
@@ -3,20 +3,15 @@

 from __future__ import absolute_import, print_function, unicode_literals

-try:
-    from unittest.mock import Mock
-except ImportError:
-    from mock import Mock
+from pathlib import Path
+from unittest.mock import Mock

-import py
 import pytest

 from pypiserver import manage
-from pypiserver.core import (
-    PkgFile,
-    guess_pkgname_and_version,
-    parse_version,
-)
+from pypiserver.core import PkgFile
+from pypiserver.pkg_helpers import guess_pkgname_and_version, parse_version
 from pypiserver.manage import (
     PipCmd,
     build_releases,
@@ -210,17 +205,17 @@ def test_update_all_packages(monkeypatch):
     private_pkg_2 = PkgFile("my_other_private_pkg", "1.0")

     roots_mock = {
-        "/opt/pypi": [
+        Path("/opt/pypi"): [
             public_pkg_1,
             private_pkg_1,
         ],
-        "/data/pypi": [public_pkg_2, private_pkg_2],
+        Path("/data/pypi"): [public_pkg_2, private_pkg_2],
     }

-    def core_listdir_mock(directory):
-        return roots_mock.get(directory, [])
+    def core_listdir_mock(path: Path):
+        return roots_mock.get(path, [])

-    monkeypatch.setattr(manage.core, "listdir", core_listdir_mock)
+    monkeypatch.setattr(manage, "listdir", core_listdir_mock)
     monkeypatch.setattr(manage, "update", Mock(return_value=None))

     destdir = None
@@ -243,7 +238,7 @@ def test_update_all_packages(monkeypatch):
     )


-def test_update_all_packages_with_ignorelist(monkeypatch):
+def test_update_all_packages_with_blacklist(monkeypatch):
     """Test calling update_all_packages()"""
     public_pkg_1 = PkgFile("Flask", "1.0")
     public_pkg_2 = PkgFile("requests", "1.0")
@@ -251,17 +246,17 @@ def test_update_all_packages_with_ignorelist(monkeypatch):
     private_pkg_2 = PkgFile("my_other_private_pkg", "1.0")

     roots_mock = {
-        "/opt/pypi": [
+        Path("/opt/pypi"): [
             public_pkg_1,
             private_pkg_1,
         ],
-        "/data/pypi": [public_pkg_2, private_pkg_2],
+        Path("/data/pypi"): [public_pkg_2, private_pkg_2],
     }

-    def core_listdir_mock(directory):
-        return roots_mock.get(directory, [])
+    def core_listdir_mock(path: Path):
+        return roots_mock.get(path, [])

-    monkeypatch.setattr(manage.core, "listdir", core_listdir_mock)
+    monkeypatch.setattr(manage, "listdir", core_listdir_mock)
     monkeypatch.setattr(manage, "update", Mock(return_value=None))

     destdir = None
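The mock's keys change from strings to Path objects because, as the new `core_listdir_mock(path: Path)` signature suggests, `listdir` is evidently now called with Path roots, and a str-keyed dict would never match them. A two-line demonstration:

    from pathlib import Path

    assert Path("/opt/pypi") != "/opt/pypi"        # str keys would miss
    assert Path("/opt/pypi") == Path("/opt/pypi")  # Path keys match
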
117  tests/test_pkg_helpers.py  Normal file
@@ -0,0 +1,117 @@
+import os
+from pathlib import WindowsPath, PureWindowsPath
+
+import pytest
+
+from pypiserver.pkg_helpers import guess_pkgname_and_version, is_listed_path
+
+files = [
+    ("pytz-2012b.tar.bz2", "pytz", "2012b"),
+    ("pytz-2012b.tgz", "pytz", "2012b"),
+    ("pytz-2012b.ZIP", "pytz", "2012b"),
+    ("pytz-2012a.zip", "pytz", "2012a"),
+    ("pytz-2012b.tar.xz", "pytz", "2012b"),
+    ("gevent-1.0b1.win32-py2.6.exe", "gevent", "1.0b1"),
+    ("gevent-1.0b1.win32-py2.7.msi", "gevent", "1.0b1"),
+    ("greenlet-0.3.4-py3.1-win-amd64.egg", "greenlet", "0.3.4"),
+    ("greenlet-0.3.4.win-amd64-py3.2.exe", "greenlet", "0.3.4"),
+    ("greenlet-0.3.4-py3.2-win32.egg", "greenlet", "0.3.4"),
+    ("greenlet-0.3.4-py2.7-linux-x86_64.egg", "greenlet", "0.3.4"),
+    ("pep8-0.6.0.zip", "pep8", "0.6.0"),
+    ("ABC12-34_V1X-1.2.3.zip", "ABC12", "34_V1X-1.2.3"),
+    ("A100-200-XYZ-1.2.3.zip", "A100-200-XYZ", "1.2.3"),
+    ("flup-1.0.3.dev-20110405.tar.gz", "flup", "1.0.3.dev-20110405"),
+    ("package-1.0.0-alpha.1.zip", "package", "1.0.0-alpha.1"),
+    ("package-1.3.7+build.11.e0f985a.zip", "package", "1.3.7+build.11.e0f985a"),
+    ("package-v1-8.1.301.ga0df26f.zip", "package-v1", "8.1.301.ga0df26f"),
+    ("package-v1.1-8.1.301.ga0df26f.zip", "package-v1.1", "8.1.301.ga0df26f"),
+    ("package-2013.02.17.dev123.zip", "package", "2013.02.17.dev123"),
+    ("package-20000101.zip", "package", "20000101"),
+    ("flup-123-1.0.3.dev-20110405.tar.gz", "flup-123", "1.0.3.dev-20110405"),
+    ("package-123-1.0.0-alpha.1.zip", "package-123", "1.0.0-alpha.1"),
+    (
+        "package-123-1.3.7+build.11.e0f985a.zip",
+        "package-123",
+        "1.3.7+build.11.e0f985a",
+    ),
+    ("package-123-v1.1_3-8.1.zip", "package-123-v1.1_3", "8.1"),
+    ("package-123-2013.02.17.dev123.zip", "package-123", "2013.02.17.dev123"),
+    ("package-123-20000101.zip", "package-123", "20000101"),
+    (
+        "pyelasticsearch-0.5-brainbot-1-20130712.zip",
+        "pyelasticsearch",
+        "0.5-brainbot-1-20130712",
+    ),
+    ("pywin32-217-cp27-none-win32.whl", "pywin32", "217"),
+    ("pywin32-217-55-cp27-none-win32.whl", "pywin32", "217-55"),
+    ("pywin32-217.1-cp27-none-win32.whl", "pywin32", "217.1"),
+    ("package.zip", "package", ""),
+    (
+        "package-name-0.0.1.dev0.linux-x86_64.tar.gz",
+        "package-name",
+        "0.0.1.dev0",
+    ),
+    (
+        "package-name-0.0.1.dev0.macosx-10.10-intel.tar.gz",
+        "package-name",
+        "0.0.1.dev0",
+    ),
+    (
+        "package-name-0.0.1.alpha.1.win-amd64-py3.2.exe",
+        "package-name",
+        "0.0.1.alpha.1",
+    ),
+    ("pkg-3!1.0-0.1.tgz", "pkg", "3!1.0-0.1"),  # TO BE FIXED
+    ("pkg-3!1+.0-0.1.tgz", "pkg", "3!1+.0-0.1"),  # TO BE FIXED
+    ("pkg.zip", "pkg", ""),
+    ("foo/pkg.zip", "pkg", ""),
+    ("foo/pkg-1b.zip", "pkg", "1b"),
+    ("foo/pywin32-217.1-cp27-none-win32.whl", "pywin32", "217.1"),
+    (
+        "package-name-0.0.1.alpha.1.win-amd64-py3.2.exe",
+        "package-name",
+        "0.0.1.alpha.1",
+    ),
+]
+
+
+def _capitalize_ext(fpath):
+    f, e = os.path.splitext(fpath)
+    if e != ".whl":
+        e = e.upper()
+    return f + e
+
+
+@pytest.mark.parametrize(("filename", "pkgname", "version"), files)
+def test_guess_pkgname_and_version(filename, pkgname, version):
+    exp = (pkgname, version)
+    assert guess_pkgname_and_version(filename) == exp
+    assert guess_pkgname_and_version(_capitalize_ext(filename)) == exp
+
+
+@pytest.mark.parametrize(("filename", "pkgname", "version"), files)
+def test_guess_pkgname_and_version_asc(filename, pkgname, version):
+    exp = (pkgname, version)
+    filename = f"{filename}.asc"
+    assert guess_pkgname_and_version(filename) == exp
+
+
+invalid_files = ["some_file", "some_file.ext", "some_wheel.whl"]
+
+
+@pytest.mark.parametrize("filename", invalid_files)
+def test_guess_pkgname_and_version_invalid_files(filename):
+    assert guess_pkgname_and_version(filename) is None
+
+
+paths = [
+    ("/some/path", True),
+    (PureWindowsPath(r"c:\some\windows\path"), True),
+    ("/.hidden", False),
+    (PureWindowsPath(r"c:\.hidden\windows\path"), False),
+]
+
+
+@pytest.mark.parametrize(("pathname", "allowed"), paths)
+def test_allowed_path_check(pathname, allowed):
+    assert is_listed_path(pathname) == allowed
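The final parametrized test pins down `is_listed_path`: paths containing a dot-prefixed component are hidden from the index. A hedged sketch that satisfies the four cases above (the real pkg_helpers implementation may differ):

    from pathlib import PurePath, PurePosixPath

    def is_listed_path(path_part):
        # Use the path object's own parts when given one, so Windows
        # separators are split correctly; parse strings as POSIX paths.
        parts = (
            path_part.parts
            if isinstance(path_part, PurePath)
            else PurePosixPath(path_part).parts
        )
        return not any(part.startswith(".") for part in parts)
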
@@ -11,172 +11,217 @@ The tests below are using 3 ways to startup pypi-servers:
 - "new_server": starting a new server with any configurations on each test.

 """
-from __future__ import print_function
-
-from collections import namedtuple
 import contextlib
-import functools
+import itertools
 import os
-import subprocess
+import shutil
 import socket
+import re
 import sys
-import tempfile
 import time
+import typing as t
+from collections import namedtuple
+from pathlib import Path
+from shlex import split
+from subprocess import Popen
-from textwrap import dedent
+from urllib.error import URLError
+from urllib.request import urlopen

-try:
-    from urllib.request import urlopen
-except ImportError:
-    from urllib import urlopen
-
-from py import path  # @UnresolvedImport
 import pytest

|
||||
# ######################################################################
|
||||
# Fixtures & Helper Functions
|
||||
# ######################################################################
|
||||
|
||||
|
||||
_BUFF_SIZE = 2 ** 16
|
||||
_port = 8090
|
||||
SLEEP_AFTER_SRV = 3 # sec
|
||||
CURRENT_PATH = Path(__file__).parent
|
||||
ports = itertools.count(10000)
|
||||
Srv = namedtuple("Srv", ("port", "root"))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def port():
|
||||
global _port
|
||||
_port += 1
|
||||
return _port
|
||||
|
||||
|
||||
Srv = namedtuple("Srv", ("proc", "port", "package"))
|
||||
|
||||
|
||||
def _run_server(packdir, port, authed, other_cli=""):
|
||||
@contextlib.contextmanager
|
||||
def run_server(root, authed=False, other_cli=""):
|
||||
"""Run a server, optionally with partial auth enabled."""
|
||||
htpasswd = (
|
||||
CURRENT_PATH.joinpath("../fixtures/htpasswd.a.a").expanduser().resolve()
|
||||
)
|
||||
pswd_opt_choices = {
|
||||
True: "-Ptests/htpasswd.a.a -a update,download",
|
||||
True: f"-P {htpasswd} -a update,download",
|
||||
False: "-P. -a.",
|
||||
"partial": "-Ptests/htpasswd.a.a -a update",
|
||||
"partial": f"-P {htpasswd} -a update",
|
||||
}
|
||||
pswd_opts = pswd_opt_choices[authed]
|
||||
|
||||
port = next(ports)
|
||||
cmd = (
|
||||
f"{sys.executable} -m pypiserver.__main__ -vvv --overwrite -i 127.0.0.1 "
|
||||
f"-p {port} {pswd_opts} {other_cli} {packdir}"
|
||||
f"{sys.executable} -m pypiserver.__main__ "
|
||||
f"run -vvv --overwrite -i 127.0.0.1 "
|
||||
f"-p {port} {pswd_opts} {other_cli} {root}"
|
||||
)
|
||||
proc = subprocess.Popen(cmd.split(), bufsize=_BUFF_SIZE)
|
||||
time.sleep(SLEEP_AFTER_SRV)
|
||||
assert proc.poll() is None
|
||||
|
||||
return Srv(proc, int(port), packdir)
|
||||
|
||||
|
||||
def _kill_server(srv):
|
||||
print(f"Killing {srv}")
|
||||
try:
|
||||
srv.proc.terminate()
|
||||
time.sleep(1)
|
||||
finally:
|
||||
srv.proc.kill()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def new_server(packdir, port, authed=False, other_cli=""):
|
||||
srv = _run_server(packdir, port, authed=authed, other_cli=other_cli)
|
||||
proc = Popen(cmd.split(), bufsize=2**16)
|
||||
srv = Srv(port, root)
|
||||
try:
|
||||
wait_until_ready(srv)
|
||||
assert proc.poll() is None
|
||||
yield srv
|
||||
finally:
|
||||
_kill_server(srv)
|
||||
print(f"Killing {srv}")
|
||||
_kill_proc(proc)
|
||||
|
||||
|
||||
-@contextlib.contextmanager
-def chdir(d):
-    old_d = os.getcwd()
+def wait_until_ready(srv: Srv, n_tries=10):
+    for _ in range(n_tries):
+        if is_ready(srv):
+            return True
+        time.sleep(0.5)
+    raise TimeoutError
+
+
+def is_ready(srv: Srv):
     try:
-        os.chdir(d)
-        yield
+        return urlopen(build_url(srv.port), timeout=0.5).getcode() in (
+            200,
+            401,
+        )
+    except (URLError, socket.timeout):
+        return False
+
+
+def _kill_proc(proc):
+    proc.terminate()
+    try:
+        proc.wait(timeout=1)
     finally:
-        os.chdir(old_d)
+        proc.kill()


-def _run_python(cmd):
-    return os.system(f"{sys.executable} {cmd}")
-
-
-@pytest.fixture(scope="module")
-def project(request):
-    def fin():
-        tmpdir.remove(True)
-
-    tmpdir = path.local(tempfile.mkdtemp())
-    request.addfinalizer(fin)
-    src_setup_py = path.local().join("tests", "centodeps-setup.py")
-    assert src_setup_py.check()
-    projdir = tmpdir.join("centodeps")
-    projdir.mkdir()
-    dst_setup_py = projdir.join("setup.py")
-    src_setup_py.copy(dst_setup_py)
-    assert dst_setup_py.check()
-
-    return projdir
-
-
-@pytest.fixture(scope="module")
-def package(project, request):
-    with chdir(project.strpath):
-        cmd = "setup.py bdist_wheel"
-        assert _run_python(cmd) == 0
-        pkgs = list(project.join("dist").visit("centodeps*.whl"))
-        assert len(pkgs) == 1
-        pkg = path.local(pkgs[0])
-        assert pkg.check()
-
-    return pkg
-
-
-@pytest.fixture(scope="module")
-def packdir(package):
-    return package.dirpath()
-
-
-open_port = 8081
-
-
-@pytest.fixture(scope="module")
-def open_server(packdir, request):
-    srv = _run_server(packdir, open_port, authed=False)
-    fin = functools.partial(_kill_server, srv)
-    request.addfinalizer(fin)
-
-    return srv
-
-
-protected_port = 8082
-
-
-@pytest.fixture(scope="module")
-def protected_server(packdir, request):
-    srv = _run_server(packdir, protected_port, authed=True)
-    fin = functools.partial(_kill_server, srv)
-    request.addfinalizer(fin)
-
-    return srv
-
-
-@pytest.fixture
-def empty_packdir(tmpdir):
-    return tmpdir.mkdir("dists")
-
-
-def _build_url(port, user="", pswd=""):
+def build_url(port: t.Union[int, str], user: str = "", pswd: str = "") -> str:
     auth = f"{user}:{pswd}@" if user or pswd else ""
     return f"http://{auth}localhost:{port}"


-def _run_pip(cmd):
+def run_setup_py(path: Path, arguments: str) -> int:
+    return os.system(f"{sys.executable} {path / 'setup.py'} {arguments}")
+
+
+def run_py_build(srcdir: Path, flags: str) -> int:
+    return os.system(f"{sys.executable} -m build {flags} {srcdir}")

+# A test-distribution to check if
+# bottle supports uploading 100's of packages,
+# see: https://github.com/pypiserver/pypiserver/issues/82
+#
+# Has been run once `pip wheel .`, just to generate:
+# ./wheelhouse/centodeps-0.0.0-cp34-none-win_amd64.whl
+#
+SETUP_PY = """\
+from setuptools import setup
+
+setup(
+    name="centodeps",
+    install_requires=["a==1.0"] * 200,
+    options={
+        "bdist_wheel": {"universal": True},
+    },
+    packages=[],
+)
+"""
+
+
+@pytest.fixture(scope="module")
+def project(tmp_path_factory):
+    projdir = tmp_path_factory.mktemp("project") / "centodeps"
+    projdir.mkdir(parents=True, exist_ok=True)
+    projdir.joinpath("setup.py").write_text(SETUP_PY)
+    return projdir
+
+
+@pytest.fixture(scope="session")
+def server_root(tmp_path_factory):
+    return tmp_path_factory.mktemp("root")
+
+
+@pytest.fixture(scope="module")
+def wheel_file(project, tmp_path_factory):
+    distdir = tmp_path_factory.mktemp("dist")
+    if re.match("^3\.7", sys.version):
+        assert run_setup_py(project, f"bdist_wheel -d {distdir}") == 0
+    else:
+        assert run_py_build(project, f"--wheel --outdir {distdir}") == 0
+    wheels = list(distdir.glob("centodeps*.whl"))
+    assert len(wheels) > 0
+    return wheels[0]
+
+
+@pytest.fixture()
+def hosted_wheel_file(wheel_file, server_root):
+    dst = server_root / wheel_file.name
+    shutil.copy(wheel_file, dst)
+    yield dst
+    if dst.is_file():
+        dst.unlink()
+
+
+def clear_directory(root: Path):
+    for path in root.iterdir():
+        if path.is_file():
+            path.unlink()
+
+
+@pytest.fixture(scope="module")
+def _open_server(server_root):
+    with run_server(server_root, authed=False) as srv:
+        yield srv
+
+
+@pytest.fixture
+def open_server(_open_server: Srv):
+    yield _open_server
+    clear_directory(_open_server.root)
+
+
+@pytest.fixture(scope="module")
+def _authed_server(server_root):
+    with run_server(server_root, authed=True) as srv:
+        yield srv
+
+
+@pytest.fixture
+def authed_server(_authed_server):
+    yield _authed_server
+    clear_directory(_authed_server.root)
+
+
+@pytest.fixture(scope="module")
+def _partial_auth_server(server_root):
+    with run_server(server_root, authed="partial") as srv:
+        yield srv
+
+
+@pytest.fixture
+def partial_authed_server(_partial_auth_server):
+    yield _partial_auth_server
+    clear_directory(_partial_auth_server.root)
+
+
+@pytest.fixture
+def empty_packdir(tmp_path_factory):
+    return tmp_path_factory.mktemp("dists")
+
+
+def pip_download(
+    cmd: str,
+    port: t.Union[int, str],
+    install_dir: str,
+    user: str = None,
+    pswd: str = None,
+) -> int:
+    url = build_url(port, user, pswd)
+    return _run_pip(f"-vv download -d {install_dir} -i {url} {cmd}")
+
+
+def _run_pip(cmd: str) -> int:
     ncmd = (
         "pip --no-cache-dir --disable-pip-version-check "
         f"--retries 0 --timeout 5 --no-input {cmd}"
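The new fixtures above replace the old fixed `time.sleep(SLEEP_AFTER_SRV)` startup delay with `wait_until_ready`/`is_ready` polling, which keeps slow machines green without taxing fast ones. The generic shape of that pattern, assuming any boolean `probe` callable:

    import time

    def wait_for(probe, tries=10, delay=0.5):
        # Poll until the probe reports readiness, or give up.
        for _ in range(tries):
            if probe():
                return
            time.sleep(delay)
        raise TimeoutError("server did not become ready")
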
@@ -187,281 +232,139 @@ def _run_pip(cmd):
     return proc.returncode


-def _run_pip_install(cmd, port, install_dir, user=None, pswd=None):
-    url = _build_url(port, user, pswd)
-    return _run_pip(f"-vv download -d {install_dir} -i {url} {cmd}")
-
-
 @pytest.fixture
-def pipdir(tmpdir):
-    return tmpdir.mkdir("pip")
+def pipdir(tmp_path_factory):
+    return tmp_path_factory.mktemp("pip")


 @contextlib.contextmanager
-def pypirc_tmpfile(port, user, password):
-    """Create a temporary pypirc file."""
-    fd, filepath = tempfile.mkstemp()
-    os.close(fd)
-    Path(filepath).write_text(
+def pypirc_file(repo, username="''", password="''"):
+    pypirc_path = Path.home() / ".pypirc"
+    old_pypirc = pypirc_path.read_text() if pypirc_path.is_file() else None
+    pypirc_path.write_text(
         "\n".join(
             (
                 "[distutils]",
                 "index-servers: test",
-                "" "[test]",
-                f"repository: {_build_url(port)}",
-                f"username: {user}",
+                "",
+                "[test]",
+                f"repository: {repo}",
+                f"username: {username}",
                 f"password: {password}",
             )
         )
     )

-    print(Path(filepath).read_text())
-    yield filepath
-    os.remove(filepath)
-
-
-@contextlib.contextmanager
-def pypirc_file(txt):
-    pypirc_path = path.local("~/.pypirc", expanduser=1)
-    old_pypirc = pypirc_path.read() if pypirc_path.check() else None
-    pypirc_path.write(txt)
     try:
-        yield
+        yield pypirc_path
     finally:
         if old_pypirc:
-            pypirc_path.write(old_pypirc)
+            pypirc_path.write_text(old_pypirc)
         else:
-            pypirc_path.remove()
+            pypirc_path.unlink()


-def twine_upload(
-    packages, repository="test", conf="pypirc", expect_failure=False
-):
-    """Call 'twine upload' with appropriate arguments"""
+@pytest.fixture
+def open_pypirc(open_server):
+    with pypirc_file(repo=build_url(open_server.port)) as path:
+        yield path
+
+
+@pytest.fixture
+def authed_pypirc(authed_server):
+    username, password = "a", "a"
+    with pypirc_file(
+        repo=build_url(authed_server.port),
+        username=username,
+        password=password,
+    ) as path:
+        yield path
+
+
+def run_twine(command: str, package: str, conf: str) -> None:
     proc = Popen(
-        (
-            "twine",
-            "upload",
-            "--repository",
-            repository,
-            "--config-file",
-            conf,
-            " ".join(packages),
+        split(
+            f"twine {command} --repository test --config-file {conf} {package}"
         )
     )
     proc.communicate()
-    if not expect_failure and proc.returncode:
-        assert False, "Twine upload failed. See stdout/err"
-
-
-def twine_register(
-    packages, repository="test", conf="pypirc", expect_failure=False
-):
-    """Call 'twine register' with appropriate args"""
-    proc = Popen(
-        (
-            "twine",
-            "register",
-            "--repository",
-            repository,
-            "--config-file",
-            conf,
-            " ".join(packages),
-        )
-    )
-    proc.communicate()
-    if not expect_failure and proc.returncode:
-        assert False, "Twine register failed. See stdout/err"
+    assert not proc.returncode, f"Twine {command} failed. See stdout/err"

 # ######################################################################
 # Tests
 # ######################################################################


-def test_pipInstall_packageNotFound(empty_packdir, port, pipdir, package):
-    with new_server(empty_packdir, port):
-        cmd = "centodeps"
-        assert _run_pip_install(cmd, port, pipdir) != 0
-    assert not pipdir.listdir()
+all_servers = [
+    ("open_server", "open_pypirc"),
+    ("authed_server", "authed_pypirc"),
+    ("partial_authed_server", "authed_pypirc"),
+]


-def test_pipInstall_openOk(open_server, package, pipdir):
-    cmd = "centodeps"
-    assert _run_pip_install(cmd, open_server.port, pipdir) == 0
-    assert pipdir.join(package.basename).check()
+def test_pip_install_package_not_found(open_server, pipdir):
+    assert pip_download("centodeps", open_server.port, pipdir) != 0
+    assert not list(pipdir.iterdir())


-def test_pipInstall_authedFails(protected_server, pipdir):
-    cmd = "centodeps"
-    assert _run_pip_install(cmd, protected_server.port, pipdir) != 0
-    assert not pipdir.listdir()
+def test_pip_install_open_succeeds(open_server, hosted_wheel_file, pipdir):
+    assert pip_download("centodeps", open_server.port, pipdir) == 0
+    assert pipdir.joinpath(hosted_wheel_file.name).is_file()


-def test_pipInstall_authedOk(protected_server, package, pipdir):
-    cmd = "centodeps"
+@pytest.mark.usefixtures("wheel_file")
+def test_pip_install_authed_fails(authed_server, pipdir):
+    assert pip_download("centodeps", authed_server.port, pipdir) != 0
+    assert not list(pipdir.iterdir())
+
+
+def test_pip_install_authed_succeeds(authed_server, hosted_wheel_file, pipdir):
     assert (
-        _run_pip_install(cmd, protected_server.port, pipdir, user="a", pswd="a")
+        pip_download(
+            "centodeps", authed_server.port, pipdir, user="a", pswd="a"
+        )
         == 0
     )
-    assert pipdir.join(package.basename).check()
+    assert pipdir.joinpath(hosted_wheel_file.name).is_file()


-@pytest.mark.parametrize("pkg_frmt", ["bdist", "bdist_wheel"])
-def test_setuptoolsUpload_open(empty_packdir, port, project, package, pkg_frmt):
-    url = _build_url(port, None, None)
-    with pypirc_file(
-        dedent(
-            f"""\
-                [distutils]
-                index-servers: test
-
-                [test]
-                repository: {url}
-                username: ''
-                password: ''
-            """
-        )
-    ):
-        with new_server(empty_packdir, port):
-            with chdir(project.strpath):
-                cmd = f"setup.py -vvv {pkg_frmt} upload -r {url}"
-                for i in range(5):
-                    print(f"++Attempt #{i}")
-                    assert _run_python(cmd) == 0
-        time.sleep(SLEEP_AFTER_SRV)
-    assert len(empty_packdir.listdir()) == 1
-
-
-@pytest.mark.parametrize("pkg_frmt", ["bdist", "bdist_wheel"])
-def test_setuptoolsUpload_authed(
-    empty_packdir, port, project, package, pkg_frmt, monkeypatch
-):
-    url = _build_url(port)
-    with pypirc_file(
-        dedent(
-            f"""\
-                [distutils]
-                index-servers: test
-
-                [test]
-                repository: {url}
-                username: a
-                password: a
-            """
-        )
-    ):
-        with new_server(empty_packdir, port, authed=True):
-            with chdir(project.strpath):
-                cmd = (
-                    f"setup.py -vvv {pkg_frmt} register -r test upload -r test"
-                )
-                for i in range(5):
-                    print(f"++Attempt #{i}")
-                    assert _run_python(cmd) == 0
-        time.sleep(SLEEP_AFTER_SRV)
-    assert len(empty_packdir.listdir()) == 1
-
-
-@pytest.mark.parametrize("pkg_frmt", ["bdist", "bdist_wheel"])
-def test_setuptools_upload_partial_authed(
-    empty_packdir, port, project, pkg_frmt
-):
-    """Test uploading a package with setuptools with partial auth."""
-    url = _build_url(port)
-    with pypirc_file(
-        dedent(
-            f"""\
-                [distutils]
-                index-servers: test
-
-                [test]
-                repository: {url}
-                username: a
-                password: a
-            """
-        )
-    ):
-        with new_server(empty_packdir, port, authed="partial"):
-            with chdir(project.strpath):
-                cmd = (
-                    f"setup.py -vvv {pkg_frmt} register -r test upload -r test"
-                )
-                for i in range(5):
-                    print(f"++Attempt #{i}")
-                    assert _run_python(cmd) == 0
-        time.sleep(SLEEP_AFTER_SRV)
-    assert len(empty_packdir.listdir()) == 1
-
-
-def test_partial_authed_open_download(empty_packdir, port):
+def test_partial_authed_open_download(partial_authed_server):
     """Validate that partial auth still allows downloads."""
-    url = _build_url(port) + "/simple"
-    with new_server(empty_packdir, port, authed="partial"):
-        resp = urlopen(url)
-        assert resp.getcode() == 200
-
-
-def test_twine_upload_open(empty_packdir, port, package):
-    """Test twine upload with no authentication"""
-    user, pswd = "foo", "bar"
-    with new_server(empty_packdir, port):
-        with pypirc_tmpfile(port, user, pswd) as rcfile:
-            twine_upload([package.strpath], repository="test", conf=rcfile)
-        time.sleep(SLEEP_AFTER_SRV)
-
-    assert len(empty_packdir.listdir()) == 1
+    url = build_url(partial_authed_server.port) + "/simple"
+    resp = urlopen(url)
+    assert resp.getcode() == 200

 @pytest.mark.parametrize("hash_algo", ("md5", "sha256", "sha512"))
-def test_hash_algos(empty_packdir, port, package, pipdir, hash_algo):
+@pytest.mark.usefixtures("hosted_wheel_file")
+def test_hash_algos(server_root, pipdir, hash_algo):
     """Test twine upload with no authentication"""
-    user, pswd = "foo", "bar"
-    with new_server(
-        empty_packdir, port, other_cli="--hash-algo {}".format(hash_algo)
-    ):
-        with pypirc_tmpfile(port, user, pswd) as rcfile:
-            twine_upload([package.strpath], repository="test", conf=rcfile)
-        time.sleep(SLEEP_AFTER_SRV)
-
-    assert _run_pip_install("centodeps", port, pipdir) == 0
+    with run_server(
+        server_root, other_cli="--hash-algo {}".format(hash_algo)
+    ) as srv:
+        assert pip_download("centodeps", srv.port, pipdir) == 0


-def test_twine_upload_authed(empty_packdir, port, package):
-    """Test authenticated twine upload"""
-    user, pswd = "a", "a"
-    with new_server(empty_packdir, port, authed=False):
-        with pypirc_tmpfile(port, user, pswd) as rcfile:
-            twine_upload([package.strpath], repository="test", conf=rcfile)
-        time.sleep(SLEEP_AFTER_SRV)
-    assert len(empty_packdir.listdir()) == 1
+@pytest.mark.parametrize(["server_fixture", "pypirc_fixture"], all_servers)
+def test_twine_upload(
+    server_fixture, pypirc_fixture, server_root, wheel_file, request
+):
+    """Test twine upload with no authentication"""
+    assert len(list(server_root.iterdir())) == 0
+    request.getfixturevalue(server_fixture)
+    pypirc = request.getfixturevalue(pypirc_fixture)

-    assert empty_packdir.join(package.basename).check(), (
-        package.basename,
-        empty_packdir.listdir(),
+    run_twine("upload", wheel_file, conf=pypirc)

+    assert len(list(server_root.iterdir())) == 1
+    assert server_root.joinpath(wheel_file.name).is_file(), (
+        wheel_file.name,
+        list(server_root.iterdir()),
     )


-def test_twine_upload_partial_authed(empty_packdir, port, package):
-    """Test partially authenticated twine upload"""
-    user, pswd = "a", "a"
-    with new_server(empty_packdir, port, authed="partial"):
-        with pypirc_tmpfile(port, user, pswd) as rcfile:
-            twine_upload([package.strpath], repository="test", conf=rcfile)
-        time.sleep(SLEEP_AFTER_SRV)
-    assert len(empty_packdir.listdir()) == 1
-
-
-def test_twine_register_open(open_server, package):
+@pytest.mark.parametrize(["server_fixture", "pypirc_fixture"], all_servers)
+def test_twine_register(server_fixture, pypirc_fixture, wheel_file, request):
     """Test unauthenticated twine registration"""
-    srv = open_server
-    with pypirc_tmpfile(srv.port, "foo", "bar") as rcfile:
-        twine_register([package.strpath], repository="test", conf=rcfile)
-
-
-def test_twine_register_authed_ok(protected_server, package):
-    """Test authenticated twine registration"""
-    srv = protected_server
-    user, pswd = "a", "a"
-    with pypirc_tmpfile(srv.port, user, pswd) as rcfile:
-        twine_register([package.strpath], repository="test", conf=rcfile)
+    request.getfixturevalue(server_fixture)
+    pypirc = request.getfixturevalue(pypirc_fixture)
+    run_twine("register", wheel_file, conf=pypirc)
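Both twine tests are collapsed into single tests parametrized over the `all_servers` pairs, picking their server and .pypirc fixtures by name at runtime via `request.getfixturevalue`, pytest's supported way to parametrize over fixtures. A minimal standalone example of the pattern (names here are illustrative):

    import pytest

    @pytest.fixture
    def open_srv():
        return "open"

    @pytest.mark.parametrize("fixture_name", ["open_srv"])
    def test_picks_fixture(fixture_name, request):
        srv = request.getfixturevalue(fixture_name)  # resolved lazily by name
        assert srv == "open"
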
20  tox.ini
@@ -1,28 +1,14 @@
 [tox]
-envlist = py36, py37, py38, py39, pypy3
+envlist = py36, py37, py38, py39, py310, py311, pypy3

 [testenv]
-deps=-r{toxinidir}/requirements/dev.pip
+deps=-r{toxinidir}/requirements/test.pip
-allowlist_externals=
-    /bin/sh
-    mypy
 sitepackages=False

-[testenv:py{36,37,38,39}]
 commands=
-    /bin/sh -c "{env:PYPISERVER_SETUP_CMD:true}"
-    # individual mypy files for now, until we get the rest
-    # of the project typechecking
-    mypy \
-        pypiserver/config.py \
-        tests/test_init.py
-    pytest --cov=pypiserver {posargs}
-
-[testenv:pypy3]
-commands=
-    /bin/sh -c "{env:PYPISERVER_SETUP_CMD:true}"
-    # no mypy in pypy
-    pytest --cov=pypiserver {posargs}
+    pytest --cov=pypiserver {posargs:tests}


 [pytest]