From b2eb2ae9b37772e794f093d62be89c65ceb00f95 Mon Sep 17 00:00:00 2001 From: Donald Stufft Date: Mon, 3 Aug 2015 12:57:39 -0400 Subject: [PATCH] Implement the legacy file upload API --- requirements.txt | 1 + setup.py | 2 + tests/unit/legacy/api/test_pypi.py | 1143 +++++++++++++++++++++++++ tests/unit/packaging/test_models.py | 24 +- tests/unit/packaging/test_services.py | 39 + tests/unit/test_routes.py | 9 + warehouse/legacy/api/pypi.py | 681 +++++++++++++++ warehouse/packaging/interfaces.py | 5 + warehouse/packaging/models.py | 16 + warehouse/packaging/services.py | 12 + warehouse/routes.py | 3 + 11 files changed, 1934 insertions(+), 1 deletion(-) create mode 100644 tests/unit/legacy/api/test_pypi.py create mode 100644 warehouse/legacy/api/pypi.py diff --git a/requirements.txt b/requirements.txt index e33a2f598f65..9b76be754871 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,6 +24,7 @@ jmespath==0.7.1 # via boto3, botocore mako==1.0.1 # via alembic markupsafe==0.23 # via jinja2, mako, pyramid-jinja2 msgpack-python==0.4.6 +packaging==15.3 passlib==1.6.4 pastedeploy==1.5.2 # via pyramid psycopg2==2.6.1 diff --git a/setup.py b/setup.py index ef8d622b72b4..5f3eba749598 100644 --- a/setup.py +++ b/setup.py @@ -78,6 +78,7 @@ "itsdangerous", "Jinja2>=2.8", "msgpack-python", + "packaging>=15.2", "passlib>=1.6.4", "psycopg2", "pyramid>=1.6a1", @@ -88,6 +89,7 @@ "readme>=0.5.1", "redis", "setproctitle", + "setuptools", "sqlalchemy>=0.9", "sqlalchemy-citext", "structlog", diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py new file mode 100644 index 000000000000..aea95956a90d --- /dev/null +++ b/tests/unit/legacy/api/test_pypi.py @@ -0,0 +1,1143 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os.path + +from unittest import mock + +import pkg_resources +import pretend +import pytest + +from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden +from webob.multidict import MultiDict +from wtforms.validators import ValidationError + +from warehouse.classifiers.models import Classifier +from warehouse.legacy.api import pypi +from warehouse.packaging.interfaces import IFileStorage +from warehouse.packaging.models import ( + File, Filename, Dependency, DependencyKind, Release, Project, Role, +) + +from ....common.db.accounts import UserFactory +from ....common.db.packaging import ( + ProjectFactory, ReleaseFactory, RoleFactory, +) + + +def test_exc_with_message(): + exc = pypi._exc_with_message(HTTPBadRequest, "My Test Message.") + assert isinstance(exc, HTTPBadRequest) + assert exc.status_code == 400 + assert exc.status == "400 My Test Message." 
+ + +class TestValidation: + + @pytest.mark.parametrize("version", ["1.0", "30a1", "1!1", "1.0-1"]) + def test_validates_valid_pep440_version(self, version): + form, field = pretend.stub(), pretend.stub(data=version) + pypi._validate_pep440_version(form, field) + + @pytest.mark.parametrize("version", ["dog", "1.0.dev.a1", "1.0+local"]) + def test_validates_invalid_pep440_version(self, version): + form, field = pretend.stub(), pretend.stub(data=version) + with pytest.raises(ValidationError): + pypi._validate_pep440_version(form, field) + + @pytest.mark.parametrize( + ("requirement", "expected"), + [ + ("foo", ("foo", None)), + ("foo (>1.0)", ("foo", ">1.0")), + ], + ) + def test_parses_legacy_requirement_valid(self, requirement, expected): + parsed = pypi._parse_legacy_requirement(requirement) + assert parsed == expected + + @pytest.mark.parametrize("requirement", ["foo bar"]) + def test_parses_legacy_requirement_invalid(self, requirement): + with pytest.raises(ValueError): + pypi._parse_legacy_requirement(requirement) + + @pytest.mark.parametrize("specifier", [">=1.0", "<=1.0-1"]) + def test_validates_valid_pep440_specifier(self, specifier): + pypi._validate_pep440_specifier(specifier) + + @pytest.mark.parametrize("specifier", ["wat?"]) + def test_validates_invalid_pep440_specifier(self, specifier): + with pytest.raises(ValidationError): + pypi._validate_pep440_specifier(specifier) + + @pytest.mark.parametrize( + ("requirement", "specifier"), + [ + ("foo (>=1.0)", ">=1.0"), + ("foo", None), + ("_foo", None), + ("foo2", None), + ], + ) + def test_validates_legacy_non_dist_req_valid(self, monkeypatch, + requirement, specifier): + spec_validator = pretend.call_recorder(lambda spec: None) + monkeypatch.setattr(pypi, "_validate_pep440_specifier", spec_validator) + pypi._validate_legacy_non_dist_req(requirement) + + if specifier is not None: + assert spec_validator.calls == [pretend.call(specifier)] + else: + assert spec_validator.calls == [] + + @pytest.mark.parametrize( + "requirement", + [ + "foo-bar (>=1.0)", + "foo-bar", + "2foo (>=1.0)", + "2foo", + "☃ (>=1.0)", + "☃", + ], + ) + def test_validates_legacy_non_dist_req_invalid(self, monkeypatch, + requirement): + spec_validator = pretend.call_recorder(lambda spec: None) + monkeypatch.setattr(pypi, "_validate_pep440_specifier", spec_validator) + + with pytest.raises(ValidationError): + pypi._validate_legacy_non_dist_req(requirement) + + assert spec_validator.calls == [] + + def test_validate_legacy_non_dist_req_list(self, monkeypatch): + validator = pretend.call_recorder(lambda datum: None) + monkeypatch.setattr(pypi, "_validate_legacy_non_dist_req", validator) + + data = [pretend.stub(), pretend.stub(), pretend.stub()] + form, field = pretend.stub(), pretend.stub(data=data) + pypi._validate_legacy_non_dist_req_list(form, field) + + assert validator.calls == [pretend.call(datum) for datum in data] + + @pytest.mark.parametrize( + ("requirement", "specifier"), + [ + ("foo (>=1.0)", ">=1.0"), + ("foo", None), + ("foo2", None), + ("foo-bar", None), + ("foo_bar", None), + ], + ) + def test_validate_legacy_dist_req_valid(self, monkeypatch, requirement, + specifier): + spec_validator = pretend.call_recorder(lambda spec: None) + monkeypatch.setattr(pypi, "_validate_pep440_specifier", spec_validator) + pypi._validate_legacy_dist_req(requirement) + + if specifier is not None: + assert spec_validator.calls == [pretend.call(specifier)] + else: + assert spec_validator.calls == [] + + @pytest.mark.parametrize( + "requirement", + [ + "☃ (>=1.0)", + "☃", + 
"foo-", + "foo- (>=1.0)", + "_foo", + "_foo (>=1.0)", + ], + ) + def test_validate_legacy_dist_req_invalid(self, monkeypatch, requirement): + spec_validator = pretend.call_recorder(lambda spec: None) + monkeypatch.setattr(pypi, "_validate_pep440_specifier", spec_validator) + + with pytest.raises(ValidationError): + pypi._validate_legacy_dist_req(requirement) + + assert spec_validator.calls == [] + + def test_validate_legacy_dist_req_list(self, monkeypatch): + validator = pretend.call_recorder(lambda datum: None) + monkeypatch.setattr(pypi, "_validate_legacy_dist_req", validator) + + data = [pretend.stub(), pretend.stub(), pretend.stub()] + form, field = pretend.stub(), pretend.stub(data=data) + pypi._validate_legacy_dist_req_list(form, field) + + assert validator.calls == [pretend.call(datum) for datum in data] + + @pytest.mark.parametrize( + ("requirement", "specifier"), + [ + ("C", None), + ("openssl (>=1.0.0)", ">=1.0.0"), + ], + ) + def test_validate_requires_external(self, monkeypatch, requirement, + specifier): + spec_validator = pretend.call_recorder(lambda spec: None) + monkeypatch.setattr(pypi, "_validate_pep440_specifier", spec_validator) + + pypi._validate_requires_external(requirement) + + if specifier is not None: + assert spec_validator.calls == [pretend.call(specifier)] + else: + assert spec_validator.calls == [] + + def test_validate_requires_external_list(self, monkeypatch): + validator = pretend.call_recorder(lambda datum: None) + monkeypatch.setattr(pypi, "_validate_requires_external", validator) + + data = [pretend.stub(), pretend.stub(), pretend.stub()] + form, field = pretend.stub(), pretend.stub(data=data) + pypi._validate_requires_external_list(form, field) + + assert validator.calls == [pretend.call(datum) for datum in data] + + @pytest.mark.parametrize( + "project_url", + [ + "Home, https://pypi.python.org/", + ("A" * 32) + ", https://example.com/", + ], + ) + def test_validate_project_url_valid(self, project_url): + pypi._validate_project_url(project_url) + + @pytest.mark.parametrize( + "project_url", + [ + "Home,https://pypi.python.org/", + "https://pypi.python.org/", + ", https://pypi.python.org/", + "Home, ", + ("A" * 33) + ", https://example.com/", + ], + ) + def test_validate_project_url_invalid(self, project_url): + with pytest.raises(ValidationError): + pypi._validate_project_url(project_url) + + def test_validate_project_url_list(self, monkeypatch): + validator = pretend.call_recorder(lambda datum: None) + monkeypatch.setattr(pypi, "_validate_project_url", validator) + + data = [pretend.stub(), pretend.stub(), pretend.stub()] + form, field = pretend.stub(), pretend.stub(data=data) + pypi._validate_project_url_list(form, field) + + assert validator.calls == [pretend.call(datum) for datum in data] + + +def test_construct_dependencies(): + types = { + "requires": DependencyKind.requires, + "provides": DependencyKind.provides, + } + + form = pretend.stub( + requires=pretend.stub(data=["foo (>1)"]), + provides=pretend.stub(data=["bar (>2)"]), + ) + + for dep in pypi._construct_dependencies(form, types): + assert isinstance(dep, Dependency) + + if dep.kind == DependencyKind.requires: + assert dep.specifier == "foo (>1)" + elif dep.kind == DependencyKind.provides: + assert dep.specifier == "bar (>2)" + else: + pytest.fail("Unknown type of specifier") + + +class TestListField: + + @pytest.mark.parametrize( + ("data", "expected"), + [ + (["foo", "bar"], ["foo", "bar"]), + ([" foo"], ["foo"]), + (["f oo "], ["f oo"]), + ], + ) + def test_processes_form_data(self, 
data, expected): + field = pypi.ListField() + field = field.bind(pretend.stub(meta=pretend.stub()), "formname") + field.process_formdata(data) + assert field.data == expected + + +class TestMetadataForm: + + @pytest.mark.parametrize( + "data", + [ + {"filetype": "sdist"}, + {"filetpye": "bdist_wheel", "pyversion": "3.4"}, + ], + ) + def test_full_validate_valid(self, data): + form = pypi.MetadataForm(MultiDict(data)) + form.full_validate() + + @pytest.mark.parametrize( + "data", + [ + {"filetype": "sdist", "pyversion": "3.4"}, + {"filetype": "bdist_wheel"}, + ], + ) + def test_full_validate_invalid(self, data): + form = pypi.MetadataForm(MultiDict(data)) + with pytest.raises(ValidationError): + form.full_validate() + + +class TestFileUpload: + + @pytest.mark.parametrize("version", ["2", "3", "-1", "0", "dog", "cat"]) + def test_fails_invalid_version(self, pyramid_request, version): + pyramid_request.POST["protocol_version"] = version + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(pyramid_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 Unknown protocol version." + + @pytest.mark.parametrize( + ("post_data", "message"), + [ + # metadata_version errors. + ({}, "metadata_version: This field is required."), + ( + {"metadata_version": "-1"}, + "metadata_version: Unknown Metadata Version", + ), + + # name errors. + ({"metadata_version": "1.2"}, "name: This field is required."), + ( + {"metadata_version": "1.2", "name": "foo-"}, + "name: Must start and end with a letter or numeral and " + "contain only ascii numeric and '.', '_' and '-'.", + ), + + # version errors. + ( + {"metadata_version": "1.2", "name": "example"}, + "version: This field is required.", + ), + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "dog", + }, + "version: Must start and end with a letter or numeral and " + "contain only ascii numeric and '.', '_' and '-'.", + ), + + # filetype/pyversion errors. + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + }, + "filetype: This field is required.", + ), + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "bdist_wat", + }, + "__all__: Python version is required for binary distribution " + "uploads.", + ), + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "bdist_wat", + "pyversion": "1.0", + }, + "filetype: Unknown type of file.", + ), + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "pyversion": "1.0", + }, + "__all__: The only valid Python version for a sdist is " + "'source'.", + ), + + # md5_digest errors. 
+ ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + }, + "md5_digest: This field is required.", + ), + + # summary errors + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "md5_digest": "a fake md5 digest", + "summary": "A" * 513, + }, + "summary: Field cannot be longer than 512 characters.", + ), + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "md5_digest": "a fake md5 digest", + "summary": "A\nB", + }, + "summary: Multiple lines are not allowed.", + ), + ], + ) + def test_fails_invalid_post_data(self, db_request, post_data, message): + db_request.POST = MultiDict(post_data) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 {}".format(message) + + @pytest.mark.parametrize("name", ["requirements.txt", "rrequirements.txt"]) + def test_fails_with_invalid_names(self, db_request, name): + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": name, + "version": "1.0", + "filetype": "sdist", + "md5_digest": "a fake md5 digest", + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 The name {!r} is not allowed.".format(name) + + def test_upload_fails_without_file(self, db_request): + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "md5_digest": "a fake md5 digest", + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 Upload payload does not have a file." 
+ + def test_upload_cleans_unknown_values(self, db_request): + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": "UNKNOWN", + "version": "1.0", + "filetype": "sdist", + "md5_digest": "a fake md5 digest", + }) + + with pytest.raises(HTTPBadRequest): + pypi.file_upload(db_request) + + assert "name" not in db_request.POST + + @pytest.mark.parametrize("has_signature", [True, False]) + def test_successful_upload(self, db_request, has_signature): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + db_request.db.add( + Classifier(classifier="Environment :: Other Environment"), + ) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "pyversion": "source", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + db_request.POST.extend([ + ("classifiers", "Environment :: Other Environment"), + ]) + + if has_signature: + db_request.POST["gpg_signature"] = pretend.stub( + filename=filename + ".asc", + file=io.BytesIO( + b"-----BEGIN PGP SIGNATURE-----\n" + b" This is a Fake Signature" + ), + ) + + storage_service = pretend.stub( + store=pretend.call_recorder(lambda path, content: None), + ) + db_request.find_service = pretend.call_recorder( + lambda svc: storage_service + ) + + resp = pypi.file_upload(db_request) + + assert resp.status_code == 200 + assert db_request.find_service.calls == [pretend.call(IFileStorage)] + assert len(storage_service.store.calls) == 2 if has_signature else 1 + assert storage_service.store.calls[0] == pretend.call( + os.path.join( + "source", + project.name[0], + project.name, + filename, + ), + mock.ANY, + ) + assert storage_service.store.calls[0].args[1].read() == b"A fake file." + + if has_signature: + assert storage_service.store.calls[1] == pretend.call( + os.path.join( + "source", + project.name[0], + project.name, + filename + ".asc", + ), + mock.ANY, + ) + assert storage_service.store.calls[1].args[1].read() == \ + db_request.POST["gpg_signature"].file.getvalue() + + # Ensure that a File object has been created. + db_request.db.query(File) \ + .filter((File.release == release) & + (File.filename == filename)) \ + .one() + + # Ensure that a Filename object has been created. 
+ db_request.db.query(Filename) \ + .filter(Filename.filename == filename).one() + + @pytest.mark.parametrize("sig", [b"lol nope"]) + def test_upload_fails_with_invalid_signature(self, db_request, sig): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + "gpg_signature": pretend.stub( + filename=filename + ".asc", + file=io.BytesIO(sig), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 PGP signature is not ASCII armored." + + def test_upload_fails_with_invalid_classifier(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + db_request.POST.extend([ + ("classifiers", "Environment :: Other Environment"), + ]) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == ( + "400 classifiers: 'Environment :: Other Environment' is not a " + "valid choice for this field" + ) + + def test_upload_fails_with_invalid_hash(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == ( + "400 The MD5 digest supplied does not match a digest calculated " + "from the uploaded file." 
+ ) + + def test_upload_fails_with_too_large_file(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 File too large." + + def test_upload_fails_with_too_large_signature(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a"), + ), + "gpg_signature": pretend.stub( + filename=filename + ".asc", + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 Signature too large." + + def test_upload_fails_with_previously_used_filename(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + db_request.db.add(Filename(filename=filename)) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == ( + "400 This filename has previously been used, you should use a " + "different version." + ) + + def test_upload_fails_with_existing_file(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + db_request.db.add(File(release=release, filename=filename)) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 File already exists." 
+ + def test_upload_fails_with_wrong_filename(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "nope-{}.tar.gz".format(release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == ( + "400 The filename for {!r} must start with {!r}.".format( + project.name, + pkg_resources.safe_name(project.name).lower(), + ) + ) + + def test_upload_fails_with_invalid_extension(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.wat".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 Invalid file extension." + + @pytest.mark.parametrize("character", ["/", "\\"]) + def test_upload_fails_with_unsafe_filename(self, db_request, character): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.wat".format( + character + project.name, + release.version, + ) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == \ + "400 Cannot upload a file with '/' or '\\' in the name." 
+ + def test_upload_fails_without_permission(self, pyramid_config, db_request): + pyramid_config.testing_securitypolicy(permissive=False) + + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + + filename = "{}-{}.tar.wat".format(project.name, release.version) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (pypi.MAXIMUM_FILESIZE + 1)), + ), + }) + + with pytest.raises(HTTPForbidden): + pypi.file_upload(db_request) + + @pytest.mark.parametrize( + "plat", + ["any", "win32", "win-amd64", "win_amd64", "win-ia64", "win_ia64"], + ) + def test_upload_succeeds_with_wheel(self, db_request, plat): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}-cp34-none-{}.whl".format( + project.name, + release.version, + plat, + ) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "bdist_wheel", + "pyversion": "cp34", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + + storage_service = pretend.stub( + store=pretend.call_recorder(lambda path, content: None), + ) + db_request.find_service = pretend.call_recorder( + lambda svc: storage_service + ) + + resp = pypi.file_upload(db_request) + + assert resp.status_code == 200 + assert db_request.find_service.calls == [pretend.call(IFileStorage)] + assert storage_service.store.calls == [ + pretend.call( + os.path.join( + "cp34", + project.name[0], + project.name, + filename, + ), + mock.ANY, + ), + ] + assert storage_service.store.calls[0].args[1].read() == b"A fake file." + + # Ensure that a File object has been created. + db_request.db.query(File) \ + .filter((File.release == release) & + (File.filename == filename)) \ + .one() + + # Ensure that a Filename object has been created. + db_request.db.query(Filename) \ + .filter(Filename.filename == filename).one() + + @pytest.mark.parametrize("plat", ["linux_x86_64", "linux_x86_64.win32"]) + def test_upload_fails_with_unsupported_wheel_plat(self, db_request, plat): + user = UserFactory.create() + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}-cp34-none-{}.whl".format( + project.name, + release.version, + plat, + ) + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "bdist_wheel", + "pyversion": "cp34", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + + with pytest.raises(HTTPBadRequest) as excinfo: + pypi.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 Binary wheel for an unsupported platform." 
+ + def test_upload_succeeds_creates_release(self, db_request): + user = UserFactory.create() + project = ProjectFactory.create() + RoleFactory.create(user=user, project=project) + + db_request.db.add( + Classifier(classifier="Environment :: Other Environment"), + ) + db_request.db.add( + Classifier(classifier="Programming Language :: Python"), + ) + + filename = "{}-{}.tar.gz".format(project.name, "1.0") + + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + db_request.POST.extend([ + ("classifiers", "Environment :: Other Environment"), + ("classifiers", "Programming Language :: Python"), + ("requires_dist", "foo"), + ("requires_dist", "bar (>1.0)"), + ("project_urls", "Test, https://example.com/"), + ("requires_external", "Cheese (>1.0)"), + ("provides", "testing"), + ]) + + storage_service = pretend.stub(store=lambda path, content: None) + db_request.find_service = lambda svc: storage_service + + resp = pypi.file_upload(db_request) + + assert resp.status_code == 200 + + # Ensure that a Release object has been created. + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & + (Release.version == "1.0")) + .one() + ) + assert release.summary == "This is my summary!" + assert release.classifiers == [ + "Environment :: Other Environment", + "Programming Language :: Python", + ] + assert set(release.requires_dist) == {"foo", "bar (>1.0)"} + assert set(release.project_urls) == {"Test, https://example.com/"} + assert set(release.requires_external) == {"Cheese (>1.0)"} + assert set(release.provides) == {"testing"} + + # Ensure that a File object has been created. + db_request.db.query(File) \ + .filter((File.release == release) & + (File.filename == filename)) \ + .one() + + # Ensure that a Filename object has been created. + db_request.db.query(Filename) \ + .filter(Filename.filename == filename).one() + + def test_upload_succeeds_creates_project(self, db_request): + user = UserFactory.create() + + filename = "{}-{}.tar.gz".format("example", "1.0") + + db_request.user = user + db_request.POST = MultiDict({ + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "md5_digest": "335c476dc930b959dda9ec82bd65ef19", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"A fake file."), + ), + }) + + storage_service = pretend.stub(store=lambda path, content: None) + db_request.find_service = lambda svc: storage_service + + resp = pypi.file_upload(db_request) + + assert resp.status_code == 200 + + # Ensure that a Project object has been created. + project = ( + db_request.db.query(Project) + .filter(Project.name == "example") + .one() + ) + + # Ensure that a Role with the user as owner has been created. + role = db_request.db.query(Role) \ + .filter((Role.user == user) & + (Role.project == project)) \ + .one() + assert role.role_name == "Owner" + + # Ensure that a Release object has been created. + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & + (Release.version == "1.0")) + .one() + ) + + # Ensure that a File object has been created. + db_request.db.query(File) \ + .filter((File.release == release) & + (File.filename == filename)) \ + .one() + + # Ensure that a Filename object has been created. 
+ db_request.db.query(Filename) \ + .filter(Filename.filename == filename).one() diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py index d1a49dac2aec..9e284cdec418 100644 --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -13,11 +13,13 @@ import pretend import pytest +from pyramid.security import Allow + from warehouse.packaging.models import ProjectFactory, File from ...common.db.packaging import ( ProjectFactory as DBProjectFactory, ReleaseFactory as DBReleaseFactory, - FileFactory as DBFileFactory, + FileFactory as DBFileFactory, RoleFactory as DBRoleFactory, ) @@ -74,6 +76,26 @@ def test_doc_url(self, pyramid_config, db_request): pretend.call("legacy.docs", project=project.name), ] + def test_acl(self, db_session): + project = DBProjectFactory.create() + owner1 = DBRoleFactory.create(project=project) + owner2 = DBRoleFactory.create(project=project) + maintainer1 = DBRoleFactory.create( + project=project, + role_name="Maintainer", + ) + maintainer2 = DBRoleFactory.create( + project=project, + role_name="Maintainer", + ) + + assert project.__acl__() == [ + (Allow, owner1.user.id, ["upload"]), + (Allow, owner2.user.id, ["upload"]), + (Allow, maintainer1.user.id, ["upload"]), + (Allow, maintainer2.user.id, ["upload"]), + ] + class TestFile: diff --git a/tests/unit/packaging/test_services.py b/tests/unit/packaging/test_services.py index 683d3ed1883e..ac8b506ab7a1 100644 --- a/tests/unit/packaging/test_services.py +++ b/tests/unit/packaging/test_services.py @@ -11,6 +11,7 @@ # limitations under the License. import io +import os.path import boto3.session import botocore.exceptions @@ -142,6 +143,21 @@ def test_raises_when_file_non_existant(self, tmpdir): with pytest.raises(FileNotFoundError): storage.get("file.txt") + def test_stores_file(self, tmpdir): + storage = LocalFileStorage(str(tmpdir)) + storage.store("foo/bar.txt", io.BytesIO(b"Test File!")) + with open(os.path.join(str(tmpdir), "foo/bar.txt"), "rb") as fp: + assert fp.read() == b"Test File!" + + def test_stores_two_files(self, tmpdir): + storage = LocalFileStorage(str(tmpdir)) + storage.store("foo/first.txt", io.BytesIO(b"First Test File!")) + storage.store("foo/second.txt", io.BytesIO(b"Second Test File!")) + with open(os.path.join(str(tmpdir), "foo/first.txt"), "rb") as fp: + assert fp.read() == b"First Test File!" + with open(os.path.join(str(tmpdir), "foo/second.txt"), "rb") as fp: + assert fp.read() == b"Second Test File!" 
+ class TestS3FileStorage: @@ -203,3 +219,26 @@ def raiser(): with pytest.raises(botocore.exceptions.ClientError): storage.get("file.txt") + + def test_stores_file(self): + obj = pretend.stub(put=pretend.call_recorder(lambda Body: None)) + bucket = pretend.stub(Object=pretend.call_recorder(lambda path: obj)) + storage = S3FileStorage(bucket) + storage.store("foo/bar.txt", io.BytesIO(b"Test File!")) + assert bucket.Object.calls == [pretend.call("foo/bar.txt")] + assert obj.put.calls == [pretend.call(Body=b"Test File!")] + + def test_stores_two_files(self): + obj = pretend.stub(put=pretend.call_recorder(lambda Body: None)) + bucket = pretend.stub(Object=pretend.call_recorder(lambda path: obj)) + storage = S3FileStorage(bucket) + storage.store("foo/first.txt", io.BytesIO(b"First Test File!")) + storage.store("foo/second.txt", io.BytesIO(b"Second Test File!")) + assert bucket.Object.calls == [ + pretend.call("foo/first.txt"), + pretend.call("foo/second.txt"), + ] + assert obj.put.calls == [ + pretend.call(Body=b"First Test File!"), + pretend.call(Body=b"Second Test File!"), + ] diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py index 81c8ea8137c0..af779f786806 100644 --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -33,6 +33,11 @@ def add_route(*args, **kwargs): def add_redirect(*args, **kwargs): pass + @staticmethod + @pretend.call_recorder + def add_pypi_action_route(name, action, **kwargs): + pass + config = FakeConfig() includeme(config) @@ -85,3 +90,7 @@ def add_redirect(*args, **kwargs): pretend.call("/pypi/{name}/", "/project/{name}/"), pretend.call("/pypi/{name}/{version}/", "/project/{name}/{version}/"), ] + + assert config.add_pypi_action_route.calls == [ + pretend.call("legacy.api.pypi.file_upload", "file_upload"), + ] diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py new file mode 100644 index 000000000000..2cb29f31cfee --- /dev/null +++ b/warehouse/legacy/api/pypi.py @@ -0,0 +1,681 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import hashlib +import hmac +import io +import re + +import packaging.specifiers +import packaging.version +import pkg_resources +import wtforms +import wtforms.validators + +from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden +from pyramid.response import Response +from pyramid.view import view_config +from sqlalchemy import func +from sqlalchemy.orm.exc import NoResultFound + +from warehouse import forms +from warehouse.classifiers.models import Classifier +from warehouse.csrf import csrf_exempt +from warehouse.packaging.interfaces import IFileStorage +from warehouse.packaging.models import ( + Project, Release, Dependency, DependencyKind, Role, File, Filename, +) +from warehouse.sessions import uses_session +from warehouse.utils.http import require_POST + + +MAXIMUM_FILESIZE = 60 * 1024 * 1024 # 60M + + +ALLOWED_PLATFORMS = { + "any", "win32", "win-amd64", "win_amd64", "win-ia64", "win_ia64", +} + + +_error_message_order = ["metadata_version", "name", "version"] + + +_dist_file_re = re.compile( + r".+?\.(exe|tar\.gz|bz2|rpm|deb|zip|tgz|egg|dmg|msi|whl)$", + re.I, +) + + +_wheel_file_re = re.compile( + r""" + ^ + (?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) + ( + (-(?P<build>\d.*?))? + -(?P<pyver>.+?) + -(?P<abi>.+?) + -(?P<plat>.+?) + (?:\.whl|\.dist-info) + ) + $ + """, + re.VERBOSE, +) + + +_project_name_re = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", + re.IGNORECASE, +) + + +_legacy_specifier_re = re.compile( + r"^(?P<name>\S+)(?: \((?P<specifier>\S+)\))?$" +) + + +def _exc_with_message(exc, message): + # The crappy old API that PyPI offered uses the status to pass down + # messages to the client. So this function will make that easier to do. + resp = exc(message) + resp.status = "{} {}".format(resp.status_code, message) + return resp + + +def _validate_pep440_version(form, field): + parsed = packaging.version.parse(field.data) + + # Check that this version is a valid PEP 440 version at all. + if not isinstance(parsed, packaging.version.Version): + raise wtforms.validators.ValidationError( + "Must start and end with a letter or numeral and contain only " + "ascii numeric and '.', '_' and '-'." + ) + + # Check that this version does not have a PEP 440 local segment attached + # to it. + if parsed.local is not None: + raise wtforms.validators.ValidationError( + "Cannot use PEP 440 local versions." + ) + + +def _parse_legacy_requirement(requirement): + parsed = _legacy_specifier_re.search(requirement) + if parsed is None: + raise ValueError("Invalid Requirement.") + return parsed.groupdict()["name"], parsed.groupdict()["specifier"] + + +def _validate_pep440_specifier(specifier): + try: + packaging.specifiers.SpecifierSet(specifier) + except packaging.specifiers.InvalidSpecifier: + raise wtforms.validators.ValidationError( + "Invalid specifier in requirement." + ) from None + + +def _validate_legacy_non_dist_req(requirement): + name, specifier = _parse_legacy_requirement(requirement) + + if "_" in name: + name = name.replace("_", "") + + if not name.isalnum() or name[0].isdigit(): + raise wtforms.validators.ValidationError( + "Must be a valid Python identifier." + ) + + if specifier is not None: + _validate_pep440_specifier(specifier) + + +def _validate_legacy_non_dist_req_list(form, field): + for datum in field.data: + _validate_legacy_non_dist_req(datum) + + +def _validate_legacy_dist_req(requirement): + name, specifier = _parse_legacy_requirement(requirement) + + if not _project_name_re.search(name): + raise wtforms.validators.ValidationError( + "Must be a valid project name."
+ ) + + if specifier is not None: + _validate_pep440_specifier(specifier) + + +def _validate_legacy_dist_req_list(form, field): + for datum in field.data: + _validate_legacy_dist_req(datum) + + +def _validate_requires_external(requirement): + name, specifier = _parse_legacy_requirement(requirement) + + # TODO: Is it really reasonable to parse the specifier using PEP 440? + if specifier is not None: + _validate_pep440_specifier(specifier) + + +def _validate_requires_external_list(form, field): + for datum in field.data: + _validate_requires_external(datum) + + +def _validate_project_url(value): + try: + label, url = value.split(", ", 1) + except ValueError: + raise wtforms.validators.ValidationError( + "Must have both a label and an URL.", + ) from None + + if not label: + raise wtforms.validators.ValidationError("Must have a label.") + + if len(label) > 32: + raise wtforms.validators.ValidationError( + "Label must not be longer than 32 characters." + ) + + if not url: + raise wtforms.validators.ValidationError("Must have an URL.") + + # TODO: Actually validate that the URL is a valid URL. + + +def _validate_project_url_list(form, field): + for datum in field.data: + _validate_project_url(datum) + + +def _construct_dependencies(form, types): + for name, kind in types.items(): + for item in getattr(form, name).data: + yield Dependency(kind=kind.value, specifier=item) + + +class ListField(wtforms.Field): + + def process_formdata(self, valuelist): + self.data = [v.strip() for v in valuelist] + + +# TODO: Eventually this whole validation thing should move to the packaging +# library and we should just call that. However until PEP 426 is done +# that library won't have an API for this. +class MetadataForm(forms.Form): + + # Metadata version + metadata_version = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(), + wtforms.validators.AnyOf( + # Note: This isn't really Metadata 2.0, however bdist_wheel + # claims it is producing a Metadata 2.0 metadata when in + # reality it's more like 1.2 with some extensions. + ["1.0", "1.1", "1.2", "2.0"], + message="Unknown Metadata Version", + ), + ], + ) + + # Identity Project and Release + name = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(), + wtforms.validators.Regexp( + _project_name_re, + re.IGNORECASE, + message=( + "Must start and end with a letter or numeral and contain " + "only ascii numeric and '.', '_' and '-'." + ), + ), + ], + ) + version = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(), + wtforms.validators.Regexp( + r"^(?!\s).*(? MAXIMUM_FILESIZE: + raise _exc_with_message(HTTPBadRequest, "File too large.") + file_content.write(chunk) + file_hash.update(chunk) + file_content.seek(0) + + # Get the signature if it was included. + signature_size = 0 + if "gpg_signature" in request.POST: + signature = io.BytesIO() + for chunk in iter( + lambda: request.POST["gpg_signature"].file.read(8096), b""): + signature_size += len(chunk) + if signature_size > MAXIMUM_FILESIZE: + raise _exc_with_message(HTTPBadRequest, "Signature too large.") + signature.write(chunk) + signature.seek(0) + else: + signature = None + + # Actually verify that the md5 hash of the file matches the expected md5 + # hash. We probably don't actually need to use hmac.compare_digest here + # since both the md5_digest and the file whose file_hash we've compute + # comes from the remote user, however better safe than sorry. 
+ if not hmac.compare_digest(form.md5_digest.data, file_hash.hexdigest()): + raise _exc_with_message( + HTTPBadRequest, + "The MD5 digest supplied does not match a digest calculated from " + "the uploaded file." + ) + + # TODO: Check the file to make sure it is a valid distribution file. + + # Check that if it's a binary wheel, it's on a supported platform + if filename.endswith(".whl"): + wheel_info = _wheel_file_re.match(filename) + plats = wheel_info.group("plat").split(".") + if set(plats) - ALLOWED_PLATFORMS: + raise _exc_with_message( + HTTPBadRequest, + "Binary wheel for an unsupported platform.", + ) + + # Check whether signature is ASCII armored + if (signature is not None and + not signature.getvalue().startswith( + b"-----BEGIN PGP SIGNATURE-----")): + raise _exc_with_message( + HTTPBadRequest, + "PGP signature is not ASCII armored.", + ) + + # TODO: We need some sort of trigger that will automatically add filenames + # to Filename instead of relying on this code running inside of our + # upload API. + request.db.add(Filename(filename=filename)) + + # Store the information about the file in the database. + file_ = File( + release=release, + filename=filename, + python_version=form.pyversion.data, + packagetype=form.filetype.data, + comment_text=form.comment.data, + size=file_size, + has_signature=bool(signature), + md5_digest=form.md5_digest.data, + ) + request.db.add(file_) + + # TODO: We need a better answer about how to make this transactional so + # this won't take affect until after a commit has happened, for now + # we'll just ignore it and save it before the transaction is + # commited. + storage = request.find_service(IFileStorage) + storage.store(file_.path, file_content) + if signature is not None: + storage.store(file_.pgp_path, signature) + + return Response() diff --git a/warehouse/packaging/interfaces.py b/warehouse/packaging/interfaces.py index bc283c95c4aa..6fcc4069aeda 100644 --- a/warehouse/packaging/interfaces.py +++ b/warehouse/packaging/interfaces.py @@ -44,3 +44,8 @@ def get(path): Return a file like object that can be read to access the file located at the given path. """ + + def store(path, file_obj): + """ + Save the file object to path. + """ diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 179caab5c3fd..01483bd6a37b 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -13,6 +13,7 @@ import enum from citext import CIText +from pyramid.security import Allow from pyramid.threadlocal import get_current_request from sqlalchemy import ( CheckConstraint, Column, Enum, ForeignKey, ForeignKeyConstraint, Index, @@ -107,6 +108,21 @@ def __getitem__(self, version): except NoResultFound: raise KeyError from None + def __acl__(self): + session = orm.object_session(self) + acls = [] + + # Get all of the users for this project. 
+ query = session.query(Role).filter(Role.project == self) + query = query.options(orm.lazyload("project")) + query = query.options(orm.joinedload("user").lazyload("emails")) + for role in sorted( + query.all(), + key=lambda x: ["Owner", "Maintainer"].index(x.role_name)): + acls.append((Allow, role.user.id, ["upload"])) + + return acls + @property def documentation_url(self): # TODO: Move this into the database and elimnate the use of the diff --git a/warehouse/packaging/services.py b/warehouse/packaging/services.py index 900d6ec6894d..fcfe13a8d743 100644 --- a/warehouse/packaging/services.py +++ b/warehouse/packaging/services.py @@ -105,6 +105,12 @@ def create_service(cls, context, request): def get(self, path): return open(os.path.join(self.base, path), "rb") + def store(self, path, file_obj): + destination = os.path.join(self.base, path) + os.makedirs(os.path.dirname(destination), exist_ok=True) + with open(destination, "wb") as fp: + fp.write(file_obj.read()) + @implementer(IFileStorage) class S3FileStorage: @@ -126,3 +132,9 @@ def get(self, path): if exc.response["Error"]["Code"] != "NoSuchKey": raise raise FileNotFoundError("No such key: {!r}".format(path)) from None + + def store(self, path, file_obj): + # TODO: This should ideally be using multipart uploading which will + # enable "commiting" and "rollingback" the upload based on the + # transaction state. + self.bucket.Object(path).put(Body=file_obj.read()) diff --git a/warehouse/routes.py b/warehouse/routes.py index 6d5bb7f0727e..81e002303b4f 100644 --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -60,6 +60,9 @@ def includeme(config): traverse="/{name}/{version}", ) + # Legacy Action URLs + config.add_pypi_action_route("legacy.api.pypi.file_upload", "file_upload") + # Legacy Documentation config.add_route("legacy.docs", config.registry.settings["docs.url"])
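
Usage sketch (illustrative only): the tests above drive pypi.file_upload() through the Pyramid request directly. For orientation, below is a rough client-side sketch of the kind of multipart POST this view is meant to accept. The exact URL is determined by add_pypi_action_route("legacy.api.pypi.file_upload", "file_upload"), which is defined outside this patch, so UPLOAD_URL and the ":action" field are assumptions modelled on the historical distutils upload client; every other field name mirrors those exercised in tests/unit/legacy/api/test_pypi.py.

    # Hypothetical client-side sketch, not part of this patch. UPLOAD_URL and
    # the ":action" field are assumptions; the remaining form fields mirror
    # those the upload view validates (metadata_version, name, version,
    # filetype, pyversion, md5_digest, content).
    import hashlib
    import os

    import requests

    UPLOAD_URL = "https://warehouse.example/pypi"  # placeholder endpoint

    def upload_sdist(path, name, version, auth):
        with open(path, "rb") as fp:
            content = fp.read()

        data = {
            ":action": "file_upload",   # assumed legacy action parameter
            "protocol_version": "1",
            "metadata_version": "1.2",
            "name": name,
            "version": version,
            "filetype": "sdist",
            "pyversion": "source",
            # The view re-hashes the uploaded body and compares it to this
            # value with hmac.compare_digest(), so it must match exactly.
            "md5_digest": hashlib.md5(content).hexdigest(),
        }
        files = {"content": (os.path.basename(path), content)}

        resp = requests.post(UPLOAD_URL, data=data, files=files, auth=auth)
        # Errors are reported on the HTTP status line, e.g. "400 File too
        # large." or "400 Invalid file extension.", as asserted in the tests.
        resp.raise_for_status()
        return resp

Note that both the uploaded file and any detached gpg_signature are capped at MAXIMUM_FILESIZE (60 MiB) and the signature must be ASCII-armored, so a client should verify both before posting.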