diff --git a/docs/api-reference/json.rst b/docs/api-reference/json.rst index be88035eafa0..3e97c4e53e3f 100644 --- a/docs/api-reference/json.rst +++ b/docs/api-reference/json.rst @@ -172,16 +172,27 @@ Project Release ------- +.. attention:: + Previously this response included the ``releases`` key, which listed the URLs + of *all* files for every release of this project on PyPI. Due to stability + concerns, this had to be removed from the release-specific page, which now + **ONLY** serves data specific to that release. + + To access all files, you should preferably use the Simple API, or otherwise + use the non-versioned JSON API at ``/pypi/<project_name>/json``. + + .. http:get:: /pypi/<project_name>/<version>/json Returns metadata about an individual release at a specific version, - otherwise identical to ``/pypi/<project_name>/json``. + otherwise identical to ``/pypi/<project_name>/json`` minus the + ``releases`` key. **Example Request**: .. code:: http - GET /pypi/sampleproject/1.0/json HTTP/1.1 + GET /pypi/sampleproject/1.2.0/json HTTP/1.1 Host: pypi.org Accept: application/json @@ -220,53 +231,49 @@ Release "requires_dist": null, "requires_python": null, "summary": "", - "version": "1.0", + "version": "1.2.0", "yanked": false, "yanked_reason": null }, "last_serial": 1591652, - "releases": { - "1.0": [], - "1.2.0": [ - { - "comment_text": "", - "digests": { - "md5": "bab8eb22e6710eddae3c6c7ac3453bd9", - "sha256": "7a7a8b91086deccc54cac8d631e33f6a0e232ce5775c6be3dc44f86c2154019d" - }, - "downloads": -1, - "filename": "sampleproject-1.2.0-py2.py3-none-any.whl", - "has_sig": false, - "md5_digest": "bab8eb22e6710eddae3c6c7ac3453bd9", - "packagetype": "bdist_wheel", - "python_version": "2.7", - "size": 3795, - "upload_time_iso_8601": "2015-06-14T14:38:05.869374Z", - "url": "https://files.pythonhosted.org/packages/30/52/547eb3719d0e872bdd6fe3ab60cef92596f95262e925e1943f68f840df88/sampleproject-1.2.0-py2.py3-none-any.whl", - "yanked": false, - "yanked_reason": null + "urls": [ + { + "comment_text": "", + "digests": { + "md5": "bab8eb22e6710eddae3c6c7ac3453bd9", + "sha256": "7a7a8b91086deccc54cac8d631e33f6a0e232ce5775c6be3dc44f86c2154019d" }, - { - "comment_text": "", - "digests": { - "md5": "d3bd605f932b3fb6e91f49be2d6f9479", - "sha256": "3427a8a5dd0c1e176da48a44efb410875b3973bd9843403a0997e4187c408dc1" - }, - "downloads": -1, - "filename": "sampleproject-1.2.0.tar.gz", - "has_sig": false, - "md5_digest": "d3bd605f932b3fb6e91f49be2d6f9479", - "packagetype": "sdist", - "python_version": "source", - "size": 3148, - "upload_time_iso_8601": "2015-06-14T14:37:56.394783Z", - "url": "https://files.pythonhosted.org/packages/eb/45/79be82bdeafcecb9dca474cad4003e32ef8e4a0dec6abbd4145ccb02abe1/sampleproject-1.2.0.tar.gz", - "yanked": false, - "yanked_reason": null - } - ] - }, - "urls": [], + "downloads": -1, + "filename": "sampleproject-1.2.0-py2.py3-none-any.whl", + "has_sig": false, + "md5_digest": "bab8eb22e6710eddae3c6c7ac3453bd9", + "packagetype": "bdist_wheel", + "python_version": "2.7", + "size": 3795, + "upload_time_iso_8601": "2015-06-14T14:38:05.869374Z", + "url": "https://files.pythonhosted.org/packages/30/52/547eb3719d0e872bdd6fe3ab60cef92596f95262e925e1943f68f840df88/sampleproject-1.2.0-py2.py3-none-any.whl", + "yanked": false, + "yanked_reason": null + }, + { + "comment_text": "", + "digests": { + "md5": "d3bd605f932b3fb6e91f49be2d6f9479", + "sha256": "3427a8a5dd0c1e176da48a44efb410875b3973bd9843403a0997e4187c408dc1" + }, + "downloads": -1, + "filename": "sampleproject-1.2.0.tar.gz", + "has_sig": false, + "md5_digest": "d3bd605f932b3fb6e91f49be2d6f9479",
+ "packagetype": "sdist", + "python_version": "source", + "size": 3148, + "upload_time_iso_8601": "2015-06-14T14:37:56.394783Z", + "url": "https://files.pythonhosted.org/packages/eb/45/79be82bdeafcecb9dca474cad4003e32ef8e4a0dec6abbd4145ccb02abe1/sampleproject-1.2.0.tar.gz", + "yanked": false, + "yanked_reason": null + } + ], "vulnerabilities": [] } diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index 04e47e99b066..b753aca6f8d8 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -10,8 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json as _json - import pretend from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound @@ -69,23 +67,6 @@ def test_missing_release(self, db_request): assert isinstance(resp, HTTPNotFound) _assert_has_cors_headers(resp.headers) - def test_calls_release_detail(self, monkeypatch, db_request): - project = ProjectFactory.create() - - ReleaseFactory.create(project=project, version="1.0") - ReleaseFactory.create(project=project, version="2.0") - - release = ReleaseFactory.create(project=project, version="3.0") - - response = pretend.stub() - json_release = pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(json, "json_release", json_release) - - resp = json.json_project(project, db_request) - - assert resp is response - assert json_release.calls == [pretend.call(release, db_request)] - def test_with_prereleases(self, monkeypatch, db_request): project = ProjectFactory.create() @@ -95,14 +76,18 @@ def test_with_prereleases(self, monkeypatch, db_request): release = ReleaseFactory.create(project=project, version="3.0") - response = pretend.stub() - json_release = pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(json, "json_release", json_release) + data = pretend.stub() + json_data = pretend.call_recorder( + lambda request, project, release, *, all_releases: data + ) + monkeypatch.setattr(json, "_json_data", json_data) - resp = json.json_project(project, db_request) + rvalue = json.json_project(project, db_request) - assert resp is response - assert json_release.calls == [pretend.call(release, db_request)] + assert rvalue is data + assert json_data.calls == [ + pretend.call(db_request, project, release, all_releases=True) + ] def test_only_prereleases(self, monkeypatch, db_request): project = ProjectFactory.create() @@ -112,14 +97,18 @@ def test_only_prereleases(self, monkeypatch, db_request): release = ReleaseFactory.create(project=project, version="3.0.dev0") - response = pretend.stub() - json_release = pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(json, "json_release", json_release) + data = pretend.stub() + json_data = pretend.call_recorder( + lambda request, project, release, *, all_releases: data + ) + monkeypatch.setattr(json, "_json_data", json_data) - resp = json.json_project(project, db_request) + rvalue = json.json_project(project, db_request) - assert resp is response - assert json_release.calls == [pretend.call(release, db_request)] + assert rvalue is data + assert json_data.calls == [ + pretend.call(db_request, project, release, all_releases=True) + ] def test_all_releases_yanked(self, monkeypatch, db_request): """ @@ -135,14 +124,18 @@ def test_all_releases_yanked(self, monkeypatch, db_request): release = ReleaseFactory.create(project=project, version="3.0", yanked=True) - response = pretend.stub() - json_release = 
pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(json, "json_release", json_release) + data = pretend.stub() + json_data = pretend.call_recorder( + lambda request, project, release, *, all_releases: data + ) + monkeypatch.setattr(json, "_json_data", json_data) - resp = json.json_project(project, db_request) + rvalue = json.json_project(project, db_request) - assert resp is response - assert json_release.calls == [pretend.call(release, db_request)] + assert rvalue is data + assert json_data.calls == [ + pretend.call(db_request, project, release, all_releases=True) + ] def test_latest_release_yanked(self, monkeypatch, db_request): """ @@ -158,14 +151,18 @@ def test_latest_release_yanked(self, monkeypatch, db_request): release = ReleaseFactory.create(project=project, version="2.0") - response = pretend.stub() - json_release = pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(json, "json_release", json_release) + data = pretend.stub() + json_data = pretend.call_recorder( + lambda request, project, release, *, all_releases: data + ) + monkeypatch.setattr(json, "_json_data", json_data) - resp = json.json_project(project, db_request) + rvalue = json.json_project(project, db_request) - assert resp is response - assert json_release.calls == [pretend.call(release, db_request)] + assert rvalue is data + assert json_data.calls == [ + pretend.call(db_request, project, release, all_releases=True) + ] def test_all_non_prereleases_yanked(self, monkeypatch, db_request): """ @@ -182,14 +179,227 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): release = ReleaseFactory.create(project=project, version="2.0.dev0") - response = pretend.stub() - json_release = pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(json, "json_release", json_release) + data = pretend.stub() + json_data = pretend.call_recorder( + lambda request, project, release, *, all_releases: data + ) + monkeypatch.setattr(json, "_json_data", json_data) - resp = json.json_project(project, db_request) + rvalue = json.json_project(project, db_request) - assert resp is response - assert json_release.calls == [pretend.call(release, db_request)] + assert rvalue is data + assert json_data.calls == [ + pretend.call(db_request, project, release, all_releases=True) + ] + + def test_renders(self, pyramid_config, db_request, db_session): + project = ProjectFactory.create(has_docs=True) + description_content_type = "text/x-rst" + url = "/the/fake/url/" + project_urls = [ + "url," + url, + "Homepage,https://example.com/home2/", + "Source Code,https://example.com/source-code/", + "uri,http://john.doe@www.example.com:123/forum/questions/?tag=networking&order=newest#top", # noqa: E501 + "ldap,ldap://[2001:db8::7]/c=GB?objectClass?one", + "tel,tel:+1-816-555-1212", + "telnet,telnet://192.0.2.16:80/", + "urn,urn:oasis:names:specification:docbook:dtd:xml:4.1.2", + "reservedchars,http://example.com?&$+/:;=@#", # Commas don't work! 
+ r"unsafechars,http://example.com <>[]{}|\^%", + ] + expected_urls = [] + for project_url in sorted( + project_urls, key=lambda u: u.split(",", 1)[0].strip().lower() + ): + expected_urls.append(tuple(project_url.split(",", 1))) + expected_urls = dict(tuple(expected_urls)) + + releases = [ + ReleaseFactory.create(project=project, version=v) + for v in ["0.1", "1.0", "2.0"] + ] + releases += [ + ReleaseFactory.create( + project=project, + version="3.0", + description=DescriptionFactory.create( + content_type=description_content_type + ), + ) + ] + + for urlspec in project_urls: + label, _, purl = urlspec.partition(",") + db_session.add( + ReleaseURL( + release=releases[3], + name=label.strip(), + url=purl.strip(), + ) + ) + + files = [ + FileFactory.create( + release=r, + filename="{}-{}.tar.gz".format(project.name, r.version), + python_version="source", + size=200, + has_signature=True, + ) + for r in releases[1:] + ] + user = UserFactory.create() + JournalEntryFactory.reset_sequence() + je = JournalEntryFactory.create(name=project.name, submitted_by=user) + + db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + + result = json.json_project(project, db_request) + + assert set(db_request.route_url.calls) == { + pretend.call("packaging.file", path=files[0].path), + pretend.call("packaging.file", path=files[1].path), + pretend.call("packaging.file", path=files[2].path), + pretend.call("packaging.project", name=project.name), + pretend.call( + "packaging.release", name=project.name, version=releases[3].version + ), + pretend.call("legacy.docs", project=project.name), + } + + _assert_has_cors_headers(db_request.response.headers) + assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id) + + assert result == { + "info": { + "author": None, + "author_email": None, + "bugtrack_url": None, + "classifiers": [], + "description_content_type": description_content_type, + "description": releases[-1].description.raw, + "docs_url": "/the/fake/url/", + "download_url": None, + "downloads": {"last_day": -1, "last_week": -1, "last_month": -1}, + "home_page": None, + "keywords": None, + "license": None, + "maintainer": None, + "maintainer_email": None, + "name": project.name, + "package_url": "/the/fake/url/", + "platform": None, + "project_url": "/the/fake/url/", + "project_urls": expected_urls, + "release_url": "/the/fake/url/", + "requires_dist": None, + "requires_python": None, + "summary": None, + "yanked": False, + "yanked_reason": None, + "version": "3.0", + }, + "releases": { + "0.1": [], + "1.0": [ + { + "comment_text": None, + "downloads": -1, + "filename": files[0].filename, + "has_sig": True, + "md5_digest": files[0].md5_digest, + "digests": { + "md5": files[0].md5_digest, + "sha256": files[0].sha256_digest, + }, + "packagetype": None, + "python_version": "source", + "size": 200, + "upload_time": files[0].upload_time.strftime( + "%Y-%m-%dT%H:%M:%S" + ), + "upload_time_iso_8601": files[0].upload_time.isoformat() + "Z", + "url": "/the/fake/url/", + "requires_python": None, + "yanked": False, + "yanked_reason": None, + } + ], + "2.0": [ + { + "comment_text": None, + "downloads": -1, + "filename": files[1].filename, + "has_sig": True, + "md5_digest": files[1].md5_digest, + "digests": { + "md5": files[1].md5_digest, + "sha256": files[1].sha256_digest, + }, + "packagetype": None, + "python_version": "source", + "size": 200, + "upload_time": files[1].upload_time.strftime( + "%Y-%m-%dT%H:%M:%S" + ), + "upload_time_iso_8601": files[1].upload_time.isoformat() + "Z", + "url": 
"/the/fake/url/", + "requires_python": None, + "yanked": False, + "yanked_reason": None, + } + ], + "3.0": [ + { + "comment_text": None, + "downloads": -1, + "filename": files[2].filename, + "has_sig": True, + "md5_digest": files[2].md5_digest, + "digests": { + "md5": files[2].md5_digest, + "sha256": files[2].sha256_digest, + }, + "packagetype": None, + "python_version": "source", + "size": 200, + "upload_time": files[2].upload_time.strftime( + "%Y-%m-%dT%H:%M:%S" + ), + "upload_time_iso_8601": files[2].upload_time.isoformat() + "Z", + "url": "/the/fake/url/", + "requires_python": None, + "yanked": False, + "yanked_reason": None, + } + ], + }, + "urls": [ + { + "comment_text": None, + "downloads": -1, + "filename": files[2].filename, + "has_sig": True, + "md5_digest": files[2].md5_digest, + "digests": { + "md5": files[2].md5_digest, + "sha256": files[2].sha256_digest, + }, + "packagetype": None, + "python_version": "source", + "size": 200, + "upload_time": files[2].upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": files[2].upload_time.isoformat() + "Z", + "url": "/the/fake/url/", + "requires_python": None, + "yanked": False, + "yanked_reason": None, + } + ], + "last_serial": je.id, + "vulnerabilities": [], + } class TestJSONProjectSlash: @@ -301,11 +511,9 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) - json.json_release(releases[3], db_request) + result = json.json_release(releases[3], db_request) assert set(db_request.route_url.calls) == { - pretend.call("packaging.file", path=files[0].path), - pretend.call("packaging.file", path=files[1].path), pretend.call("packaging.file", path=files[2].path), pretend.call("packaging.project", name=project.name), pretend.call( @@ -317,143 +525,60 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): _assert_has_cors_headers(db_request.response.headers) assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id) - assert db_request.response.body == _json.dumps( - { - "info": { - "author": None, - "author_email": None, - "bugtrack_url": None, - "classifiers": [], - "description_content_type": description_content_type, - "description": releases[-1].description.raw, - "docs_url": "/the/fake/url/", - "download_url": None, - "downloads": {"last_day": -1, "last_week": -1, "last_month": -1}, - "home_page": None, - "keywords": None, - "license": None, - "maintainer": None, - "maintainer_email": None, - "name": project.name, - "package_url": "/the/fake/url/", - "platform": None, - "project_url": "/the/fake/url/", - "project_urls": expected_urls, - "release_url": "/the/fake/url/", - "requires_dist": None, + assert result == { + "info": { + "author": None, + "author_email": None, + "bugtrack_url": None, + "classifiers": [], + "description_content_type": description_content_type, + "description": releases[-1].description.raw, + "docs_url": "/the/fake/url/", + "download_url": None, + "downloads": {"last_day": -1, "last_week": -1, "last_month": -1}, + "home_page": None, + "keywords": None, + "license": None, + "maintainer": None, + "maintainer_email": None, + "name": project.name, + "package_url": "/the/fake/url/", + "platform": None, + "project_url": "/the/fake/url/", + "project_urls": expected_urls, + "release_url": "/the/fake/url/", + "requires_dist": None, + "requires_python": None, + "summary": None, + "yanked": False, + "yanked_reason": None, + "version": "3.0", + }, + "urls": [ + { + "comment_text": None, + 
"downloads": -1, + "filename": files[2].filename, + "has_sig": True, + "md5_digest": files[2].md5_digest, + "digests": { + "md5": files[2].md5_digest, + "sha256": files[2].sha256_digest, + }, + "packagetype": None, + "python_version": "source", + "size": 200, + "upload_time": files[2].upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": files[2].upload_time.isoformat() + "Z", + "url": "/the/fake/url/", "requires_python": None, - "summary": None, "yanked": False, "yanked_reason": None, - "version": "3.0", - }, - "releases": { - "0.1": [], - "1.0": [ - { - "comment_text": None, - "downloads": -1, - "filename": files[0].filename, - "has_sig": True, - "md5_digest": files[0].md5_digest, - "digests": { - "md5": files[0].md5_digest, - "sha256": files[0].sha256_digest, - }, - "packagetype": None, - "python_version": "source", - "size": 200, - "upload_time": files[0].upload_time.strftime( - "%Y-%m-%dT%H:%M:%S" - ), - "upload_time_iso_8601": files[0].upload_time.isoformat() - + "Z", - "url": "/the/fake/url/", - "requires_python": None, - "yanked": False, - "yanked_reason": None, - } - ], - "2.0": [ - { - "comment_text": None, - "downloads": -1, - "filename": files[1].filename, - "has_sig": True, - "md5_digest": files[1].md5_digest, - "digests": { - "md5": files[1].md5_digest, - "sha256": files[1].sha256_digest, - }, - "packagetype": None, - "python_version": "source", - "size": 200, - "upload_time": files[1].upload_time.strftime( - "%Y-%m-%dT%H:%M:%S" - ), - "upload_time_iso_8601": files[1].upload_time.isoformat() - + "Z", - "url": "/the/fake/url/", - "requires_python": None, - "yanked": False, - "yanked_reason": None, - } - ], - "3.0": [ - { - "comment_text": None, - "downloads": -1, - "filename": files[2].filename, - "has_sig": True, - "md5_digest": files[2].md5_digest, - "digests": { - "md5": files[2].md5_digest, - "sha256": files[2].sha256_digest, - }, - "packagetype": None, - "python_version": "source", - "size": 200, - "upload_time": files[2].upload_time.strftime( - "%Y-%m-%dT%H:%M:%S" - ), - "upload_time_iso_8601": files[2].upload_time.isoformat() - + "Z", - "url": "/the/fake/url/", - "requires_python": None, - "yanked": False, - "yanked_reason": None, - } - ], - }, - "urls": [ - { - "comment_text": None, - "downloads": -1, - "filename": files[2].filename, - "has_sig": True, - "md5_digest": files[2].md5_digest, - "digests": { - "md5": files[2].md5_digest, - "sha256": files[2].sha256_digest, - }, - "packagetype": None, - "python_version": "source", - "size": 200, - "upload_time": files[2].upload_time.strftime( - "%Y-%m-%dT%H:%M:%S" - ), - "upload_time_iso_8601": files[2].upload_time.isoformat() + "Z", - "url": "/the/fake/url/", - "requires_python": None, - "yanked": False, - "yanked_reason": None, - } - ], - "last_serial": je.id, - "vulnerabilities": [], - }, - sort_keys=True, - ).encode("utf8") + } + ], + "last_serial": je.id, + "vulnerabilities": [], + } def test_minimal_renders(self, pyramid_config, db_request): project = ProjectFactory.create(has_docs=False) @@ -473,7 +598,7 @@ def test_minimal_renders(self, pyramid_config, db_request): url = "/the/fake/url/" db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) - json.json_release(release, db_request) + result = json.json_release(release, db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=file.path), @@ -486,89 +611,57 @@ def test_minimal_renders(self, pyramid_config, db_request): _assert_has_cors_headers(db_request.response.headers) assert 
db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id) - assert db_request.response.body == _json.dumps( - { - "info": { - "author": None, - "author_email": None, - "bugtrack_url": None, - "classifiers": [], - "description_content_type": release.description.content_type, - "description": release.description.raw, - "docs_url": None, - "download_url": None, - "downloads": {"last_day": -1, "last_week": -1, "last_month": -1}, - "home_page": None, - "keywords": None, - "license": None, - "maintainer": None, - "maintainer_email": None, - "name": project.name, - "package_url": "/the/fake/url/", - "platform": None, - "project_url": "/the/fake/url/", - "project_urls": None, - "release_url": "/the/fake/url/", - "requires_dist": None, + assert result == { + "info": { + "author": None, + "author_email": None, + "bugtrack_url": None, + "classifiers": [], + "description_content_type": release.description.content_type, + "description": release.description.raw, + "docs_url": None, + "download_url": None, + "downloads": {"last_day": -1, "last_week": -1, "last_month": -1}, + "home_page": None, + "keywords": None, + "license": None, + "maintainer": None, + "maintainer_email": None, + "name": project.name, + "package_url": "/the/fake/url/", + "platform": None, + "project_url": "/the/fake/url/", + "project_urls": None, + "release_url": "/the/fake/url/", + "requires_dist": None, + "requires_python": None, + "summary": None, + "yanked": False, + "yanked_reason": None, + "version": "0.1", + }, + "urls": [ + { + "comment_text": None, + "downloads": -1, + "filename": file.filename, + "has_sig": True, + "md5_digest": file.md5_digest, + "digests": {"md5": file.md5_digest, "sha256": file.sha256_digest}, + "packagetype": None, + "python_version": "source", + "size": 200, + "upload_time": file.upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": file.upload_time.isoformat() + "Z", + "url": "/the/fake/url/", "requires_python": None, - "summary": None, "yanked": False, "yanked_reason": None, - "version": "0.1", - }, - "releases": { - "0.1": [ - { - "comment_text": None, - "downloads": -1, - "filename": file.filename, - "has_sig": True, - "md5_digest": file.md5_digest, - "digests": { - "md5": file.md5_digest, - "sha256": file.sha256_digest, - }, - "packagetype": None, - "python_version": "source", - "size": 200, - "upload_time": file.upload_time.strftime( - "%Y-%m-%dT%H:%M:%S" - ), - "upload_time_iso_8601": file.upload_time.isoformat() + "Z", - "url": "/the/fake/url/", - "requires_python": None, - "yanked": False, - "yanked_reason": None, - } - ] - }, - "urls": [ - { - "comment_text": None, - "downloads": -1, - "filename": file.filename, - "has_sig": True, - "md5_digest": file.md5_digest, - "digests": { - "md5": file.md5_digest, - "sha256": file.sha256_digest, - }, - "packagetype": None, - "python_version": "source", - "size": 200, - "upload_time": file.upload_time.strftime("%Y-%m-%dT%H:%M:%S"), - "upload_time_iso_8601": file.upload_time.isoformat() + "Z", - "url": "/the/fake/url/", - "requires_python": None, - "yanked": False, - "yanked_reason": None, - } - ], - "last_serial": je.id, - "vulnerabilities": [], - }, - sort_keys=True, - ).encode("utf8") + } + ], + "last_serial": je.id, + "vulnerabilities": [], + } def test_vulnerabilities_renders(self, pyramid_config, db_request): project = ProjectFactory.create(has_docs=False) @@ -588,7 +681,7 @@ def test_vulnerabilities_renders(self, pyramid_config, db_request): result = json.json_release(release, db_request) - assert 
_json.loads(result.body)["vulnerabilities"] == [ + assert result["vulnerabilities"] == [ { "id": "PYSEC-001", "source": "the source", diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py index bd911ae64806..6b4b7c0a0626 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -10,8 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json - from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound from pyramid.view import view_config from sqlalchemy.orm import Load @@ -50,70 +48,7 @@ ] -@view_config( - route_name="legacy.api.json.project", - context=Project, - renderer="json", - decorator=_CACHE_DECORATOR, -) -def json_project(project, request): - if project.normalized_name != request.matchdict.get( - "name", project.normalized_name - ): - return HTTPMovedPermanently( - request.current_route_path(name=project.normalized_name), - headers=_CORS_HEADERS, - ) - - try: - release = ( - request.db.query(Release) - .filter(Release.project == project) - .order_by( - Release.yanked.asc(), - Release.is_prerelease.nullslast(), - Release._pypi_ordering.desc(), - ) - .limit(1) - .one() - ) - except NoResultFound: - return HTTPNotFound(headers=_CORS_HEADERS) - - return json_release(release, request) - - -@view_config( - route_name="legacy.api.json.project_slash", - context=Project, - decorator=_CACHE_DECORATOR, -) -def json_project_slash(project, request): - return json_project(project, request) - - -@view_config( - route_name="legacy.api.json.release", - context=Release, - decorator=_CACHE_DECORATOR, -) -def json_release(release, request): - project = release.project - - if project.normalized_name != request.matchdict.get( - "name", project.normalized_name - ): - return HTTPMovedPermanently( - request.current_route_path(name=project.normalized_name), - headers=_CORS_HEADERS, - ) - - # Apply CORS headers. - request.response.headers.update(_CORS_HEADERS) - - # Get the latest serial number for this project. - request.response.headers["X-PyPI-Last-Serial"] = str(project.last_serial) - +def _json_data(request, project, release, *, all_releases): # Get all of the releases and files for this project. release_files = ( request.db.query(Release, File) @@ -124,10 +59,18 @@ def json_release(release, request): ) .outerjoin(File) .filter(Release.project == project) - .order_by(Release._pypi_ordering.desc(), File.filename) - .all() ) + # If we're not looking for all_releases, then we'll filter this further + # to just this release. + if not all_releases: + release_files = release_files.filter(Release.id == release.id) + + # Finally set an ordering, and execute the query. + release_files = release_files.order_by( + Release._pypi_ordering.desc(), File.filename + ).all() + # Map our releases + files into a dictionary that maps each release to a # list of all its files. releases = {} @@ -211,22 +154,93 @@ def json_release(release, request): "yanked_reason": release.yanked_reason or None, }, "urls": releases[release.version], - "releases": releases, "vulnerabilities": vulnerabilities, "last_serial": project.last_serial, } - # Stream the results to the client instead of building them up, this will - # make it so that the JSON encoder uses less memory overall. 
- resp = request.response - resp.content_type = "application/json" - resp.app_iter = ( - c.encode("utf8") - for c in json.JSONEncoder(sort_keys=True, separators=(", ", ": ")).iterencode( - data + if all_releases: + data["releases"] = releases + + return data + + +@view_config( + route_name="legacy.api.json.project", + context=Project, + renderer="json", + decorator=_CACHE_DECORATOR, +) +def json_project(project, request): + if project.normalized_name != request.matchdict.get( + "name", project.normalized_name + ): + return HTTPMovedPermanently( + request.current_route_path(name=project.normalized_name), + headers=_CORS_HEADERS, ) - ) - return resp + + try: + release = ( + request.db.query(Release) + .filter(Release.project == project) + .order_by( + Release.yanked.asc(), + Release.is_prerelease.nullslast(), + Release._pypi_ordering.desc(), + ) + .limit(1) + .one() + ) + except NoResultFound: + return HTTPNotFound(headers=_CORS_HEADERS) + + # Apply CORS headers. + request.response.headers.update(_CORS_HEADERS) + + # Get the latest serial number for this project. + request.response.headers["X-PyPI-Last-Serial"] = str(project.last_serial) + + # Build our json data, including all releases because this is the root url + # and changing this breaks bandersnatch + # TODO: Eventually it would be nice to drop all_releases. + return _json_data(request, project, release, all_releases=True) + + +@view_config( + route_name="legacy.api.json.project_slash", + context=Project, + renderer="json", + decorator=_CACHE_DECORATOR, +) +def json_project_slash(project, request): + return json_project(project, request) + + +@view_config( + route_name="legacy.api.json.release", + context=Release, + renderer="json", + decorator=_CACHE_DECORATOR, +) +def json_release(release, request): + project = release.project + + if project.normalized_name != request.matchdict.get( + "name", project.normalized_name + ): + return HTTPMovedPermanently( + request.current_route_path(name=project.normalized_name), + headers=_CORS_HEADERS, + ) + + # Apply CORS headers. + request.response.headers.update(_CORS_HEADERS) + + # Get the latest serial number for this project. + request.response.headers["X-PyPI-Last-Serial"] = str(project.last_serial) + + # Build our json data, with only this release, because this is a versioned url + return _json_data(request, project, release, all_releases=False) @view_config(
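For API consumers, the practical upshot of this change is that the versioned endpoint now exposes a release's files only under the ``urls`` key, while the full per-release listing survives solely on the unversioned project endpoint. The sketch below is illustrative only, assuming nothing beyond the Python standard library and the endpoints and keys documented in this diff (``sampleproject`` and ``1.2.0`` are placeholder values); it shows one way a client might read files for a single release and fall back to the project-level endpoint when it genuinely needs every release:

.. code:: python

    import json
    from urllib.request import urlopen

    BASE = "https://pypi.org/pypi"


    def release_files(project, version):
        # Versioned endpoint: files for this release only, under "urls".
        with urlopen(f"{BASE}/{project}/{version}/json") as resp:
            return json.load(resp)["urls"]


    def all_release_files(project):
        # Unversioned endpoint: still carries the full "releases" mapping.
        with urlopen(f"{BASE}/{project}/json") as resp:
            return json.load(resp)["releases"]


    for file in release_files("sampleproject", "1.2.0"):
        print(file["filename"], file["digests"]["sha256"])

Per the attention note in the docs above, clients that mirror every file (the bandersnatch case called out in the code comments) should prefer the Simple API; the unversioned JSON endpoint retains ``releases`` only for backwards compatibility.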