From a45892a1a5b651957db9beab41d437cc7e5efc61 Mon Sep 17 00:00:00 2001 From: Barry Warsaw Date: Fri, 20 Sep 2024 18:09:50 -0700 Subject: [PATCH 01/26] Fix cherry-pick --- warehouse/packaging/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index e0c27e9bf0b2..dfb8c56edce0 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -612,6 +612,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() + published: Mapped[datetime_now | None] description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), From 831095a782c98278070bbee088cf588390bb6bcf Mon Sep 17 00:00:00 2001 From: Alan Velasco Date: Mon, 2 May 2022 14:24:40 -0600 Subject: [PATCH 02/26] Add `published` to the `ReleaseFactory` --- tests/common/db/packaging.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 3b97b20cbd92..94bb396407f2 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -100,6 +100,9 @@ class Meta: uploader = factory.SubFactory(UserFactory) description = factory.SubFactory(DescriptionFactory) + published = factory.Faker( + "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) + ) class FileFactory(WarehouseFactory): From 5846e3045dac3b3e350425944c9a5f5e659436b7 Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 6 Dec 2024 14:26:19 +0100 Subject: [PATCH 03/26] Add migrations --- .../3e7bf3217166_add_published_in_release.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py diff --git a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py new file mode 100644 index 000000000000..7303a6f00062 --- /dev/null +++ b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +add published in Release + +Revision ID: 3e7bf3217166 +Revises: f7720656a33c +Create Date: 2024-12-06 11:04:21.907167 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "3e7bf3217166" +down_revision = "f7720656a33c" + + +def upgrade(): + op.add_column( + "releases", + sa.Column( + "published", sa.DateTime(), server_default=sa.text("now()"), nullable=True + ), + ) + + op.execute( + """ + UPDATE releases + SET published = created + """ + ) + + +def downgrade(): + op.drop_column("releases", "published") From 60f4a0123798d173d336deea7deae512573240cc Mon Sep 17 00:00:00 2001 From: Alexis Date: Mon, 9 Dec 2024 14:47:59 +0100 Subject: [PATCH 04/26] Add a default value for Release.published field. 
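The default is supplied in the upload path rather than as a database server
default: the `published` column stays nullable, and the release row is stamped
when it is created. Roughly, the two relevant lines (a sketch only; the exact
changes are in the diff below):

    published: Mapped[datetime.datetime | None]          # packaging/models.py
    release = Release(..., published=datetime.now())     # forklift/legacy.py

Releases created without an explicit value keep `published = NULL`, which the
new `test_without_published_date` test covers.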
--- tests/common/db/packaging.py | 3 ++- tests/unit/packaging/test_views.py | 10 ++++++++++ warehouse/forklift/legacy.py | 2 ++ .../versions/3e7bf3217166_add_published_in_release.py | 4 +--- warehouse/packaging/models.py | 3 ++- 5 files changed, 17 insertions(+), 5 deletions(-) diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 94bb396407f2..68f728f44a93 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -123,7 +123,8 @@ class Meta: lambda o: hashlib.blake2b(o.filename.encode("utf8"), digest_size=32).hexdigest() ) upload_time = factory.Faker( - "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) + "date_time_between_dates", + datetime_start=datetime.datetime(2008, 1, 1), ) path = factory.LazyAttribute( lambda o: "/".join( diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index bf7aea3bcde5..130dedec8660 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -9,6 +9,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from datetime import datetime import pretend import pytest @@ -323,6 +324,15 @@ def test_long_singleline_license(self, db_request): "characters, it's really so lo..." ) + def test_created_with_published(self, db_request): + release = ReleaseFactory.create() + assert release.published > datetime(year=2008, month=1, day=1) + + def test_without_published_date(self, db_request): + release = ReleaseFactory.create(published=None) + db_request.db.flush() + assert release.published is None + class TestReportMalwareButton: def test_report_malware_button(self): diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 795dfd113075..42249741c66b 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -19,6 +19,7 @@ import zipfile from cgi import FieldStorage +from datetime import datetime import packaging.requirements import packaging.specifiers @@ -902,6 +903,7 @@ def file_upload(request): }, uploader=request.user if request.user else None, uploaded_via=request.user_agent, + published=datetime.now(), ) request.db.add(release) is_new_release = True diff --git a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py index 7303a6f00062..9bdb059c1f4c 100644 --- a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py +++ b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py @@ -28,9 +28,7 @@ def upgrade(): op.add_column( "releases", - sa.Column( - "published", sa.DateTime(), server_default=sa.text("now()"), nullable=True - ), + sa.Column("published", sa.DateTime(), nullable=True), ) op.execute( diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index dfb8c56edce0..b53c64c8b63a 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -11,6 +11,7 @@ # limitations under the License. 
from __future__ import annotations +import datetime import enum import typing @@ -612,7 +613,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() - published: Mapped[datetime_now | None] + published: Mapped[datetime.datetime | None] description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), From 60de6e870192093cf374dfd19a22765acf4863a7 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 10 Dec 2024 14:46:53 +0100 Subject: [PATCH 05/26] Change to a boolean field --- tests/common/db/packaging.py | 3 - tests/unit/forklift/test_legacy.py | 1 + tests/unit/packaging/test_views.py | 8 +- warehouse/forklift/legacy.py | 3 +- warehouse/locale/messages.pot | 174 ++++++++++-------- ...py => bd2bf218e63f_add_published_field.py} | 19 +- warehouse/packaging/models.py | 5 +- .../templates/manage/project/history.html | 8 + 8 files changed, 113 insertions(+), 108 deletions(-) rename warehouse/migrations/versions/{3e7bf3217166_add_published_in_release.py => bd2bf218e63f_add_published_field.py} (73%) diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 68f728f44a93..470f233a34c4 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -100,9 +100,6 @@ class Meta: uploader = factory.SubFactory(UserFactory) description = factory.SubFactory(DescriptionFactory) - published = factory.Faker( - "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) - ) class FileFactory(WarehouseFactory): diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index a7b4fb6c4dde..782271821d20 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3613,6 +3613,7 @@ def test_upload_succeeds_creates_release( else None ), "uploaded_via_trusted_publisher": not test_with_user, + "published": True, } fileadd_event = { diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index 130dedec8660..c3f84fcb25bb 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -9,7 +9,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime import pretend import pytest @@ -326,12 +325,7 @@ def test_long_singleline_license(self, db_request): def test_created_with_published(self, db_request): release = ReleaseFactory.create() - assert release.published > datetime(year=2008, month=1, day=1) - - def test_without_published_date(self, db_request): - release = ReleaseFactory.create(published=None) - db_request.db.flush() - assert release.published is None + assert release.published is True class TestReportMalwareButton: diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 42249741c66b..68a4dec4b6d1 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -19,7 +19,6 @@ import zipfile from cgi import FieldStorage -from datetime import datetime import packaging.requirements import packaging.specifiers @@ -903,7 +902,6 @@ def file_upload(request): }, uploader=request.user if request.user else None, uploaded_via=request.user_agent, - published=datetime.now(), ) request.db.add(release) is_new_release = True @@ -934,6 +932,7 @@ def file_upload(request): else None ), "uploaded_via_trusted_publisher": bool(request.oidc_publisher), + "published": True, }, ) diff --git a/warehouse/locale/messages.pot b/warehouse/locale/messages.pot index 3acee6bc0bda..d867fe5123f6 100644 --- a/warehouse/locale/messages.pot +++ b/warehouse/locale/messages.pot @@ -781,7 +781,7 @@ msgid "Your report has been recorded. Thank you for your help." msgstr "" #: warehouse/subscriptions/models.py:35 -#: warehouse/templates/manage/project/history.html:230 +#: warehouse/templates/manage/project/history.html:238 msgid "Active" msgstr "" @@ -1757,10 +1757,10 @@ msgstr "" #: warehouse/templates/accounts/register.html:64 #: warehouse/templates/manage/account.html:139 #: warehouse/templates/manage/account.html:480 -#: warehouse/templates/manage/project/history.html:301 -#: warehouse/templates/manage/project/history.html:312 -#: warehouse/templates/manage/project/history.html:323 -#: warehouse/templates/manage/project/history.html:334 +#: warehouse/templates/manage/project/history.html:309 +#: warehouse/templates/manage/project/history.html:320 +#: warehouse/templates/manage/project/history.html:331 +#: warehouse/templates/manage/project/history.html:342 #: warehouse/templates/manage/project/settings.html:224 #: warehouse/templates/manage/project/settings.html:285 #: warehouse/templates/manage/project/settings.html:291 @@ -3446,7 +3446,7 @@ msgstr "" #: warehouse/templates/manage/account.html:564 #: warehouse/templates/manage/account.html:583 -#: warehouse/templates/manage/project/history.html:272 +#: warehouse/templates/manage/project/history.html:280 #: warehouse/templates/manage/unverified-account.html:287 #: warehouse/templates/manage/unverified-account.html:306 msgid "Reason:" @@ -3634,15 +3634,15 @@ msgstr "" #: warehouse/templates/manage/account.html:684 #: warehouse/templates/manage/account.html:707 -#: warehouse/templates/manage/project/history.html:263 -#: warehouse/templates/manage/project/history.html:270 +#: warehouse/templates/manage/project/history.html:271 +#: warehouse/templates/manage/project/history.html:278 #: warehouse/templates/manage/unverified-account.html:403 #: warehouse/templates/manage/unverified-account.html:426 msgid "Token name:" msgstr "" #: warehouse/templates/manage/account.html:701 -#: warehouse/templates/manage/project/history.html:265 +#: warehouse/templates/manage/project/history.html:273 #: warehouse/templates/manage/unverified-account.html:420 msgid "API 
token removed" msgstr "" @@ -3738,7 +3738,7 @@ msgstr "" #: warehouse/templates/manage/account.html:780 #: warehouse/templates/manage/organization/history.html:201 -#: warehouse/templates/manage/project/history.html:352 +#: warehouse/templates/manage/project/history.html:360 #: warehouse/templates/manage/team/history.html:108 #: warehouse/templates/manage/unverified-account.html:466 msgid "Event" @@ -3747,8 +3747,8 @@ msgstr "" #: warehouse/templates/manage/account.html:781 #: warehouse/templates/manage/organization/history.html:202 #: warehouse/templates/manage/organization/history.html:211 -#: warehouse/templates/manage/project/history.html:353 -#: warehouse/templates/manage/project/history.html:362 +#: warehouse/templates/manage/project/history.html:361 +#: warehouse/templates/manage/project/history.html:370 #: warehouse/templates/manage/team/history.html:109 #: warehouse/templates/manage/team/history.html:118 #: warehouse/templates/manage/unverified-account.html:467 @@ -3775,7 +3775,7 @@ msgstr "" #: warehouse/templates/manage/account.html:795 #: warehouse/templates/manage/organization/history.html:217 -#: warehouse/templates/manage/project/history.html:368 +#: warehouse/templates/manage/project/history.html:376 #: warehouse/templates/manage/team/history.html:124 #: warehouse/templates/manage/unverified-account.html:481 msgid "Device Info" @@ -4104,23 +4104,23 @@ msgstr "" #: warehouse/templates/manage/manage_base.html:582 #: warehouse/templates/manage/organization/history.html:166 #: warehouse/templates/manage/project/history.html:43 -#: warehouse/templates/manage/project/history.html:97 -#: warehouse/templates/manage/project/history.html:137 -#: warehouse/templates/manage/project/history.html:182 -#: warehouse/templates/manage/project/history.html:208 -#: warehouse/templates/manage/project/history.html:299 -#: warehouse/templates/manage/project/history.html:321 +#: warehouse/templates/manage/project/history.html:105 +#: warehouse/templates/manage/project/history.html:145 +#: warehouse/templates/manage/project/history.html:190 +#: warehouse/templates/manage/project/history.html:216 +#: warehouse/templates/manage/project/history.html:307 +#: warehouse/templates/manage/project/history.html:329 #: warehouse/templates/manage/team/history.html:88 msgid "Added by:" msgstr "" #: warehouse/templates/manage/manage_base.html:584 #: warehouse/templates/manage/organization/history.html:171 -#: warehouse/templates/manage/project/history.html:62 -#: warehouse/templates/manage/project/history.html:128 -#: warehouse/templates/manage/project/history.html:144 -#: warehouse/templates/manage/project/history.html:190 -#: warehouse/templates/manage/project/history.html:216 +#: warehouse/templates/manage/project/history.html:70 +#: warehouse/templates/manage/project/history.html:136 +#: warehouse/templates/manage/project/history.html:152 +#: warehouse/templates/manage/project/history.html:198 +#: warehouse/templates/manage/project/history.html:224 #: warehouse/templates/manage/team/history.html:93 msgid "Removed by:" msgstr "" @@ -4130,7 +4130,7 @@ msgid "Submitted by:" msgstr "" #: warehouse/templates/manage/manage_base.html:589 -#: warehouse/templates/manage/project/history.html:247 +#: warehouse/templates/manage/project/history.html:255 msgid "Workflow:" msgstr "" @@ -4144,7 +4144,7 @@ msgstr "" #: warehouse/templates/manage/manage_base.html:596 #: warehouse/templates/manage/project/history.html:52 -#: warehouse/templates/manage/project/history.html:106 +#: 
warehouse/templates/manage/project/history.html:114 msgid "URL:" msgstr "" @@ -5298,8 +5298,8 @@ msgid "Created by:" msgstr "" #: warehouse/templates/manage/organization/history.html:144 -#: warehouse/templates/manage/project/history.html:310 -#: warehouse/templates/manage/project/history.html:332 +#: warehouse/templates/manage/project/history.html:318 +#: warehouse/templates/manage/project/history.html:340 #: warehouse/templates/manage/team/history.html:76 msgid "Deleted by:" msgstr "" @@ -5318,26 +5318,26 @@ msgid "Declined by:" msgstr "" #: warehouse/templates/manage/organization/history.html:176 -#: warehouse/templates/manage/project/history.html:151 -#: warehouse/templates/manage/project/history.html:198 +#: warehouse/templates/manage/project/history.html:159 +#: warehouse/templates/manage/project/history.html:206 #: warehouse/templates/manage/team/history.html:98 msgid "Changed by:" msgstr "" #: warehouse/templates/manage/organization/history.html:181 #: warehouse/templates/manage/organization/history.html:186 -#: warehouse/templates/manage/project/history.html:158 -#: warehouse/templates/manage/project/history.html:165 +#: warehouse/templates/manage/project/history.html:166 +#: warehouse/templates/manage/project/history.html:173 msgid "Invited by:" msgstr "" #: warehouse/templates/manage/organization/history.html:191 -#: warehouse/templates/manage/project/history.html:172 +#: warehouse/templates/manage/project/history.html:180 msgid "Revoked by:" msgstr "" #: warehouse/templates/manage/organization/history.html:198 -#: warehouse/templates/manage/project/history.html:349 +#: warehouse/templates/manage/project/history.html:357 #: warehouse/templates/manage/team/history.html:105 #, python-format msgid "Security history for %(source_name)s" @@ -5834,188 +5834,200 @@ msgstr "" msgid "Version %(version)s created" msgstr "" +#: warehouse/templates/manage/project/history.html:57 +msgid "Published:" +msgstr "" + #: warehouse/templates/manage/project/history.html:59 +msgid "No" +msgstr "" + +#: warehouse/templates/manage/project/history.html:61 +msgid "Yes" +msgstr "" + +#: warehouse/templates/manage/project/history.html:67 #, python-format msgid "Version %(version)s removed" msgstr "" -#: warehouse/templates/manage/project/history.html:66 +#: warehouse/templates/manage/project/history.html:74 #, python-format msgid "Version %(version)s yanked" msgstr "" -#: warehouse/templates/manage/project/history.html:69 -#: warehouse/templates/manage/project/history.html:76 +#: warehouse/templates/manage/project/history.html:77 +#: warehouse/templates/manage/project/history.html:84 msgid "Yanked by:" msgstr "" -#: warehouse/templates/manage/project/history.html:73 +#: warehouse/templates/manage/project/history.html:81 #, python-format msgid "Version %(version)s unyanked" msgstr "" -#: warehouse/templates/manage/project/history.html:82 +#: warehouse/templates/manage/project/history.html:90 #, python-format msgid "File added to version %(version)s" msgstr "" -#: warehouse/templates/manage/project/history.html:94 -#: warehouse/templates/manage/project/history.html:125 +#: warehouse/templates/manage/project/history.html:102 +#: warehouse/templates/manage/project/history.html:133 #: warehouse/templates/manage/project/release.html:112 msgid "Filename:" msgstr "" -#: warehouse/templates/manage/project/history.html:113 +#: warehouse/templates/manage/project/history.html:121 #, python-format msgid "File removed from version %(version)s" msgstr "" -#: warehouse/templates/manage/project/history.html:134 +#: 
warehouse/templates/manage/project/history.html:142 #, python-format msgid "%(username)s added as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:141 +#: warehouse/templates/manage/project/history.html:149 #, python-format msgid "%(username)s removed as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:148 +#: warehouse/templates/manage/project/history.html:156 #, python-format msgid "%(username)s changed to project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:155 +#: warehouse/templates/manage/project/history.html:163 #, python-format msgid "" "%(username)s invited to join as project " "%(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:162 +#: warehouse/templates/manage/project/history.html:170 #, python-format msgid "" "%(username)s declined invitation to join as " "project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:169 +#: warehouse/templates/manage/project/history.html:177 #, python-format msgid "" "Revoked invitation for %(username)s to join as " "project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:179 +#: warehouse/templates/manage/project/history.html:187 #, python-format msgid "%(team_name)s team added as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:187 +#: warehouse/templates/manage/project/history.html:195 #, python-format msgid "%(team_name)s team changed to project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:195 +#: warehouse/templates/manage/project/history.html:203 #, python-format msgid "%(team_name)s team removed as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:205 +#: warehouse/templates/manage/project/history.html:213 #, python-format msgid "Project added to %(organization_name)s organization" msgstr "" -#: warehouse/templates/manage/project/history.html:213 +#: warehouse/templates/manage/project/history.html:221 #, python-format msgid "Project removed from %(organization_name)s organization" msgstr "" -#: warehouse/templates/manage/project/history.html:221 +#: warehouse/templates/manage/project/history.html:229 msgid "Short-lived API token created" msgstr "" -#: warehouse/templates/manage/project/history.html:222 -#: warehouse/templates/manage/project/history.html:252 -#: warehouse/templates/manage/project/history.html:266 +#: warehouse/templates/manage/project/history.html:230 +#: warehouse/templates/manage/project/history.html:260 +#: warehouse/templates/manage/project/history.html:274 msgid "Permissions: Can upload to this project" msgstr "" -#: warehouse/templates/manage/project/history.html:225 -#: warehouse/templates/manage/project/history.html:260 +#: warehouse/templates/manage/project/history.html:233 +#: warehouse/templates/manage/project/history.html:268 msgid "Expiration:" msgstr "" -#: warehouse/templates/manage/project/history.html:228 +#: warehouse/templates/manage/project/history.html:236 msgid "Expiration status:" msgstr "" -#: warehouse/templates/manage/project/history.html:232 +#: warehouse/templates/manage/project/history.html:240 msgid "Expired" msgstr "" -#: warehouse/templates/manage/project/history.html:237 +#: warehouse/templates/manage/project/history.html:245 msgid "Creator" msgstr "" -#: warehouse/templates/manage/project/history.html:251 +#: warehouse/templates/manage/project/history.html:259 msgid "API token created" msgstr "" -#: 
warehouse/templates/manage/project/history.html:255 -#: warehouse/templates/manage/project/history.html:268 +#: warehouse/templates/manage/project/history.html:263 +#: warehouse/templates/manage/project/history.html:276 msgid "Controlled by:" msgstr "" -#: warehouse/templates/manage/project/history.html:277 +#: warehouse/templates/manage/project/history.html:285 msgid "Trusted publisher added" msgstr "" -#: warehouse/templates/manage/project/history.html:280 +#: warehouse/templates/manage/project/history.html:288 msgid "Trusted publisher removed" msgstr "" -#: warehouse/templates/manage/project/history.html:285 +#: warehouse/templates/manage/project/history.html:293 msgid "2FA requirement enabled" msgstr "" -#: warehouse/templates/manage/project/history.html:287 +#: warehouse/templates/manage/project/history.html:295 msgid "Enabled by:" msgstr "" -#: warehouse/templates/manage/project/history.html:290 +#: warehouse/templates/manage/project/history.html:298 msgid "2FA requirement disabled" msgstr "" -#: warehouse/templates/manage/project/history.html:292 +#: warehouse/templates/manage/project/history.html:300 msgid "Disabled by:" msgstr "" -#: warehouse/templates/manage/project/history.html:297 -#: warehouse/templates/manage/project/history.html:319 +#: warehouse/templates/manage/project/history.html:305 +#: warehouse/templates/manage/project/history.html:327 msgid "Project alternate repository added" msgstr "" -#: warehouse/templates/manage/project/history.html:302 -#: warehouse/templates/manage/project/history.html:313 -#: warehouse/templates/manage/project/history.html:324 -#: warehouse/templates/manage/project/history.html:335 +#: warehouse/templates/manage/project/history.html:310 +#: warehouse/templates/manage/project/history.html:321 +#: warehouse/templates/manage/project/history.html:332 +#: warehouse/templates/manage/project/history.html:343 #: warehouse/templates/manage/project/settings.html:225 #: warehouse/templates/manage/project/settings.html:305 #: warehouse/templates/manage/project/settings.html:311 msgid "Url" msgstr "" -#: warehouse/templates/manage/project/history.html:308 -#: warehouse/templates/manage/project/history.html:330 +#: warehouse/templates/manage/project/history.html:316 +#: warehouse/templates/manage/project/history.html:338 msgid "Project alternate repository deleted" msgstr "" -#: warehouse/templates/manage/project/history.html:354 +#: warehouse/templates/manage/project/history.html:362 msgid "Additional info" msgstr "" -#: warehouse/templates/manage/project/history.html:366 +#: warehouse/templates/manage/project/history.html:374 #: warehouse/templates/manage/team/history.html:122 msgid "Location info" msgstr "" diff --git a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py b/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py similarity index 73% rename from warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py rename to warehouse/migrations/versions/bd2bf218e63f_add_published_field.py index 9bdb059c1f4c..313e65679ecd 100644 --- a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py +++ b/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py @@ -10,32 +10,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
""" -add published in Release +add published field -Revision ID: 3e7bf3217166 +Revision ID: bd2bf218e63f Revises: f7720656a33c -Create Date: 2024-12-06 11:04:21.907167 +Create Date: 2024-12-10 10:40:19.588606 """ import sqlalchemy as sa from alembic import op -revision = "3e7bf3217166" +revision = "bd2bf218e63f" down_revision = "f7720656a33c" def upgrade(): op.add_column( "releases", - sa.Column("published", sa.DateTime(), nullable=True), - ) - - op.execute( - """ - UPDATE releases - SET published = created - """ + sa.Column( + "published", sa.Boolean(), server_default=sa.text("true"), nullable=False + ), ) diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index b53c64c8b63a..9cef52f2d034 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -11,7 +11,6 @@ # limitations under the License. from __future__ import annotations -import datetime import enum import typing @@ -80,7 +79,7 @@ from warehouse.sitemap.models import SitemapMixin from warehouse.utils import dotted_navigator, wheel from warehouse.utils.attrs import make_repr -from warehouse.utils.db.types import bool_false, datetime_now +from warehouse.utils.db.types import bool_false, bool_true, datetime_now if typing.TYPE_CHECKING: from warehouse.oidc.models import OIDCPublisher @@ -613,7 +612,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() - published: Mapped[datetime.datetime | None] + published: Mapped[bool_true] description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), diff --git a/warehouse/templates/manage/project/history.html b/warehouse/templates/manage/project/history.html index 36950a26ed42..f757111fe412 100644 --- a/warehouse/templates/manage/project/history.html +++ b/warehouse/templates/manage/project/history.html @@ -53,6 +53,14 @@

{% trans %}Security history{% endtrans %}

{{ event.additional.publisher_url }} {% endif %} + + {% trans %}Published:{% endtrans %} + {% if event.additional.published is defined and event.additional.published is false %} + {% trans %}No{% endtrans %} + {% else %} + {% trans %}Yes{% endtrans %} + {% endif %} + {% elif event.tag == EventTag.Project.ReleaseRemove %} {# No link to removed release #} From a7927386f4d99f42ced3d4d66179e352fc1e0d14 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 10 Dec 2024 18:39:04 +0100 Subject: [PATCH 06/26] Filter out unpublished releases --- tests/unit/legacy/api/test_json.py | 16 +++++++ tests/unit/packaging/test_views.py | 77 ++++++++++++++++++++---------- warehouse/legacy/api/json.py | 8 +++- warehouse/locale/messages.pot | 2 +- warehouse/packaging/models.py | 6 ++- warehouse/packaging/utils.py | 4 +- warehouse/packaging/views.py | 5 +- warehouse/search/tasks.py | 2 +- 8 files changed, 89 insertions(+), 31 deletions(-) diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index 3a7b029e8c7e..5edea5594161 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -118,6 +118,13 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): db_request.matchdict = {"name": project.normalized_name} assert json.latest_release_factory(db_request) == release + def test_with_unpublished(self, db_request): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + ReleaseFactory.create(project=project, version="2.0", published=False) + db_request.matchdict = {"name": project.normalized_name} + assert json.latest_release_factory(db_request) == release + def test_project_quarantined(self, monkeypatch, db_request): project = ProjectFactory.create( lifecycle_status=LifecycleStatus.QuarantineEnter @@ -191,6 +198,15 @@ def test_renders(self, pyramid_config, db_request, db_session): ) ] + ReleaseFactory.create( + project=project, + version="3.1", + description=DescriptionFactory.create( + content_type=description_content_type + ), + published=False, + ) + for urlspec in project_urls: label, _, purl = urlspec.partition(",") db_session.add( diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index dc6ac4156dfd..a8ea5f6f805d 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -135,6 +135,19 @@ def test_only_yanked_release(self, monkeypatch, db_request): assert resp is response assert release_detail.calls == [pretend.call(release, db_request)] + def test_with_unpublished(self, monkeypatch, db_request): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + ReleaseFactory.create(project=project, version="1.1", published=False) + + response = pretend.stub() + release_detail = pretend.call_recorder(lambda ctx, request: response) + monkeypatch.setattr(views, "release_detail", release_detail) + + resp = views.project_detail(project, db_request) + assert resp is response + assert release_detail.calls == [pretend.call(release, db_request)] + class TestReleaseDetail: def test_normalizing_name_redirects(self, db_request): @@ -178,30 +191,45 @@ def test_normalizing_version_redirects(self, db_request): def test_detail_rendered(self, db_request): users = [UserFactory.create(), UserFactory.create(), UserFactory.create()] project = ProjectFactory.create() - releases = [ - ReleaseFactory.create( - project=project, - version=v, - description=DescriptionFactory.create( - raw="unrendered 
description", - html="rendered description", - content_type="text/html", - ), - ) - for v in ["1.0", "2.0", "3.0", "4.0.dev0"] - ] + [ - ReleaseFactory.create( - project=project, - version="5.0", - description=DescriptionFactory.create( - raw="plaintext description", - html="", - content_type="text/plain", - ), - yanked=True, - yanked_reason="plaintext yanked reason", - ) - ] + releases = ( + [ + ReleaseFactory.create( + project=project, + version=v, + description=DescriptionFactory.create( + raw="unrendered description", + html="rendered description", + content_type="text/html", + ), + ) + for v in ["1.0", "2.0", "3.0", "4.0.dev0"] + ] + + [ + ReleaseFactory.create( + project=project, + version="5.0", + description=DescriptionFactory.create( + raw="plaintext description", + html="", + content_type="text/plain", + ), + yanked=True, + yanked_reason="plaintext yanked reason", + ) + ] + + [ + ReleaseFactory.create( + project=project, + version="5.1", + description=DescriptionFactory.create( + raw="unrendered description", + html="rendered description", + content_type="text/html", + ), + published=False, + ) + ] + ) files = [ FileFactory.create( release=r, @@ -226,6 +254,7 @@ def test_detail_rendered(self, db_request): "bdists": [], "description": "rendered description", "latest_version": project.latest_version, + # Non published version are not listed here "all_versions": [ (r.version, r.created, r.is_prerelease, r.yanked, r.yanked_reason) for r in reversed(releases) diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py index 513699cbd665..e7b8652527af 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -62,7 +62,10 @@ def _json_data(request, project, release, *, all_releases): ) ) .outerjoin(File) - .filter(Release.project == project) + .filter( + Release.project == project, + Release.published.is_(True), + ) ) # If we're not looking for all_releases, then we'll filter this further @@ -206,7 +209,8 @@ def latest_release_factory(request): .filter( Project.lifecycle_status.is_distinct_from( LifecycleStatus.QuarantineEnter - ) + ), + Release.published.is_(True), ) .order_by( Release.yanked.asc(), diff --git a/warehouse/locale/messages.pot b/warehouse/locale/messages.pot index 1b7302ad7579..5594badd3d11 100644 --- a/warehouse/locale/messages.pot +++ b/warehouse/locale/messages.pot @@ -776,7 +776,7 @@ msgstr "" msgid "Provide an Inspector link to specific lines of code." msgstr "" -#: warehouse/packaging/views.py:352 +#: warehouse/packaging/views.py:355 msgid "Your report has been recorded. Thank you for your help." 
msgstr "" diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 9cef52f2d034..fe627855cb3b 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -454,7 +454,11 @@ def latest_version(self): return ( orm.object_session(self) .query(Release.version, Release.created, Release.is_prerelease) - .filter(Release.project == self, Release.yanked.is_(False)) + .filter( + Release.project == self, + Release.yanked.is_(False), + Release.published.is_(True), + ) .order_by(Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()) .first() ) diff --git a/warehouse/packaging/utils.py b/warehouse/packaging/utils.py index 7397cf45a740..53bdc488a04a 100644 --- a/warehouse/packaging/utils.py +++ b/warehouse/packaging/utils.py @@ -53,9 +53,11 @@ def _simple_detail(project, request): .join(Release) .filter(Release.project == project) # Exclude projects that are in the `quarantine-enter` lifecycle status. + # And exclude un-published releases from the index .join(Project) .filter( - Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter) + Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter), + Release.published.is_(True), ) .all(), key=lambda f: (packaging_legacy.version.parse(f.release.version), f.filename), diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py index 55f722c43c0b..d1c3998f293e 100644 --- a/warehouse/packaging/views.py +++ b/warehouse/packaging/views.py @@ -179,7 +179,10 @@ def project_detail(project, request): try: release = ( request.db.query(Release) - .filter(Release.project == project) + .filter( + Release.project == project, + Release.published.is_(True), + ) .order_by( Release.yanked, Release.is_prerelease.nullslast(), diff --git a/warehouse/search/tasks.py b/warehouse/search/tasks.py index 3b3d05999cfd..2326d95f9963 100644 --- a/warehouse/search/tasks.py +++ b/warehouse/search/tasks.py @@ -42,7 +42,7 @@ def _project_docs(db, project_name=None): releases_list = ( select(Release.id) - .filter(Release.yanked.is_(False), Release.files) + .filter(Release.yanked.is_(False), Release.published.is_(True), Release.files) .order_by( Release.project_id, Release.is_prerelease.nullslast(), From 4b8b395214ecfbdf91cbb878e5a19cf2282a2eec Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 20 Dec 2024 17:46:35 +0100 Subject: [PATCH 07/26] Add `staged` releases publication --- tests/unit/forklift/test_legacy.py | 355 +++++++++++++++++- tests/unit/packaging/test_tasks.py | 37 +- warehouse/events/tags.py | 1 + warehouse/forklift/forms.py | 29 +- warehouse/forklift/legacy.py | 98 ++++- warehouse/packaging/tasks.py | 29 +- .../templates/manage/project/history.html | 13 +- 7 files changed, 521 insertions(+), 41 deletions(-) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index 782271821d20..6d76c967fbac 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -690,15 +690,6 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): "more information.", ), # filetype/pyversion errors. - ( - { - "metadata_version": "1.2", - "name": "example", - "version": "1.0", - "md5_digest": "bad", - }, - "Invalid value for filetype. Error: This field is required.", - ), ( { "metadata_version": "1.2", @@ -754,6 +745,25 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): "Invalid value for sha256_digest. 
" "Error: Use a valid, hex-encoded, SHA256 message digest.", ), + # digest and filetype interactions + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "md5_digest": "a fake md5 digest", + }, + "Error: No digest are allowed without a file.", + ), + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + }, + "Error: Include at least one message digest.", + ), # summary errors ( { @@ -1084,6 +1094,7 @@ def test_upload_fails_without_file(self, pyramid_config, db_request): EmailFactory.create(user=user) pyramid_config.testing_securitypolicy(identity=user) db_request.user = user + db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( { "metadata_version": "1.2", @@ -1102,6 +1113,36 @@ def test_upload_fails_without_file(self, pyramid_config, db_request): assert resp.status_code == 400 assert resp.status == "400 Upload payload does not have a file." + def test_upload_fails_without_filetype(self, pyramid_config, db_request): + user = UserFactory.create() + EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) + db_request.user = user + db_request.user_agent = "warehouse-tests/6.6.6" + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "content": pretend.stub( + filename="fails-without-filetype-1.0.tar.gz", + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + + with pytest.raises(HTTPBadRequest) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert ( + resp.status + == "400 Invalid value for filetype. Error: This field is required." + ) + @pytest.mark.parametrize("value", [("UNKNOWN"), ("UNKNOWN\n\n")]) def test_upload_cleans_unknown_values(self, pyramid_config, db_request, value): user = UserFactory.create() @@ -1128,6 +1169,7 @@ def test_upload_escapes_nul_characters(self, pyramid_config, db_request): EmailFactory.create(user=user) pyramid_config.testing_securitypolicy(identity=user) db_request.user = user + db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( { "metadata_version": "1.2", @@ -5343,6 +5385,301 @@ def test_upload_fails_when_license_and_license_expression_are_present( ) +class TestStagedRelease: + @staticmethod + def get_identity(project, db_request, pyramid_config): + identity = UserFactory.create() + EmailFactory.create(user=identity) + RoleFactory.create(user=identity, project=project) + + db_request.user = identity + db_request.user_agent = "warehouse-tests/6.6.6" + + pyramid_config.testing_securitypolicy(identity=identity) + return identity + + def test_upload_with_stage(self, monkeypatch, db_request, pyramid_config, metrics): + from warehouse.events.models import HasEvents + from warehouse.events.tags import EventTag + + project = ProjectFactory.create() + identity = self.get_identity(project, db_request, pyramid_config) + + filename = "{}-{}.tar.gz".format( + project.normalized_name.replace("-", "_"), "1.0" + ) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + db_request.headers["X-PyPI-Is-Staged"] = "1" + + storage_service = pretend.stub(store=lambda path, filepath, meta: None) + db_request.find_service = 
lambda svc, name=None, context=None: { + IFileStorage: storage_service, + IMetricsService: metrics, + }.get(svc) + + record_event = pretend.call_recorder( + lambda self, *, tag, request=None, additional: None + ) + monkeypatch.setattr(HasEvents, "record_event", record_event) + + resp = legacy.file_upload(db_request) + assert resp.status_code == 200 + + # Ensure that a Release object has been created. + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & (Release.version == "1.0")) + .one() + ) + + assert not release.published + + # Ensure that a File object has been created. + db_request.db.query(File).filter( + (File.release == release) & (File.filename == filename) + ).one() + + # Ensure that all of our journal entries have been created + journals = ( + db_request.db.query(JournalEntry) + .options(joinedload(JournalEntry.submitted_by)) + .order_by("submitted_date", "id") + .all() + ) + assert [(j.name, j.version, j.action, j.submitted_by) for j in journals] == [ + ( + release.project.name, + release.version, + "new release", + identity, + ), + ( + release.project.name, + release.version, + f"add source file {filename}", + identity, + ), + ] + + # Ensure that all of our events have been created + release_event = { + "submitted_by": identity.username, + "canonical_version": release.canonical_version, + "publisher_url": None, + "uploaded_via_trusted_publisher": False, + "published": False, + } + + fileadd_event = { + "filename": filename, + "submitted_by": identity.username, + "canonical_version": release.canonical_version, + "publisher_url": None, + "project_id": str(project.id), + "uploaded_via_trusted_publisher": False, + } + + assert record_event.calls == [ + pretend.call( + mock.ANY, + tag=EventTag.Project.ReleaseAdd, + request=db_request, + additional=release_event, + ), + pretend.call( + mock.ANY, + tag=EventTag.File.FileAdd, + request=db_request, + additional=fileadd_event, + ), + ] + + def test_publish_without_file( + self, monkeypatch, db_request, pyramid_config, metrics + ): + from warehouse.events.models import HasEvents + from warehouse.events.tags import EventTag + + project = ProjectFactory.create() + identity = self.get_identity(project, db_request, pyramid_config) + + release = ReleaseFactory.create(project=project, version="1.0") + release.published = False + FileFactory.create(release=release) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + } + ) + + record_event = pretend.call_recorder( + lambda self, *, tag, request=None, additional: None + ) + monkeypatch.setattr(HasEvents, "record_event", record_event) + db_request.find_service = lambda svc, name=None, context=None: { + IMetricsService: metrics, + }.get(svc) + + resp = legacy.file_upload(db_request) + assert resp.status_code == 200 + + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & (Release.version == "1.0")) + .one() + ) + + assert release.published + assert record_event.calls == [ + pretend.call( + mock.ANY, + tag=EventTag.Project.ReleasePublish, + request=db_request, + additional={ + "submitted_by": identity.username, + "canonical_version": release.canonical_version, + "uploaded_via_trusted_publisher": False, + }, + ), + ] + + assert metrics.increment.calls == [ + pretend.call("warehouse.upload.attempt"), + pretend.call("warehouse.publish.ok"), + ] + + def test_publish_with_file(self, monkeypatch, db_request, pyramid_config, metrics): + from warehouse.events.models import HasEvents + 
from warehouse.events.tags import EventTag + + project = ProjectFactory.create() + identity = self.get_identity(project, db_request, pyramid_config) + + # Create a release and add a file + release = ReleaseFactory.create(project=project, version="1.0") + release.published = False + FileFactory.create(release=release, packagetype="bdist_wheel") + + filename = "{}-{}.tar.gz".format( + project.normalized_name.replace("-", "_"), "1.0" + ) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + + storage_service = pretend.stub(store=lambda path, filepath, meta: None) + db_request.find_service = lambda svc, name=None, context=None: { + IFileStorage: storage_service, + IMetricsService: metrics, + }.get(svc) + + record_event = pretend.call_recorder( + lambda self, *, tag, request=None, additional: None + ) + monkeypatch.setattr(HasEvents, "record_event", record_event) + + resp = legacy.file_upload(db_request) + assert resp.status_code == 200 + + # Ensure that a Release object has been created. + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & (Release.version == "1.0")) + .one() + ) + + assert release.published + + # Ensure that a File object has been created. + db_request.db.query(File).filter( + (File.release == release) & (File.filename == filename) + ).one() + + # Ensure that all of our journal entries have been created + journals = ( + db_request.db.query(JournalEntry) + .options(joinedload(JournalEntry.submitted_by)) + .order_by("submitted_date", "id") + .all() + ) + assert [(j.name, j.version, j.action, j.submitted_by) for j in journals] == [ + ( + release.project.name, + release.version, + f"add source file {filename}", + identity, + ), + ( + release.project.name, + release.version, + "publish release", + identity, + ), + ] + + # Ensure that all of our events have been created + release_event = { + "submitted_by": identity.username, + "canonical_version": release.canonical_version, + "uploaded_via_trusted_publisher": False, + } + + fileadd_event = { + "filename": filename, + "submitted_by": (identity.username), + "canonical_version": release.canonical_version, + "publisher_url": None, + "project_id": str(project.id), + "uploaded_via_trusted_publisher": False, + } + + assert record_event.calls == [ + pretend.call( + mock.ANY, + tag=EventTag.File.FileAdd, + request=db_request, + additional=fileadd_event, + ), + pretend.call( + mock.ANY, + tag=EventTag.Project.ReleasePublish, + request=db_request, + additional=release_event, + ), + ] + + def test_submit(pyramid_request): resp = legacy.submit(pyramid_request) diff --git a/tests/unit/packaging/test_tasks.py b/tests/unit/packaging/test_tasks.py index 7d3fe2c73d5e..5c1b2be197ae 100644 --- a/tests/unit/packaging/test_tasks.py +++ b/tests/unit/packaging/test_tasks.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime import tempfile from contextlib import contextmanager @@ -24,11 +25,12 @@ import warehouse.packaging.tasks from warehouse.accounts.models import WebAuthn -from warehouse.packaging.models import Description +from warehouse.packaging.models import Description, Release from warehouse.packaging.tasks import ( check_file_cache_tasks_outstanding, compute_2fa_metrics, compute_packaging_metrics, + delete_staged_stalled_releases, sync_bigquery_release_files, sync_file_to_cache, update_bigquery_release_files, @@ -909,3 +911,36 @@ def test_compute_2fa_metrics(db_request, monkeypatch): pretend.call("warehouse.2fa.total_users_with_webauthn_enabled", 1), pretend.call("warehouse.2fa.total_users_with_two_factor_enabled", 2), ] + + +def test_delete_staged_stalled_releases(db_request, metrics): + # We create 4 releases: + # - One unpublished and created 30 days ago + # - One published and created 25 days ago + # - One unpublished and created 3 days ago + # - One published and created now + # The task should only delete the first one + + release_30_days = ReleaseFactory.create(published=False) + release_30_days.created -= datetime.timedelta(days=30) + + release_25_days = ReleaseFactory.create(published=True) + release_25_days.created -= datetime.timedelta(days=30) + + release_3_days = ReleaseFactory.create(published=False) + release_3_days.created -= datetime.timedelta(days=3) + + ReleaseFactory.create(published=True) + + deleted_release_id = release_30_days.id + + assert db_request.db.query(Release).count() == 4 + delete_staged_stalled_releases(db_request) + assert db_request.db.query(Release).count() == 3 + assert ( + db_request.db.query(Release).filter(Release.id == deleted_release_id).count() + ) == 0 + + assert metrics.gauge.calls == [ + pretend.call("warehouse.release.stalled_releases_deleted", 1), + ] diff --git a/warehouse/events/tags.py b/warehouse/events/tags.py index 61e3161c8e37..074b6fd18b08 100644 --- a/warehouse/events/tags.py +++ b/warehouse/events/tags.py @@ -131,6 +131,7 @@ class Project(EventTagEnum): ReleaseRemove = "project:release:remove" ReleaseUnyank = "project:release:unyank" ReleaseYank = "project:release:yank" + ReleasePublish = "project:release:publish" RoleAdd = "project:role:add" RoleChange = "project:role:change" RoleDeclineInvite = "project:role:decline_invite" diff --git a/warehouse/forklift/forms.py b/warehouse/forklift/forms.py index a908568917fe..2b904e74fcc1 100644 --- a/warehouse/forklift/forms.py +++ b/warehouse/forklift/forms.py @@ -57,7 +57,7 @@ class UploadForm(wtforms.Form): pyversion = wtforms.StringField(validators=[wtforms.validators.Optional()]) filetype = wtforms.StringField( validators=[ - wtforms.validators.InputRequired(), + wtforms.validators.Optional(), wtforms.validators.AnyOf( _filetype_extension_mapping.keys(), message="Use a known file type." ), @@ -119,13 +119,24 @@ def validate(self, _extra_validators=None) -> bool: ) return False - # We *must* have at least one digest to verify against. 
- if ( - not self.md5_digest.data - and not self.sha256_digest.data - and not self.blake2_256_digest.data - ): - self.form_errors.append("Include at least one message digest.") - return False + # We *must* have: + # - either no filetype, no digests (and no file) + # - a filetype, at least one digest (and a file) + if not self.filetype.data: + if ( + self.md5_digest.data + or self.sha256_digest.data + or self.blake2_256_digest.data + ): + self.form_errors.append("No digest are allowed without a file.") + return False + else: + if ( + not self.md5_digest.data + and not self.sha256_digest.data + and not self.blake2_256_digest.data + ): + self.form_errors.append("Include at least one message digest.") + return False return success diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 68a4dec4b6d1..d196e5798b0a 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -427,6 +427,40 @@ def _sort_releases(request: Request, project: Project): r._pypi_ordering = i +def publish_staged_release(request, project, release): + """ + Publish a staged release. + + This method assumes all preconditions are met and only modifies the release + state to 'published', creating the appropriate events. + """ + metrics = request.find_service(IMetricsService, context=None) + + request.db.add( + JournalEntry( + name=project.name, + action="publish release", + version=release.version, + submitted_by=request.user if request.user else None, + ) + ) + + project.record_event( + tag=EventTag.Project.ReleasePublish, + request=request, + additional={ + "submitted_by": ( + request.user.username if request.user else "OpenID created token" + ), + "uploaded_via_trusted_publisher": bool(request.oidc_publisher), + "canonical_version": release.canonical_version, + }, + ) + release.published = True + + metrics.increment("warehouse.publish.ok") + + @view_config( route_name="forklift.legacy.file_upload", uses_session=True, @@ -604,10 +638,6 @@ def file_upload(request): ), ) - # Ensure that we have file data in the request. - if "content" not in request.POST: - raise _exc_with_message(HTTPBadRequest, "Upload payload does not have a file.") - # Look up the project first before doing anything else, this is so we can # automatically register it if we need to and can check permissions before # going any further. @@ -750,6 +780,9 @@ def file_upload(request): ), ) from None + # Is the current release a staged release + staged_release = bool(request.headers.get("X-PyPI-Is-Staged", False)) + # Verify any verifiable URLs project_urls = ( {} @@ -902,6 +935,7 @@ def file_upload(request): }, uploader=request.user if request.user else None, uploaded_via=request.user_agent, + published=not staged_release, ) request.db.add(release) is_new_release = True @@ -932,7 +966,7 @@ def file_upload(request): else None ), "uploaded_via_trusted_publisher": bool(request.oidc_publisher), - "published": True, + "published": not staged_release, }, ) @@ -941,6 +975,28 @@ def file_upload(request): # at least this should be some sort of hook or trigger. _sort_releases(request, project) + # Ensure that we have file data in the request. + if "content" not in request.POST: + # We only allow empty file data to publish staged release + if is_new_release or release.published is True: + raise _exc_with_message( + HTTPBadRequest, "Upload payload does not have a file." 
+ ) + + # In this case: publish the staged release and return early + publish_staged_release(request, project, release) + return HTTPOk() + + # From here, we know we have a file - let's start the validation + if not form.filetype.data: + raise _exc_with_message( + HTTPBadRequest, + # TODO(dm): This is the previous message (from the form validation) + # Can this be changed to something cleaner or will it break a + # downstream usage? + "Invalid value for filetype. Error: This field is required.", + ) + # Pull the filename out of our POST data. filename = request.POST["content"].filename @@ -1436,20 +1492,26 @@ def file_upload(request): # For existing releases, we check if any of the existing project URLs are unverified # and have been verified in the current upload. In that case, we mark them as # verified. - if not is_new_release and project_urls: - for name, release_url in release._project_urls.items(): - if ( - not release_url.verified - and name in project_urls - and project_urls[name]["url"] == release_url.url - and project_urls[name]["verified"] - ): - release_url.verified = True + if not is_new_release: + if project_urls: + for name, release_url in release._project_urls.items(): + if ( + not release_url.verified + and name in project_urls + and project_urls[name]["url"] == release_url.url + and project_urls[name]["verified"] + ): + release_url.verified = True + + if home_page_verified and not release.home_page_verified: + release.home_page_verified = True + if download_url_verified and not release.download_url_verified: + release.download_url_verified = True - if home_page_verified and not release.home_page_verified: - release.home_page_verified = True - if download_url_verified and not release.download_url_verified: - release.download_url_verified = True + # If we had a staged release and this request does not include the header, that + # means we are good to publish + if not staged_release and release.published is False: + publish_staged_release(request, project, release) request.db.flush() # flush db now so server default values are populated for celery diff --git a/warehouse/packaging/tasks.py b/warehouse/packaging/tasks.py index c56208903076..3235362acdb1 100644 --- a/warehouse/packaging/tasks.py +++ b/warehouse/packaging/tasks.py @@ -10,11 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import datetime import logging import tempfile from collections import namedtuple +from datetime import datetime, timedelta, timezone from itertools import product from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded @@ -318,7 +318,7 @@ def update_bigquery_release_files(task, request, dist_metadata): for sch in table_schema: field_data = dist_metadata.get(sch.name, None) - if isinstance(field_data, datetime.datetime): + if isinstance(field_data, datetime): field_data = field_data.isoformat() # Replace all empty objects to None will ensure @@ -384,7 +384,7 @@ def populate_data_using_schema(file): else: field_data = None - if isinstance(field_data, datetime.datetime): + if isinstance(field_data, datetime): field_data = field_data.isoformat() # Replace all empty objects to None will ensure @@ -444,3 +444,26 @@ def populate_data_using_schema(file): json_rows, table_name, job_config=LoadJobConfig(schema=table_schema) ).result() break + + +@tasks.task(ignore_result=True, acks_late=True) +def delete_staged_stalled_releases(request): + """ + Purge all staged (unpublished) releases that have not been updated for 15 + days. + """ + rows_deleted = ( + request.db.query(Release) + .filter(Release.published.is_(False)) + .filter( + # The token has been created at more than 1 day ago + Release.created + timedelta(days=15) + < datetime.now(tz=timezone.utc) + ) + .delete(synchronize_session=False) + ) + metrics = request.find_service(IMetricsService, context=None) + metrics.gauge( + "warehouse.release.stalled_releases_deleted", + rows_deleted, + ) diff --git a/warehouse/templates/manage/project/history.html b/warehouse/templates/manage/project/history.html index f757111fe412..953043b197a6 100644 --- a/warehouse/templates/manage/project/history.html +++ b/warehouse/templates/manage/project/history.html @@ -83,7 +83,18 @@

{% trans %}Security history{% endtrans %}

{% trans %}Yanked by:{% endtrans %} {{ event.additional.submitted_by }} - + {% elif event.tag == EventTag.Project.ReleasePublish %} + + {% trans href=request.route_path('manage.project.release', project_name=project.name, version=event.additional.canonical_version), version=event.additional.canonical_version %}Version {{ version }} published{% endtrans %} + + + {% trans %}Published by:{% endtrans %} + {% if event.additional.uploaded_via_trusted_publisher or event.additional.publisher_url %} + {{ event.additional.submitted_by }} + {% else %} + {{ event.additional.submitted_by }} + {% endif %} + {# Display file events #} {% elif event.tag == EventTag.File.FileAdd %} From 1d81d5da55aa762d2a462821cae619341091342a Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 27 Dec 2024 14:59:45 +0100 Subject: [PATCH 08/26] Remove empty upload. --- tests/unit/forklift/test_legacy.py | 117 +++-------------------------- warehouse/forklift/forms.py | 29 +++---- warehouse/forklift/legacy.py | 25 +----- 3 files changed, 21 insertions(+), 150 deletions(-) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index 6d76c967fbac..0aa48ea9b84c 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -690,6 +690,15 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): "more information.", ), # filetype/pyversion errors. + ( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "md5_digest": "bad", + }, + "Invalid value for filetype. Error: This field is required.", + ), ( { "metadata_version": "1.2", @@ -745,25 +754,6 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): "Invalid value for sha256_digest. " "Error: Use a valid, hex-encoded, SHA256 message digest.", ), - # digest and filetype interactions - ( - { - "metadata_version": "1.2", - "name": "example", - "version": "1.0", - "md5_digest": "a fake md5 digest", - }, - "Error: No digest are allowed without a file.", - ), - ( - { - "metadata_version": "1.2", - "name": "example", - "version": "1.0", - "filetype": "sdist", - }, - "Error: Include at least one message digest.", - ), # summary errors ( { @@ -1094,7 +1084,6 @@ def test_upload_fails_without_file(self, pyramid_config, db_request): EmailFactory.create(user=user) pyramid_config.testing_securitypolicy(identity=user) db_request.user = user - db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( { "metadata_version": "1.2", @@ -1113,36 +1102,6 @@ def test_upload_fails_without_file(self, pyramid_config, db_request): assert resp.status_code == 400 assert resp.status == "400 Upload payload does not have a file." - def test_upload_fails_without_filetype(self, pyramid_config, db_request): - user = UserFactory.create() - EmailFactory.create(user=user) - pyramid_config.testing_securitypolicy(identity=user) - db_request.user = user - db_request.user_agent = "warehouse-tests/6.6.6" - db_request.POST = MultiDict( - { - "metadata_version": "1.2", - "name": "example", - "version": "1.0", - "content": pretend.stub( - filename="fails-without-filetype-1.0.tar.gz", - file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), - type="application/tar", - ), - } - ) - - with pytest.raises(HTTPBadRequest) as excinfo: - legacy.file_upload(db_request) - - resp = excinfo.value - - assert resp.status_code == 400 - assert ( - resp.status - == "400 Invalid value for filetype. Error: This field is required." 
- ) - @pytest.mark.parametrize("value", [("UNKNOWN"), ("UNKNOWN\n\n")]) def test_upload_cleans_unknown_values(self, pyramid_config, db_request, value): user = UserFactory.create() @@ -1169,7 +1128,6 @@ def test_upload_escapes_nul_characters(self, pyramid_config, db_request): EmailFactory.create(user=user) pyramid_config.testing_securitypolicy(identity=user) db_request.user = user - db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( { "metadata_version": "1.2", @@ -5509,63 +5467,6 @@ def test_upload_with_stage(self, monkeypatch, db_request, pyramid_config, metric ), ] - def test_publish_without_file( - self, monkeypatch, db_request, pyramid_config, metrics - ): - from warehouse.events.models import HasEvents - from warehouse.events.tags import EventTag - - project = ProjectFactory.create() - identity = self.get_identity(project, db_request, pyramid_config) - - release = ReleaseFactory.create(project=project, version="1.0") - release.published = False - FileFactory.create(release=release) - - db_request.POST = MultiDict( - { - "metadata_version": "1.2", - "name": project.name, - "version": "1.0", - } - ) - - record_event = pretend.call_recorder( - lambda self, *, tag, request=None, additional: None - ) - monkeypatch.setattr(HasEvents, "record_event", record_event) - db_request.find_service = lambda svc, name=None, context=None: { - IMetricsService: metrics, - }.get(svc) - - resp = legacy.file_upload(db_request) - assert resp.status_code == 200 - - release = ( - db_request.db.query(Release) - .filter((Release.project == project) & (Release.version == "1.0")) - .one() - ) - - assert release.published - assert record_event.calls == [ - pretend.call( - mock.ANY, - tag=EventTag.Project.ReleasePublish, - request=db_request, - additional={ - "submitted_by": identity.username, - "canonical_version": release.canonical_version, - "uploaded_via_trusted_publisher": False, - }, - ), - ] - - assert metrics.increment.calls == [ - pretend.call("warehouse.upload.attempt"), - pretend.call("warehouse.publish.ok"), - ] - def test_publish_with_file(self, monkeypatch, db_request, pyramid_config, metrics): from warehouse.events.models import HasEvents from warehouse.events.tags import EventTag diff --git a/warehouse/forklift/forms.py b/warehouse/forklift/forms.py index 2b904e74fcc1..a908568917fe 100644 --- a/warehouse/forklift/forms.py +++ b/warehouse/forklift/forms.py @@ -57,7 +57,7 @@ class UploadForm(wtforms.Form): pyversion = wtforms.StringField(validators=[wtforms.validators.Optional()]) filetype = wtforms.StringField( validators=[ - wtforms.validators.Optional(), + wtforms.validators.InputRequired(), wtforms.validators.AnyOf( _filetype_extension_mapping.keys(), message="Use a known file type." ), @@ -119,24 +119,13 @@ def validate(self, _extra_validators=None) -> bool: ) return False - # We *must* have: - # - either no filetype, no digests (and no file) - # - a filetype, at least one digest (and a file) - if not self.filetype.data: - if ( - self.md5_digest.data - or self.sha256_digest.data - or self.blake2_256_digest.data - ): - self.form_errors.append("No digest are allowed without a file.") - return False - else: - if ( - not self.md5_digest.data - and not self.sha256_digest.data - and not self.blake2_256_digest.data - ): - self.form_errors.append("Include at least one message digest.") - return False + # We *must* have at least one digest to verify against. 
+ if ( + not self.md5_digest.data + and not self.sha256_digest.data + and not self.blake2_256_digest.data + ): + self.form_errors.append("Include at least one message digest.") + return False return success diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index d196e5798b0a..933daddd3d4f 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -637,6 +637,9 @@ def file_upload(request): ] ), ) + # Ensure that we have file data in the request. + if "content" not in request.POST: + raise _exc_with_message(HTTPBadRequest, "Upload payload does not have a file.") # Look up the project first before doing anything else, this is so we can # automatically register it if we need to and can check permissions before @@ -975,28 +978,6 @@ def file_upload(request): # at least this should be some sort of hook or trigger. _sort_releases(request, project) - # Ensure that we have file data in the request. - if "content" not in request.POST: - # We only allow empty file data to publish staged release - if is_new_release or release.published is True: - raise _exc_with_message( - HTTPBadRequest, "Upload payload does not have a file." - ) - - # In this case: publish the staged release and return early - publish_staged_release(request, project, release) - return HTTPOk() - - # From here, we know we have a file - let's start the validation - if not form.filetype.data: - raise _exc_with_message( - HTTPBadRequest, - # TODO(dm): This is the previous message (from the form validation) - # Can this be changed to something cleaner or will it break a - # downstream usage? - "Invalid value for filetype. Error: This field is required.", - ) - # Pull the filename out of our POST data. filename = request.POST["content"].filename From c781e5bf7717598c2b91e97c8a322f13881c6ba5 Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 27 Dec 2024 15:14:32 +0100 Subject: [PATCH 09/26] Add test with OIDC publishing --- tests/unit/forklift/test_legacy.py | 96 ++++++++++++++++++++++-------- warehouse/packaging/tasks.py | 2 +- 2 files changed, 71 insertions(+), 27 deletions(-) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index 0aa48ea9b84c..de14febbcf1f 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -5345,23 +5345,39 @@ def test_upload_fails_when_license_and_license_expression_are_present( class TestStagedRelease: @staticmethod - def get_identity(project, db_request, pyramid_config): - identity = UserFactory.create() - EmailFactory.create(user=identity) - RoleFactory.create(user=identity, project=project) + def get_identity(test_with_user, project, db_request, pyramid_config): + + if test_with_user: + identity = UserFactory.create() + EmailFactory.create(user=identity) + RoleFactory.create(user=identity, project=project) + db_request.user = identity + else: + publisher = GitHubPublisherFactory.create(projects=[project]) + claims = {"sha": "somesha"} + identity = PublisherTokenContext(publisher, SignedClaims(claims)) + db_request.oidc_publisher = identity.publisher + db_request.oidc_claims = identity.claims + - db_request.user = identity db_request.user_agent = "warehouse-tests/6.6.6" pyramid_config.testing_securitypolicy(identity=identity) return identity - def test_upload_with_stage(self, monkeypatch, db_request, pyramid_config, metrics): + @pytest.mark.parametrize( + "test_with_user", + [ + True, + False, + ], + ) + def test_upload_with_stage(self, test_with_user, monkeypatch, db_request, pyramid_config, 
metrics): from warehouse.events.models import HasEvents from warehouse.events.tags import EventTag project = ProjectFactory.create() - identity = self.get_identity(project, db_request, pyramid_config) + identity = self.get_identity(test_with_user, project, db_request, pyramid_config) filename = "{}-{}.tar.gz".format( project.normalized_name.replace("-", "_"), "1.0" @@ -5424,32 +5440,44 @@ def test_upload_with_stage(self, monkeypatch, db_request, pyramid_config, metric release.project.name, release.version, "new release", - identity, + identity if test_with_user else None, ), ( release.project.name, release.version, f"add source file {filename}", - identity, + identity if test_with_user else None, ), ] # Ensure that all of our events have been created release_event = { - "submitted_by": identity.username, + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), "canonical_version": release.canonical_version, - "publisher_url": None, - "uploaded_via_trusted_publisher": False, + "publisher_url": ( + f"{identity.publisher.publisher_url()}/commit/somesha" + if not test_with_user + else None + ), + "uploaded_via_trusted_publisher": not test_with_user, "published": False, } fileadd_event = { "filename": filename, - "submitted_by": identity.username, + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), "canonical_version": release.canonical_version, - "publisher_url": None, + "publisher_url": ( + f"{identity.publisher.publisher_url()}/commit/somesha" + if not test_with_user + else None + ), "project_id": str(project.id), - "uploaded_via_trusted_publisher": False, + "uploaded_via_trusted_publisher": not test_with_user, } assert record_event.calls == [ @@ -5467,12 +5495,19 @@ def test_upload_with_stage(self, monkeypatch, db_request, pyramid_config, metric ), ] - def test_publish_with_file(self, monkeypatch, db_request, pyramid_config, metrics): + @pytest.mark.parametrize( + "test_with_user", + [ + True, + False, + ], + ) + def test_publish_with_file(self, test_with_user, monkeypatch, db_request, pyramid_config, metrics): from warehouse.events.models import HasEvents from warehouse.events.tags import EventTag project = ProjectFactory.create() - identity = self.get_identity(project, db_request, pyramid_config) + identity = self.get_identity(test_with_user, project, db_request, pyramid_config) # Create a release and add a file release = ReleaseFactory.create(project=project, version="1.0") @@ -5499,9 +5534,8 @@ def test_publish_with_file(self, monkeypatch, db_request, pyramid_config, metric } ) - storage_service = pretend.stub(store=lambda path, filepath, meta: None) db_request.find_service = lambda svc, name=None, context=None: { - IFileStorage: storage_service, + IFileStorage: pretend.stub(store=lambda path, filepath, meta: None), IMetricsService: metrics, }.get(svc) @@ -5510,6 +5544,8 @@ def test_publish_with_file(self, monkeypatch, db_request, pyramid_config, metric ) monkeypatch.setattr(HasEvents, "record_event", record_event) + assert not release.published + resp = legacy.file_upload(db_request) assert resp.status_code == 200 @@ -5539,30 +5575,38 @@ def test_publish_with_file(self, monkeypatch, db_request, pyramid_config, metric release.project.name, release.version, f"add source file {filename}", - identity, + identity if test_with_user else None, ), ( release.project.name, release.version, "publish release", - identity, + identity if test_with_user else None, ), ] # Ensure that all of our events have been created 
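# A minimal client-side sketch of the staged-release flow these tests exercise,
# assuming a Warehouse instance at http://localhost/legacy/ and an API token;
# the project name, file paths, and token placeholder are illustrative.
import hashlib
import requests

def upload(path, *, filetype, staged, pyversion=None):
    with open(path, "rb") as f:
        contents = f.read()
    data = {
        ":action": "file_upload",
        "protocol_version": "1",
        "metadata_version": "2.1",
        "name": "sampleproject",
        "version": "3.0.0",
        "filetype": filetype,
        "sha256_digest": hashlib.sha256(contents).hexdigest(),
    }
    if pyversion is not None:
        data["pyversion"] = pyversion
    return requests.post(
        "http://localhost/legacy/",
        data=data,
        files={"content": (path.rsplit("/", 1)[-1], contents)},
        # Staged uploads carry the header; a later upload without it publishes.
        headers={"X-PyPI-Is-Staged": "1"} if staged else {},
        auth=("__token__", "pypi-<token>"),  # placeholder credential
    )

# Stage the sdist first, then publish the release by uploading the wheel
# without the staging header.
upload("dist/sampleproject-3.0.0.tar.gz", filetype="sdist", staged=True)
upload(
    "dist/sampleproject-3.0.0-py3-none-any.whl",
    filetype="bdist_wheel",
    pyversion="3.11",
    staged=False,
)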
release_event = { - "submitted_by": identity.username, + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), "canonical_version": release.canonical_version, - "uploaded_via_trusted_publisher": False, + "uploaded_via_trusted_publisher": not test_with_user, } fileadd_event = { "filename": filename, - "submitted_by": (identity.username), + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), "canonical_version": release.canonical_version, - "publisher_url": None, + "publisher_url": ( + f"{identity.publisher.publisher_url()}/commit/somesha" + if not test_with_user + else None + ), "project_id": str(project.id), - "uploaded_via_trusted_publisher": False, + "uploaded_via_trusted_publisher": not test_with_user, } assert record_event.calls == [ diff --git a/warehouse/packaging/tasks.py b/warehouse/packaging/tasks.py index 3235362acdb1..f3d5a1a9c484 100644 --- a/warehouse/packaging/tasks.py +++ b/warehouse/packaging/tasks.py @@ -456,7 +456,7 @@ def delete_staged_stalled_releases(request): request.db.query(Release) .filter(Release.published.is_(False)) .filter( - # The token has been created at more than 1 day ago + # The release has been created at more than 15 day ago Release.created + timedelta(days=15) < datetime.now(tz=timezone.utc) ) From 90bc0b3e99f0fcd17286f1945c5f27f325af93c1 Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 27 Dec 2024 16:02:56 +0100 Subject: [PATCH 10/26] Update translations --- warehouse/locale/messages.pot | 149 ++++++++++++++++++---------------- 1 file changed, 79 insertions(+), 70 deletions(-) diff --git a/warehouse/locale/messages.pot b/warehouse/locale/messages.pot index b9dddd13049f..4357ed484f0a 100644 --- a/warehouse/locale/messages.pot +++ b/warehouse/locale/messages.pot @@ -781,7 +781,7 @@ msgid "Your report has been recorded. Thank you for your help." 
msgstr "" #: warehouse/subscriptions/models.py:35 -#: warehouse/templates/manage/project/history.html:238 +#: warehouse/templates/manage/project/history.html:249 msgid "Active" msgstr "" @@ -1757,10 +1757,10 @@ msgstr "" #: warehouse/templates/accounts/register.html:64 #: warehouse/templates/manage/account.html:139 #: warehouse/templates/manage/account.html:480 -#: warehouse/templates/manage/project/history.html:309 #: warehouse/templates/manage/project/history.html:320 #: warehouse/templates/manage/project/history.html:331 #: warehouse/templates/manage/project/history.html:342 +#: warehouse/templates/manage/project/history.html:353 #: warehouse/templates/manage/project/settings.html:224 #: warehouse/templates/manage/project/settings.html:285 #: warehouse/templates/manage/project/settings.html:291 @@ -3446,7 +3446,7 @@ msgstr "" #: warehouse/templates/manage/account.html:564 #: warehouse/templates/manage/account.html:583 -#: warehouse/templates/manage/project/history.html:280 +#: warehouse/templates/manage/project/history.html:291 #: warehouse/templates/manage/unverified-account.html:287 #: warehouse/templates/manage/unverified-account.html:306 msgid "Reason:" @@ -3634,15 +3634,15 @@ msgstr "" #: warehouse/templates/manage/account.html:684 #: warehouse/templates/manage/account.html:707 -#: warehouse/templates/manage/project/history.html:271 -#: warehouse/templates/manage/project/history.html:278 +#: warehouse/templates/manage/project/history.html:282 +#: warehouse/templates/manage/project/history.html:289 #: warehouse/templates/manage/unverified-account.html:403 #: warehouse/templates/manage/unverified-account.html:426 msgid "Token name:" msgstr "" #: warehouse/templates/manage/account.html:701 -#: warehouse/templates/manage/project/history.html:273 +#: warehouse/templates/manage/project/history.html:284 #: warehouse/templates/manage/unverified-account.html:420 msgid "API token removed" msgstr "" @@ -3738,7 +3738,7 @@ msgstr "" #: warehouse/templates/manage/account.html:780 #: warehouse/templates/manage/organization/history.html:201 -#: warehouse/templates/manage/project/history.html:360 +#: warehouse/templates/manage/project/history.html:371 #: warehouse/templates/manage/team/history.html:108 #: warehouse/templates/manage/unverified-account.html:466 msgid "Event" @@ -3747,8 +3747,8 @@ msgstr "" #: warehouse/templates/manage/account.html:781 #: warehouse/templates/manage/organization/history.html:202 #: warehouse/templates/manage/organization/history.html:211 -#: warehouse/templates/manage/project/history.html:361 -#: warehouse/templates/manage/project/history.html:370 +#: warehouse/templates/manage/project/history.html:372 +#: warehouse/templates/manage/project/history.html:381 #: warehouse/templates/manage/team/history.html:109 #: warehouse/templates/manage/team/history.html:118 #: warehouse/templates/manage/unverified-account.html:467 @@ -3775,7 +3775,7 @@ msgstr "" #: warehouse/templates/manage/account.html:795 #: warehouse/templates/manage/organization/history.html:217 -#: warehouse/templates/manage/project/history.html:376 +#: warehouse/templates/manage/project/history.html:387 #: warehouse/templates/manage/team/history.html:124 #: warehouse/templates/manage/unverified-account.html:481 msgid "Device Info" @@ -4104,12 +4104,12 @@ msgstr "" #: warehouse/templates/manage/manage_base.html:582 #: warehouse/templates/manage/organization/history.html:166 #: warehouse/templates/manage/project/history.html:43 -#: warehouse/templates/manage/project/history.html:105 -#: 
warehouse/templates/manage/project/history.html:145 -#: warehouse/templates/manage/project/history.html:190 -#: warehouse/templates/manage/project/history.html:216 -#: warehouse/templates/manage/project/history.html:307 -#: warehouse/templates/manage/project/history.html:329 +#: warehouse/templates/manage/project/history.html:116 +#: warehouse/templates/manage/project/history.html:156 +#: warehouse/templates/manage/project/history.html:201 +#: warehouse/templates/manage/project/history.html:227 +#: warehouse/templates/manage/project/history.html:318 +#: warehouse/templates/manage/project/history.html:340 #: warehouse/templates/manage/team/history.html:88 msgid "Added by:" msgstr "" @@ -4117,10 +4117,10 @@ msgstr "" #: warehouse/templates/manage/manage_base.html:584 #: warehouse/templates/manage/organization/history.html:171 #: warehouse/templates/manage/project/history.html:70 -#: warehouse/templates/manage/project/history.html:136 -#: warehouse/templates/manage/project/history.html:152 -#: warehouse/templates/manage/project/history.html:198 -#: warehouse/templates/manage/project/history.html:224 +#: warehouse/templates/manage/project/history.html:147 +#: warehouse/templates/manage/project/history.html:163 +#: warehouse/templates/manage/project/history.html:209 +#: warehouse/templates/manage/project/history.html:235 #: warehouse/templates/manage/team/history.html:93 msgid "Removed by:" msgstr "" @@ -4130,7 +4130,7 @@ msgid "Submitted by:" msgstr "" #: warehouse/templates/manage/manage_base.html:589 -#: warehouse/templates/manage/project/history.html:255 +#: warehouse/templates/manage/project/history.html:266 msgid "Workflow:" msgstr "" @@ -4144,7 +4144,7 @@ msgstr "" #: warehouse/templates/manage/manage_base.html:596 #: warehouse/templates/manage/project/history.html:52 -#: warehouse/templates/manage/project/history.html:114 +#: warehouse/templates/manage/project/history.html:125 msgid "URL:" msgstr "" @@ -5298,8 +5298,8 @@ msgid "Created by:" msgstr "" #: warehouse/templates/manage/organization/history.html:144 -#: warehouse/templates/manage/project/history.html:318 -#: warehouse/templates/manage/project/history.html:340 +#: warehouse/templates/manage/project/history.html:329 +#: warehouse/templates/manage/project/history.html:351 #: warehouse/templates/manage/team/history.html:76 msgid "Deleted by:" msgstr "" @@ -5318,26 +5318,26 @@ msgid "Declined by:" msgstr "" #: warehouse/templates/manage/organization/history.html:176 -#: warehouse/templates/manage/project/history.html:159 -#: warehouse/templates/manage/project/history.html:206 +#: warehouse/templates/manage/project/history.html:170 +#: warehouse/templates/manage/project/history.html:217 #: warehouse/templates/manage/team/history.html:98 msgid "Changed by:" msgstr "" #: warehouse/templates/manage/organization/history.html:181 #: warehouse/templates/manage/organization/history.html:186 -#: warehouse/templates/manage/project/history.html:166 -#: warehouse/templates/manage/project/history.html:173 +#: warehouse/templates/manage/project/history.html:177 +#: warehouse/templates/manage/project/history.html:184 msgid "Invited by:" msgstr "" #: warehouse/templates/manage/organization/history.html:191 -#: warehouse/templates/manage/project/history.html:180 +#: warehouse/templates/manage/project/history.html:191 msgid "Revoked by:" msgstr "" #: warehouse/templates/manage/organization/history.html:198 -#: warehouse/templates/manage/project/history.html:357 +#: warehouse/templates/manage/project/history.html:368 #: 
warehouse/templates/manage/team/history.html:105 #, python-format msgid "Security history for %(source_name)s" @@ -5866,168 +5866,177 @@ msgstr "" msgid "Version %(version)s unyanked" msgstr "" -#: warehouse/templates/manage/project/history.html:90 +#: warehouse/templates/manage/project/history.html:88 +#, python-format +msgid "Version %(version)s published" +msgstr "" + +#: warehouse/templates/manage/project/history.html:91 +msgid "Published by:" +msgstr "" + +#: warehouse/templates/manage/project/history.html:101 #, python-format msgid "File added to version %(version)s" msgstr "" -#: warehouse/templates/manage/project/history.html:102 -#: warehouse/templates/manage/project/history.html:133 +#: warehouse/templates/manage/project/history.html:113 +#: warehouse/templates/manage/project/history.html:144 #: warehouse/templates/manage/project/release.html:112 msgid "Filename:" msgstr "" -#: warehouse/templates/manage/project/history.html:121 +#: warehouse/templates/manage/project/history.html:132 #, python-format msgid "File removed from version %(version)s" msgstr "" -#: warehouse/templates/manage/project/history.html:142 +#: warehouse/templates/manage/project/history.html:153 #, python-format msgid "%(username)s added as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:149 +#: warehouse/templates/manage/project/history.html:160 #, python-format msgid "%(username)s removed as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:156 +#: warehouse/templates/manage/project/history.html:167 #, python-format msgid "%(username)s changed to project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:163 +#: warehouse/templates/manage/project/history.html:174 #, python-format msgid "" "%(username)s invited to join as project " "%(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:170 +#: warehouse/templates/manage/project/history.html:181 #, python-format msgid "" "%(username)s declined invitation to join as " "project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:177 +#: warehouse/templates/manage/project/history.html:188 #, python-format msgid "" "Revoked invitation for %(username)s to join as " "project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:187 +#: warehouse/templates/manage/project/history.html:198 #, python-format msgid "%(team_name)s team added as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:195 +#: warehouse/templates/manage/project/history.html:206 #, python-format msgid "%(team_name)s team changed to project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:203 +#: warehouse/templates/manage/project/history.html:214 #, python-format msgid "%(team_name)s team removed as project %(role_name)s" msgstr "" -#: warehouse/templates/manage/project/history.html:213 +#: warehouse/templates/manage/project/history.html:224 #, python-format msgid "Project added to %(organization_name)s organization" msgstr "" -#: warehouse/templates/manage/project/history.html:221 +#: warehouse/templates/manage/project/history.html:232 #, python-format msgid "Project removed from %(organization_name)s organization" msgstr "" -#: warehouse/templates/manage/project/history.html:229 +#: warehouse/templates/manage/project/history.html:240 msgid "Short-lived API token created" msgstr "" -#: warehouse/templates/manage/project/history.html:230 -#: 
warehouse/templates/manage/project/history.html:260 -#: warehouse/templates/manage/project/history.html:274 +#: warehouse/templates/manage/project/history.html:241 +#: warehouse/templates/manage/project/history.html:271 +#: warehouse/templates/manage/project/history.html:285 msgid "Permissions: Can upload to this project" msgstr "" -#: warehouse/templates/manage/project/history.html:233 -#: warehouse/templates/manage/project/history.html:268 +#: warehouse/templates/manage/project/history.html:244 +#: warehouse/templates/manage/project/history.html:279 msgid "Expiration:" msgstr "" -#: warehouse/templates/manage/project/history.html:236 +#: warehouse/templates/manage/project/history.html:247 msgid "Expiration status:" msgstr "" -#: warehouse/templates/manage/project/history.html:240 +#: warehouse/templates/manage/project/history.html:251 msgid "Expired" msgstr "" -#: warehouse/templates/manage/project/history.html:245 +#: warehouse/templates/manage/project/history.html:256 msgid "Creator" msgstr "" -#: warehouse/templates/manage/project/history.html:259 +#: warehouse/templates/manage/project/history.html:270 msgid "API token created" msgstr "" -#: warehouse/templates/manage/project/history.html:263 -#: warehouse/templates/manage/project/history.html:276 +#: warehouse/templates/manage/project/history.html:274 +#: warehouse/templates/manage/project/history.html:287 msgid "Controlled by:" msgstr "" -#: warehouse/templates/manage/project/history.html:285 +#: warehouse/templates/manage/project/history.html:296 msgid "Trusted publisher added" msgstr "" -#: warehouse/templates/manage/project/history.html:288 +#: warehouse/templates/manage/project/history.html:299 msgid "Trusted publisher removed" msgstr "" -#: warehouse/templates/manage/project/history.html:293 +#: warehouse/templates/manage/project/history.html:304 msgid "2FA requirement enabled" msgstr "" -#: warehouse/templates/manage/project/history.html:295 +#: warehouse/templates/manage/project/history.html:306 msgid "Enabled by:" msgstr "" -#: warehouse/templates/manage/project/history.html:298 +#: warehouse/templates/manage/project/history.html:309 msgid "2FA requirement disabled" msgstr "" -#: warehouse/templates/manage/project/history.html:300 +#: warehouse/templates/manage/project/history.html:311 msgid "Disabled by:" msgstr "" -#: warehouse/templates/manage/project/history.html:305 -#: warehouse/templates/manage/project/history.html:327 +#: warehouse/templates/manage/project/history.html:316 +#: warehouse/templates/manage/project/history.html:338 msgid "Project alternate repository added" msgstr "" -#: warehouse/templates/manage/project/history.html:310 #: warehouse/templates/manage/project/history.html:321 #: warehouse/templates/manage/project/history.html:332 #: warehouse/templates/manage/project/history.html:343 +#: warehouse/templates/manage/project/history.html:354 #: warehouse/templates/manage/project/settings.html:225 #: warehouse/templates/manage/project/settings.html:305 #: warehouse/templates/manage/project/settings.html:311 msgid "Url" msgstr "" -#: warehouse/templates/manage/project/history.html:316 -#: warehouse/templates/manage/project/history.html:338 +#: warehouse/templates/manage/project/history.html:327 +#: warehouse/templates/manage/project/history.html:349 msgid "Project alternate repository deleted" msgstr "" -#: warehouse/templates/manage/project/history.html:362 +#: warehouse/templates/manage/project/history.html:373 msgid "Additional info" msgstr "" -#: warehouse/templates/manage/project/history.html:374 +#: 
warehouse/templates/manage/project/history.html:385 #: warehouse/templates/manage/team/history.html:122 msgid "Location info" msgstr "" From 2b510927fccd4ad4f4528ba5f98238cf06e03df5 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 31 Dec 2024 10:49:36 +0100 Subject: [PATCH 11/26] Upgrade tests --- tests/functional/_fixtures/README.md | 8 + .../sampleproject-3.0.0-py3-none-any.whl | Bin 0 -> 4662 bytes tests/functional/forklift/test_legacy.py | 127 +++++++- tests/unit/forklift/test_legacy.py | 278 +++++++++++------- warehouse/forklift/legacy.py | 3 + 5 files changed, 308 insertions(+), 108 deletions(-) create mode 100644 tests/functional/_fixtures/sampleproject-3.0.0-py3-none-any.whl diff --git a/tests/functional/_fixtures/README.md b/tests/functional/_fixtures/README.md index f959f57bc7ee..5293d4b3a172 100644 --- a/tests/functional/_fixtures/README.md +++ b/tests/functional/_fixtures/README.md @@ -6,3 +6,11 @@ This is from https://pypi.org/project/sampleproject/3.0.0/#files, get it with: ``` $ wget https://files.pythonhosted.org/packages/67/2a/9f056e5fa36e43ef1037ff85581a2963cde420457de0ef29c779d41058ca/sampleproject-3.0.0.tar.gz ``` + +## `sampleproject-3.0.0-py3-none-any.whl` + +This is from https://pypi.org/project/sampleproject/3.0.0/#files, get it with: + +``` +$ wget https://files.pythonhosted.org/packages/ec/a8/5ec62d18adde798d33a170e7f72930357aa69a60839194c93eb0fb05e59c/sampleproject-3.0.0-py3-none-any.whl +``` diff --git a/tests/functional/_fixtures/sampleproject-3.0.0-py3-none-any.whl b/tests/functional/_fixtures/sampleproject-3.0.0-py3-none-any.whl new file mode 100644 index 0000000000000000000000000000000000000000..c36a12ca4c94c2230fe923e728d77e42b8340e89 GIT binary patch literal 4662 zcmai&cQ{<@_QyvZT_S39B0-E%qa-1s&nSr!Z3JPIVALoP!-yKuOAs7g5JWGDXi-KT zHG1y^(M7xCoabKW`2Ft9-p|_m*?+9hyWah*y`JxPKZ4@nQv(11BEaKVl=jk&$$cH{ zlN~mwu;FB4Yi|Pw8yQ(4telOE`0d@bJBrA?1<2%jl6lk$T1iR+Wzvj&Gp`slQx7Kx zhUQJ4BvHcjMiOI*P320@mf4GP1)8#fXXHZNQNKms2C;Q0^K-qG$k+|(H)%rAgF&8(c9K~@NJJFtqfth}10 zJioJ>v*BZC*n5F1UypR&I*!C(_8gW{#lYf>dH9^OHVK?Kw<@{B8?qjsdRi#C9EFEt z-qs$n2^R3CbK#} zet-a7N=f>-(X{xWY^g17K#%?=(6ajFy%GGhfR~jG!=X69W2r#|+r;(j&fZG2cQc4j zuYh+guZ}%r_OXM7yu$o4mmFpvjM$+lsK8<2xNfMmAJjC<*E>=T6cc#q;o%c3yRGTl z<1b$AFHVIodeR>mP_a0XG-iluUM$1E0oSa9@=WeKzZr5}FsR43~q^fx#h9e>NdYN2oWr7+NWy7#b8T6bi}Ge+O@=am)&~B(w8L$j~X6* zC-(-q1mi^`Mgjr@Tq}=CSS{`!fETW<6yj?=wf48||-{n+0O z;1B_KQ1nx|6}`4AZl$_12?gbnM4LMrE@&95(^`E+oBhP?7T4j%tAasaXDHwhPvm5L z2($q?S2sjTx5E+)8)AE@zg1~n7mtQ9#v>dHQpc7wv~lN!GeBlV{U106(*)E%yt#j+ zQ1?OEjJuk~TRTk6O|S4=$L#s`wmQgseBDCH1B+t`Ix!xym<3qDwbp}#+CX;*d$2kF z=c?B~AdiA~#F04>xheEHylZ3=#gdyNuR8#s-O2adstorsyIy~&WA+8h1NSc5B zw2J8AhY78jq$@)8PII}IaU5S!e+=1Od>ZFn93P?!#Q*^S?yCU6&EIyhs=StroQ#%C zjLvA}X!BLi7M)TZ%E(-$N5$9LO`hqnQRT_USKt-rKE#WFX9$9#OO}6!*q(jrqvSN z+R-^2j^ETYJf2ha;{~0zlg_)AQOC0ErN9!U_{mkYc%gUzPDAGBW#f7;Bfe(X8P%L7 zM~>-Px&Zyz{s@tLgL#SrGtG%@9^{1^+$YL*k1A4c`2nNJy&E)O zI7IgLkGFRSTZhhAXe2cmPeIIMxq6FjYcHdDq@IoI@?C%GypJ)3;Tc9nxOdS;i`>c! 
z9I2{Mi)GhlvLE#eKS?>EsB%J)rH+cc2ujcFL$f4`j=0wawch<$-s4`dPbM({&!W#^ zK)UxOvOUY+$ZQ-jSI+^ZlotH-)21B8*v2OT%FEVpNxig`btU6%O|B`X>M;g?dt6xp z`Q8=QVzPM_Y$V1GTKtXuN@{OhwePS!>;tLp6^g;+nU6j zb$QY9&idnn3V9~}cOqfxH`AA&QTYu8o2R3Hh$qyfS783=yp|x>$Bp`Fwob9GqA(p9 zqSol<{JML}RwO7gtR|{{l{@|?!}o1^Ta&BbltrqO@C8AL*``+v4M~C>-D$%w-u5+E zi9=r{hKiC*&c2Qnt2b*$(vc# z$8sdyL4bV1PtBUbnleuB%Wi_ynD3XSMc@GgSI?{FO@(bes@W3Yj)SAnPj-F(`y?}# zXjFCRVjpmd$;GlONpPvVVeq;UdcIS}x7P06G+H2}(HQqV^odjeQcJSkUw#yJRPobH zNvt|NW{PA?&mex<_>;V#nDx5P>y<}A^>ASABjl(JPi&(1!tGt4(jj4{hsi=UPgTgh zxK`(&g?U-S!|I8r5O5O?!Ob@Hh4)YJNEA|-w<8O8>Q-dFx3Ti9WU1BCF4Z0BvFufM zbvXF%zAaUSmvyOvty?I?p4dgFkLQL&?rw_5x+TRf`G%yj`!nzIwB{vMhKFGGp#@t2 zRjpV{K;v{HOqi~im{)rKL~~M7+NxD7b&Kqo{rbZqMpHf^BL!+LZ`Uy4pCL2%5Jn%^ z%;_QW^@a$JHrv;0#)pU*!Q||=BfI2}KO3V&Pj}2hz85AJZ2ijN<==rLH=iALE0}oi zjdrr1>~I3j`Be4u-Wa%fWk}qzk(SUhW_f}v$JF>5%=2hi zik`*PnL<#PcTPQbHL~yQDSU`ndQ_iGaCUE`iKpM z-o-bk0Kwi)1a}u7k|Dfr=RS`RG6Fc@TyvxDFKM2`2+rbUW?Bcjs9yc{qXRiGQ7C(( zl2|6(BT)xRAt4!UIZVLnGTG~TQ@VW6eIwFvFOeUHZ#<M!XXd}} zzf;yh6+JVaduK1S5dLC~(>1Swp~fj=oSEN^TT8T-&T?0orLG7R?9NB{9L_#MrO8*R zF%0?gPWQOOb);SN8nF&hfc)Ip8uk4fo-x6%=d#g$y#?v4bH%ybV}&Yvt|OUOVL2NT z-wo6zoA3$4>P7)+X?a-%r4dXrpZvAv_?06P7T6DAu>CH%cX+T5gsD>2M(S0=m{}N@U1{?iXDgWTH!FI*ajZ&ycmiH1PG@RXx^ClmzXLxYPolptt6{va?g=P^bABV3LV?wKQL>jwAi z@(q%6gCLz~m0}HU9$wXX5@nSN4ek#jgPrYIDX!X)H#dWONp_z|C63&{7Im)Y`F~p+ z9O3NfZe(v~g>ZH{*WSTuUENrlpd12L<8R~R<_VqWRqFt?DrWnuqh9{BNR$ zysWy0oL^6SWSRgq;gqK|>Cl*;JlK+|N(GNMko-NhaWvaIHliVQX|+!A`7X!+dj053 zGF#nRf7;boNtV!;cBgqqp0~9P=;G4EUwi13y4P1kh+Z!KoV;YwZ?-2TZh=nxuF3sg zG>$%baq?F!mFl+B%Z8;7)I> zIIFHh{rDtm2M?NHdUOhB#E?!si#x06C`#oDWwnZZM#dQ4QZ^5aoLQc!cf_zLg~n>f zb_9!9xT(z>-KZ4kN3pC>B9rX7G3a^*=E*3seNxV_*R3`X)lu=;gM0(NcAgdf-i=++ zX@c4cw&RyW4Zb<;Viv^sR8M;jiWeI{87$^6Gv}vmUX8Hk*;h^~>wq$dHO~86LK9NG zCc3|5wN-M+TI32DcOwyvo>sSZiF_fnrZeX6a#Ws_s@zoQf9gvHlBZ#D*iFQQkdxS; zAtG-^WwqjvH`jt~XwG)lHERB7VrWE2+0KNU#)5InF`E_MCKE{PH->{tjrZTZHMSG} zJTg${xBu(3FXvx2#eZc302!!&i~P$L`7-3PGroYdTtNQglmDLm|5@ORY$j}f|L?y3 zn<2i8z3fRYurqkS!~WZ$TxMRLX&1~r?4O1IUFx3`?lSQ59J&BTk^K&Mc`99IU!FM^ sY&`PcvHvlFE`u+Ro_~W&akPI6{%a6Df)Ws(XAxu1X6yjdr#yf97pIBpIRF3v literal 0 HcmV?d00001 diff --git a/tests/functional/forklift/test_legacy.py b/tests/functional/forklift/test_legacy.py index 3f0bc7dbd684..f10320ffb94b 100644 --- a/tests/functional/forklift/test_legacy.py +++ b/tests/functional/forklift/test_legacy.py @@ -75,7 +75,14 @@ def test_remove_doc_upload(webtest): ("/legacy/", {":action": "file_upload", "protocol_version": "1"}), ], ) -def test_file_upload(webtest, upload_url, additional_data): +@pytest.mark.parametrize( + "staged_release", + [ + True, + False, + ], +) +def test_file_upload(webtest, upload_url, additional_data, staged_release): user = UserFactory.create(with_verified_primary_email=True, clear_pwd="password") # Construct the macaroon @@ -118,9 +125,15 @@ def test_file_upload(webtest, upload_url, additional_data): params.add("classifiers", "Programming Language :: Python :: 3.10") params.add("classifiers", "Programming Language :: Python :: 3.11") + headers = { + "Authorization": f"Basic {credentials}", + } + if staged_release: + headers["X-PyPI-Is-Staged"] = "1" + webtest.post( upload_url, - headers={"Authorization": f"Basic {credentials}"}, + headers=headers, params=params, upload_files=[("content", "sampleproject-3.0.0.tar.gz", content)], status=HTTPStatus.OK, @@ -134,6 +147,116 @@ def test_file_upload(webtest, upload_url, additional_data): assert len(project.releases) == 1 release = project.releases[0] assert release.version == "3.0.0" + assert release.published 
!= staged_release + + +@pytest.mark.parametrize( + "stage_first_file", + [ + True, + False, + ], +) +@pytest.mark.parametrize( + "stage_second_file", + [ + True, + False, + ], +) +def test_stage_release(webtest, stage_first_file, stage_second_file): + user = UserFactory.create(with_verified_primary_email=True, clear_pwd="password") + + # Construct the macaroon + dm = MacaroonFactory.create( + user_id=user.id, + caveats=[caveats.RequestUser(user_id=str(user.id))], + ) + + m = pymacaroons.Macaroon( + location="localhost", + identifier=str(dm.id), + key=dm.key, + version=pymacaroons.MACAROON_V2, + ) + for caveat in dm.caveats: + m.add_first_party_caveat(caveats.serialize(caveat)) + serialized_macaroon = f"pypi-{m.serialize()}" + + credentials = base64.b64encode(f"__token__:{serialized_macaroon}".encode()).decode( + "utf-8" + ) + + with open("./tests/functional/_fixtures/sampleproject-3.0.0.tar.gz", "rb") as f: + first_file = f.read() + + with open( + "./tests/functional/_fixtures/sampleproject-3.0.0-py3-none-any.whl", "rb" + ) as f: + second_file = f.read() + + webtest.post( + "/legacy/?:action=file_upload", + headers={ + "Authorization": f"Basic {credentials}", + **({"X-PyPI-Is-Staged": "1"} if stage_first_file else {}), + }, + params=MultiDict( + { + "name": "sampleproject", + "sha256_digest": ( + "117ed88e5db073bb92969a7545745fd977ee85b7019706dd256a64058f70963d" + ), + "filetype": "sdist", + "metadata_version": "2.1", + "version": "3.0.0", + "classifiers": "Programming Language :: Python :: 3.11", + } + ), + upload_files=[("content", "sampleproject-3.0.0.tar.gz", first_file)], + status=HTTPStatus.OK, + ) + + assert user.projects + assert len(user.projects) == 1 + project = user.projects[0] + assert project.name == "sampleproject" + assert project.releases + assert len(project.releases) == 1 + release = project.releases[0] + + assert release.published != stage_first_file + + second_request_status = ( + HTTPStatus.BAD_REQUEST + if stage_second_file and not stage_first_file + else HTTPStatus.OK + ) + webtest.post( + "/legacy/?:action=file_upload", + headers={ + "Authorization": f"Basic {credentials}", + **({"X-PyPI-Is-Staged": "1"} if stage_second_file else {}), + }, + params=MultiDict( + { + "name": "sampleproject", + "sha256_digest": ( + "2e52702990c22cf1ce50206606b769fe0dbd5646a32873916144bd5aec5473b3" + ), + "filetype": "bdist_wheel", + "metadata_version": "2.1", + "version": "3.0.0", + "pyversion": "3.11", + "classifiers": "Programming Language :: Python :: 3.11", + } + ), + upload_files=[("content", "sampleproject-3.0.0-py3-none-any.whl", second_file)], + status=second_request_status, + ) + + if second_request_status == HTTPStatus.OK: + assert release.published != stage_second_file def test_duplicate_file_upload_error(webtest): diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index de14febbcf1f..0b04e724bfa8 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -39,6 +39,8 @@ from warehouse.admin.flags import AdminFlag, AdminFlagValue from warehouse.attestations.interfaces import IIntegrityService from warehouse.classifiers.models import Classifier +from warehouse.events.models import HasEvents +from warehouse.events.tags import EventTag from warehouse.forklift import legacy, metadata from warehouse.macaroons import IMacaroonService, caveats, security_policy from warehouse.metrics import IMetricsService @@ -568,6 +570,44 @@ def test_is_duplicate_false(self, pyramid_config, db_request): assert 
legacy._is_duplicate_file(db_request.db, filename, wrong_hashes) is False +# Deduplication helpers +def fileadd_event(filename, identity, release, project): + """Constructs the FileAdd event mapping.""" + test_with_user = not hasattr(identity, "publisher") + return { + "filename": filename, + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), + "canonical_version": release.canonical_version, + "publisher_url": ( + f"{identity.publisher.publisher_url()}/commit/somesha" + if not test_with_user + else None + ), + "project_id": str(project.id), + "uploaded_via_trusted_publisher": not test_with_user, + } + + +def release_event(identity, release): + """Constructs the Release event mapping.""" + test_with_user = not hasattr(identity, "publisher") + return { + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), + "canonical_version": release.canonical_version, + "publisher_url": ( + f"{identity.publisher.publisher_url()}/commit/somesha" + if not test_with_user + else None + ), + "uploaded_via_trusted_publisher": not test_with_user, + "published": release.published, + } + + class TestFileUpload: def test_fails_disallow_new_upload(self, pyramid_config, pyramid_request): pyramid_request.flags = pretend.stub( @@ -3483,9 +3523,6 @@ def test_upload_succeeds_creates_release( expected_version, test_with_user, ): - from warehouse.events.models import HasEvents - from warehouse.events.tags import EventTag - project = ProjectFactory.create() if test_with_user: identity = UserFactory.create() @@ -3602,47 +3639,18 @@ def test_upload_succeeds_creates_release( ] # Ensure that all of our events have been created - release_event = { - "submitted_by": ( - identity.username if test_with_user else "OpenID created token" - ), - "canonical_version": release.canonical_version, - "publisher_url": ( - f"{identity.publisher.publisher_url()}/commit/somesha" - if not test_with_user - else None - ), - "uploaded_via_trusted_publisher": not test_with_user, - "published": True, - } - - fileadd_event = { - "filename": filename, - "submitted_by": ( - identity.username if test_with_user else "OpenID created token" - ), - "canonical_version": release.canonical_version, - "publisher_url": ( - f"{identity.publisher.publisher_url()}/commit/somesha" - if not test_with_user - else None - ), - "project_id": str(project.id), - "uploaded_via_trusted_publisher": not test_with_user, - } - assert record_event.calls == [ pretend.call( mock.ANY, tag=EventTag.Project.ReleaseAdd, request=db_request, - additional=release_event, + additional=release_event(identity, release), ), pretend.call( mock.ANY, tag=EventTag.File.FileAdd, request=db_request, - additional=fileadd_event, + additional=fileadd_event(filename, identity, release, project), ), ] @@ -3654,8 +3662,6 @@ def test_upload_succeeds_with_valid_attestation( metrics, integrity_service, ): - from warehouse.events.models import HasEvents - project = ProjectFactory.create() version = "1.0" publisher = GitHubPublisherFactory.create(projects=[project]) @@ -3759,8 +3765,6 @@ def test_upload_fails_attestation_error( db_request, invalid_attestations, ): - from warehouse.events.models import HasEvents - project = ProjectFactory.create() version = "1.0" publisher = GitHubPublisherFactory.create(projects=[project]) @@ -5346,7 +5350,6 @@ def test_upload_fails_when_license_and_license_expression_are_present( class TestStagedRelease: @staticmethod def get_identity(test_with_user, project, db_request, pyramid_config): - if 
test_with_user: identity = UserFactory.create() EmailFactory.create(user=identity) @@ -5359,7 +5362,6 @@ def get_identity(test_with_user, project, db_request, pyramid_config): db_request.oidc_publisher = identity.publisher db_request.oidc_claims = identity.claims - db_request.user_agent = "warehouse-tests/6.6.6" pyramid_config.testing_securitypolicy(identity=identity) @@ -5372,12 +5374,13 @@ def get_identity(test_with_user, project, db_request, pyramid_config): False, ], ) - def test_upload_with_stage(self, test_with_user, monkeypatch, db_request, pyramid_config, metrics): - from warehouse.events.models import HasEvents - from warehouse.events.tags import EventTag - + def test_upload_succeeds_with_stage_header( + self, test_with_user, monkeypatch, db_request, pyramid_config, metrics + ): project = ProjectFactory.create() - identity = self.get_identity(test_with_user, project, db_request, pyramid_config) + identity = self.get_identity( + test_with_user, project, db_request, pyramid_config + ) filename = "{}-{}.tar.gz".format( project.normalized_name.replace("-", "_"), "1.0" @@ -5451,47 +5454,18 @@ def test_upload_with_stage(self, test_with_user, monkeypatch, db_request, pyrami ] # Ensure that all of our events have been created - release_event = { - "submitted_by": ( - identity.username if test_with_user else "OpenID created token" - ), - "canonical_version": release.canonical_version, - "publisher_url": ( - f"{identity.publisher.publisher_url()}/commit/somesha" - if not test_with_user - else None - ), - "uploaded_via_trusted_publisher": not test_with_user, - "published": False, - } - - fileadd_event = { - "filename": filename, - "submitted_by": ( - identity.username if test_with_user else "OpenID created token" - ), - "canonical_version": release.canonical_version, - "publisher_url": ( - f"{identity.publisher.publisher_url()}/commit/somesha" - if not test_with_user - else None - ), - "project_id": str(project.id), - "uploaded_via_trusted_publisher": not test_with_user, - } - assert record_event.calls == [ pretend.call( mock.ANY, tag=EventTag.Project.ReleaseAdd, request=db_request, - additional=release_event, + additional=release_event(identity, release), ), pretend.call( mock.ANY, tag=EventTag.File.FileAdd, request=db_request, - additional=fileadd_event, + additional=fileadd_event(filename, identity, release, project), ), ] @@ -5502,12 +5476,13 @@ def test_upload_with_stage(self, test_with_user, monkeypatch, db_request, pyrami False, ], ) - def test_publish_with_file(self, test_with_user, monkeypatch, db_request, pyramid_config, metrics): - from warehouse.events.models import HasEvents - from warehouse.events.tags import EventTag - + def test_upload_succeeds_on_staged_release( + self, test_with_user, monkeypatch, db_request, pyramid_config, metrics + ): project = ProjectFactory.create() - identity = self.get_identity(test_with_user, project, db_request, pyramid_config) + identity = self.get_identity( + test_with_user, project, db_request, pyramid_config + ) # Create a release and add a file release = ReleaseFactory.create(project=project, version="1.0") @@ -5586,44 +5561,135 @@ def test_publish_with_file(self, test_with_user, monkeypatch, db_request, pyrami ] # Ensure that all of our events have been created - release_event = { - "submitted_by": ( - identity.username if test_with_user else "OpenID created token" - ), - "canonical_version": release.canonical_version, - "uploaded_via_trusted_publisher": not test_with_user, - } - - fileadd_event = { - "filename": filename, - "submitted_by": ( 
- identity.username if test_with_user else "OpenID created token" - ), - "canonical_version": release.canonical_version, - "publisher_url": ( - f"{identity.publisher.publisher_url()}/commit/somesha" - if not test_with_user - else None - ), - "project_id": str(project.id), - "uploaded_via_trusted_publisher": not test_with_user, - } - assert record_event.calls == [ pretend.call( mock.ANY, tag=EventTag.File.FileAdd, request=db_request, - additional=fileadd_event, + additional=fileadd_event(filename, identity, release, project), ), pretend.call( mock.ANY, tag=EventTag.Project.ReleasePublish, request=db_request, - additional=release_event, + additional={ + "submitted_by": ( + identity.username if test_with_user else "OpenID created token" + ), + "uploaded_via_trusted_publisher": not test_with_user, + "canonical_version": release.canonical_version, + }, ), ] + @pytest.mark.parametrize( + "test_with_user", + [ + True, + False, + ], + ) + def test_upload_succeeds_on_staged_release_with_stage_header( + self, test_with_user, monkeypatch, db_request, pyramid_config, metrics + ): + project = ProjectFactory.create() + identity = self.get_identity( + test_with_user, project, db_request, pyramid_config + ) + + release = ReleaseFactory.create(project=project, version="1.0", published=False) + FileFactory.create(release=release, packagetype="bdist_wheel") + + filename = "{}-{}.tar.gz".format( + project.normalized_name.replace("-", "_"), "1.0" + ) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + db_request.headers["X-PyPI-Is-Staged"] = "1" + + db_request.find_service = lambda svc, name=None, context=None: { + IFileStorage: pretend.stub(store=lambda path, filepath, meta: None), + IMetricsService: metrics, + }.get(svc) + + record_event = pretend.call_recorder( + lambda self, *, tag, request=None, additional: None + ) + monkeypatch.setattr(HasEvents, "record_event", record_event) + + resp = legacy.file_upload(db_request) + assert resp.status_code == 200 + + # Ensure that the release is still not published + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & (Release.version == "1.0")) + .one() + ) + assert not release.published + + # Ensure that a File object has been created. 
+ db_request.db.query(File).filter( + (File.release == release) & (File.filename == filename) + ).one() + + assert record_event.calls == [ + pretend.call( + mock.ANY, + tag=EventTag.File.FileAdd, + request=db_request, + additional=fileadd_event(filename, identity, release, project), + ), + ] + + def test_upload_fails_with_staged_on_existing_release( + self, monkeypatch, pyramid_config, db_request + ): + project = ProjectFactory.create() + self.get_identity(True, project, db_request, pyramid_config) + ReleaseFactory.create(project=project, version="1.0", published=True) + filename = "{}-{}.tar.gz".format( + project.normalized_name.replace("-", "_"), "1.0" + ) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + db_request.headers["X-PyPI-Is-Staged"] = "1" + + with pytest.raises(HTTPBadRequest) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + assert resp.status_code == 400 + assert resp.status == "400 Release already published." + def test_submit(pyramid_request): resp = legacy.submit(pyramid_request) diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 933daddd3d4f..9b94466f27ce 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -978,6 +978,9 @@ def file_upload(request): # at least this should be some sort of hook or trigger. _sort_releases(request, project) + if release.published is True and staged_release: + raise _exc_with_message(HTTPBadRequest, "Release already published.") + # Pull the filename out of our POST data. filename = request.POST["content"].filename From 840951dd77da513f8bff52a2f7c6dc95c400452c Mon Sep 17 00:00:00 2001 From: Alexis Date: Wed, 22 Jan 2025 09:55:43 +0100 Subject: [PATCH 12/26] Add `published` column --- .../6cac7b706953_add_published_field.py | 42 +++++++++++++++++++ warehouse/packaging/models.py | 3 +- 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 warehouse/migrations/versions/6cac7b706953_add_published_field.py diff --git a/warehouse/migrations/versions/6cac7b706953_add_published_field.py b/warehouse/migrations/versions/6cac7b706953_add_published_field.py new file mode 100644 index 000000000000..e00c3646cfb5 --- /dev/null +++ b/warehouse/migrations/versions/6cac7b706953_add_published_field.py @@ -0,0 +1,42 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +add published field + +Revision ID: 6cac7b706953 +Revises: 2a2c32c47a8f +Create Date: 2025-01-22 08:49:17.030343 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "6cac7b706953" +down_revision = "2a2c32c47a8f" + + +def upgrade(): + conn = op.get_bind() + conn.execute(sa.text("SET statement_timeout = 120000")) + conn.execute(sa.text("SET lock_timeout = 120000")) + + op.add_column( + "releases", + sa.Column( + "published", sa.Boolean(), server_default=sa.text("true"), nullable=False + ), + ) + + +def downgrade(): + op.drop_column("releases", "published") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 476638de019b..8f6fa0d63bd3 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -79,7 +79,7 @@ from warehouse.sitemap.models import SitemapMixin from warehouse.utils import dotted_navigator, wheel from warehouse.utils.attrs import make_repr -from warehouse.utils.db.types import bool_false, datetime_now +from warehouse.utils.db.types import bool_false, bool_true, datetime_now if typing.TYPE_CHECKING: from warehouse.oidc.models import OIDCPublisher @@ -633,6 +633,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() + published: Mapped[bool_true] = mapped_column() description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), From deddcc1d5d006e7137d4ed7c2dbf330e4785bdb7 Mon Sep 17 00:00:00 2001 From: Alan Velasco Date: Mon, 2 May 2022 14:24:40 -0600 Subject: [PATCH 13/26] Add `published` to the `ReleaseFactory` --- tests/common/db/packaging.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 3b97b20cbd92..94bb396407f2 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -100,6 +100,9 @@ class Meta: uploader = factory.SubFactory(UserFactory) description = factory.SubFactory(DescriptionFactory) + published = factory.Faker( + "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) + ) class FileFactory(WarehouseFactory): From c8affcefbd506c778f4d37cd52bc807d8c0c3224 Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 6 Dec 2024 14:26:19 +0100 Subject: [PATCH 14/26] Add migrations --- .../3e7bf3217166_add_published_in_release.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py diff --git a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py new file mode 100644 index 000000000000..7303a6f00062 --- /dev/null +++ b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +add published in Release + +Revision ID: 3e7bf3217166 +Revises: f7720656a33c +Create Date: 2024-12-06 11:04:21.907167 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "3e7bf3217166" +down_revision = "f7720656a33c" + + +def upgrade(): + op.add_column( + "releases", + sa.Column( + "published", sa.DateTime(), server_default=sa.text("now()"), nullable=True + ), + ) + + op.execute( + """ + UPDATE releases + SET published = created + """ + ) + + +def downgrade(): + op.drop_column("releases", "published") From f830b63aa8e6c3cb54eaf16d3b07a5e432d6e728 Mon Sep 17 00:00:00 2001 From: Alexis Date: Mon, 9 Dec 2024 14:47:59 +0100 Subject: [PATCH 15/26] Add a default value for Release.published field. --- tests/common/db/packaging.py | 3 +- tests/unit/packaging/test_views.py | 10 ++ warehouse/forklift/legacy.py | 2 + .../3e7bf3217166_add_published_in_release.py | 4 +- warehouse/packaging/models.py | 112 +++++++----------- 5 files changed, 57 insertions(+), 74 deletions(-) diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 94bb396407f2..68f728f44a93 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -123,7 +123,8 @@ class Meta: lambda o: hashlib.blake2b(o.filename.encode("utf8"), digest_size=32).hexdigest() ) upload_time = factory.Faker( - "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) + "date_time_between_dates", + datetime_start=datetime.datetime(2008, 1, 1), ) path = factory.LazyAttribute( lambda o: "/".join( diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index f914c3e97b1b..fe070d95ef67 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -9,6 +9,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from datetime import datetime import pretend import pytest @@ -324,6 +325,15 @@ def test_long_singleline_license(self, db_request): "characters, it's really so lo..." 
) + def test_created_with_published(self, db_request): + release = ReleaseFactory.create() + assert release.published > datetime(year=2008, month=1, day=1) + + def test_without_published_date(self, db_request): + release = ReleaseFactory.create(published=None) + db_request.db.flush() + assert release.published is None + class TestPEP740AttestationViewer: diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index a3018ecdb2bd..4ea70897fcdc 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -19,6 +19,7 @@ import zipfile from cgi import FieldStorage +from datetime import datetime import packaging.requirements import packaging.specifiers @@ -902,6 +903,7 @@ def file_upload(request): }, uploader=request.user if request.user else None, uploaded_via=request.user_agent, + published=datetime.now(), ) request.db.add(release) is_new_release = True diff --git a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py index 7303a6f00062..9bdb059c1f4c 100644 --- a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py +++ b/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py @@ -28,9 +28,7 @@ def upgrade(): op.add_column( "releases", - sa.Column( - "published", sa.DateTime(), server_default=sa.text("now()"), nullable=True - ), + sa.Column("published", sa.DateTime(), nullable=True), ) op.execute( diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 8f6fa0d63bd3..dfb8c56edce0 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -79,7 +79,7 @@ from warehouse.sitemap.models import SitemapMixin from warehouse.utils import dotted_navigator, wheel from warehouse.utils.attrs import make_repr -from warehouse.utils.db.types import bool_false, bool_true, datetime_now +from warehouse.utils.db.types import bool_false, datetime_now if typing.TYPE_CHECKING: from warehouse.oidc.models import OIDCPublisher @@ -166,7 +166,6 @@ def __contains__(self, project): class LifecycleStatus(enum.StrEnum): QuarantineEnter = "quarantine-enter" QuarantineExit = "quarantine-exit" - Archived = "archived" class Project(SitemapMixin, HasEvents, HasObservations, db.Model): @@ -328,36 +327,25 @@ def __acl__(self): (Allow, Authenticated, Permissions.SubmitMalwareObservation), ] - if self.lifecycle_status != LifecycleStatus.Archived: - # The project has zero or more OIDC publishers registered to it, - # each of which serves as an identity with the ability to upload releases - # (only if the project is not archived) - for publisher in self.oidc_publishers: - acls.append( - (Allow, f"oidc:{publisher.id}", [Permissions.ProjectsUpload]) - ) + # The project has zero or more OIDC publishers registered to it, + # each of which serves as an identity with the ability to upload releases. + for publisher in self.oidc_publishers: + acls.append((Allow, f"oidc:{publisher.id}", [Permissions.ProjectsUpload])) # Get all of the users for this project. - user_query = ( - session.query(Role) - .filter(Role.project == self) - .options(orm.lazyload(Role.project), orm.lazyload(Role.user)) - ) + query = session.query(Role).filter(Role.project == self) + query = query.options(orm.lazyload(Role.project)) + query = query.options(orm.lazyload(Role.user)) permissions = { (role.user_id, "Administer" if role.role_name == "Owner" else "Upload") - for role in user_query.all() + for role in query.all() } # Add all of the team members for this project. 
- team_query = ( - session.query(TeamProjectRole) - .filter(TeamProjectRole.project == self) - .options( - orm.lazyload(TeamProjectRole.project), - orm.lazyload(TeamProjectRole.team), - ) - ) - for role in team_query.all(): + query = session.query(TeamProjectRole).filter(TeamProjectRole.project == self) + query = query.options(orm.lazyload(TeamProjectRole.project)) + query = query.options(orm.lazyload(TeamProjectRole.team)) + for role in query.all(): permissions |= { (user.id, "Administer" if role.role_name.value == "Owner" else "Upload") for user in role.team.members @@ -365,41 +353,38 @@ def __acl__(self): # Add all organization owners for this project. if self.organization: - org_query = ( - session.query(OrganizationRole) - .filter( - OrganizationRole.organization == self.organization, - OrganizationRole.role_name == OrganizationRoleType.Owner, - ) - .options( - orm.lazyload(OrganizationRole.organization), - orm.lazyload(OrganizationRole.user), - ) + query = session.query(OrganizationRole).filter( + OrganizationRole.organization == self.organization, + OrganizationRole.role_name == OrganizationRoleType.Owner, ) - permissions |= {(role.user_id, "Administer") for role in org_query.all()} + query = query.options(orm.lazyload(OrganizationRole.organization)) + query = query.options(orm.lazyload(OrganizationRole.user)) + permissions |= {(role.user_id, "Administer") for role in query.all()} for user_id, permission_name in sorted(permissions, key=lambda x: (x[1], x[0])): # Disallow Write permissions for Projects in quarantine, allow Upload if self.lifecycle_status == LifecycleStatus.QuarantineEnter: - current_permissions = [ - Permissions.ProjectsRead, - Permissions.ProjectsUpload, - ] + acls.append( + ( + Allow, + f"user:{user_id}", + [Permissions.ProjectsRead, Permissions.ProjectsUpload], + ) + ) elif permission_name == "Administer": - current_permissions = [ - Permissions.ProjectsRead, - Permissions.ProjectsUpload, - Permissions.ProjectsWrite, - ] + acls.append( + ( + Allow, + f"user:{user_id}", + [ + Permissions.ProjectsRead, + Permissions.ProjectsUpload, + Permissions.ProjectsWrite, + ], + ) + ) else: - current_permissions = [Permissions.ProjectsUpload] - - if self.lifecycle_status == LifecycleStatus.Archived: - # Disallow upload permissions for archived projects - current_permissions.remove(Permissions.ProjectsUpload) - - if current_permissions: - acls.append((Allow, f"user:{user_id}", current_permissions)) + acls.append((Allow, f"user:{user_id}", [Permissions.ProjectsUpload])) return acls @property @@ -550,8 +535,8 @@ class ReleaseURL(db.Model): "Description", "Description-Content-Type", "Keywords", - "Home-Page", # Deprecated, but technically permitted by PEP 643 - "Download-Url", # Deprecated, but technically permitted by PEP 643 + "Home-Page", + "Download-Url", "Author", "Author-Email", "Maintainer", @@ -567,12 +552,6 @@ class ReleaseURL(db.Model): "Provides-Extra", "Provides-Dist", "Obsoletes-Dist", - # Although the following are deprecated fields, they are technically - # permitted as dynamic by PEP 643 - # https://github.com/pypa/setuptools/issues/4797#issuecomment-2589514950 - "Requires", - "Provides", - "Obsoletes", name="release_dynamic_fields", ) @@ -633,7 +612,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() - published: Mapped[bool_true] = mapped_column() + published: Mapped[datetime_now | None] description_id: Mapped[UUID] = mapped_column( 
ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), @@ -781,7 +760,7 @@ def urls_by_verify_status(self, *, verified: bool): return _urls def verified_user_name_and_repo_name( - self, domains: set[str], reserved_names: typing.Collection[str] | None = None + self, domains: set[str], reserved_names: typing.Sequence[str] | None = None ): for _, url in self.urls_by_verify_status(verified=True).items(): try: @@ -1008,13 +987,6 @@ def __table_args__(cls): # noqa Index("journals_version_idx", "version"), Index("journals_submitted_by_idx", "submitted_by"), Index("journals_submitted_date_id_idx", cls.submitted_date, cls.id), - # Composite index for journals to be able to sort by - # `submitted_by`, and `submitted_date` in descending order. - Index( - "journals_submitted_by_and_reverse_date_idx", - cls._submitted_by, - cls.submitted_date.desc(), - ), ) id: Mapped[int] = mapped_column(primary_key=True) From cdf1acb394cdb80c6015bc6e18ab01a47d2c0b4b Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 10 Dec 2024 14:46:53 +0100 Subject: [PATCH 16/26] Change to a boolean field --- tests/common/db/packaging.py | 3 --- tests/unit/forklift/test_legacy.py | 1 + tests/unit/packaging/test_views.py | 8 +------- warehouse/forklift/legacy.py | 3 +-- ...py => bd2bf218e63f_add_published_field.py} | 19 +++++++------------ .../templates/manage/project/history.html | 8 ++++++++ 6 files changed, 18 insertions(+), 24 deletions(-) rename warehouse/migrations/versions/{3e7bf3217166_add_published_in_release.py => bd2bf218e63f_add_published_field.py} (73%) diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 68f728f44a93..470f233a34c4 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -100,9 +100,6 @@ class Meta: uploader = factory.SubFactory(UserFactory) description = factory.SubFactory(DescriptionFactory) - published = factory.Faker( - "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) - ) class FileFactory(WarehouseFactory): diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index 05959699419b..3042251d7f4a 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3613,6 +3613,7 @@ def test_upload_succeeds_creates_release( else None ), "uploaded_via_trusted_publisher": not test_with_user, + "published": True, } fileadd_event = { diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index fe070d95ef67..dc6ac4156dfd 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -9,7 +9,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime import pretend import pytest @@ -327,12 +326,7 @@ def test_long_singleline_license(self, db_request): def test_created_with_published(self, db_request): release = ReleaseFactory.create() - assert release.published > datetime(year=2008, month=1, day=1) - - def test_without_published_date(self, db_request): - release = ReleaseFactory.create(published=None) - db_request.db.flush() - assert release.published is None + assert release.published is True class TestPEP740AttestationViewer: diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 4ea70897fcdc..863076c87383 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -19,7 +19,6 @@ import zipfile from cgi import FieldStorage -from datetime import datetime import packaging.requirements import packaging.specifiers @@ -903,7 +902,6 @@ def file_upload(request): }, uploader=request.user if request.user else None, uploaded_via=request.user_agent, - published=datetime.now(), ) request.db.add(release) is_new_release = True @@ -934,6 +932,7 @@ def file_upload(request): else None ), "uploaded_via_trusted_publisher": bool(request.oidc_publisher), + "published": True, }, ) diff --git a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py b/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py similarity index 73% rename from warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py rename to warehouse/migrations/versions/bd2bf218e63f_add_published_field.py index 9bdb059c1f4c..313e65679ecd 100644 --- a/warehouse/migrations/versions/3e7bf3217166_add_published_in_release.py +++ b/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py @@ -10,32 +10,27 @@ # See the License for the specific language governing permissions and # limitations under the License. """ -add published in Release +add published field -Revision ID: 3e7bf3217166 +Revision ID: bd2bf218e63f Revises: f7720656a33c -Create Date: 2024-12-06 11:04:21.907167 +Create Date: 2024-12-10 10:40:19.588606 """ import sqlalchemy as sa from alembic import op -revision = "3e7bf3217166" +revision = "bd2bf218e63f" down_revision = "f7720656a33c" def upgrade(): op.add_column( "releases", - sa.Column("published", sa.DateTime(), nullable=True), - ) - - op.execute( - """ - UPDATE releases - SET published = created - """ + sa.Column( + "published", sa.Boolean(), server_default=sa.text("true"), nullable=False + ), ) diff --git a/warehouse/templates/manage/project/history.html b/warehouse/templates/manage/project/history.html index 6ca65478a265..002d80500033 100644 --- a/warehouse/templates/manage/project/history.html +++ b/warehouse/templates/manage/project/history.html @@ -53,6 +53,14 @@

{% trans %}Security history{% endtrans %}

{{ event.additional.publisher_url }} {% endif %} + + {% trans %}Published:{% endtrans %} + {% if event.additional.published is defined and event.additional.published is false %} + {% trans %}No{% endtrans %} + {% else %} + {% trans %}Yes{% endtrans %} + {% endif %} + {% elif event.tag == EventTag.Project.ReleaseRemove %} {# No link to removed release #} From d3f59b42e474baaa1aad80c824af0d003b44b011 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 10 Dec 2024 18:39:04 +0100 Subject: [PATCH 17/26] Filter out unpublished releases --- tests/unit/legacy/api/test_json.py | 16 +++++++ tests/unit/packaging/test_views.py | 77 ++++++++++++++++++++---------- warehouse/legacy/api/json.py | 8 +++- warehouse/locale/messages.pot | 2 +- warehouse/packaging/models.py | 6 ++- warehouse/packaging/utils.py | 4 +- warehouse/packaging/views.py | 5 +- 7 files changed, 88 insertions(+), 30 deletions(-) diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index 3a7b029e8c7e..5edea5594161 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -118,6 +118,13 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): db_request.matchdict = {"name": project.normalized_name} assert json.latest_release_factory(db_request) == release + def test_with_unpublished(self, db_request): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + ReleaseFactory.create(project=project, version="2.0", published=False) + db_request.matchdict = {"name": project.normalized_name} + assert json.latest_release_factory(db_request) == release + def test_project_quarantined(self, monkeypatch, db_request): project = ProjectFactory.create( lifecycle_status=LifecycleStatus.QuarantineEnter @@ -191,6 +198,15 @@ def test_renders(self, pyramid_config, db_request, db_session): ) ] + ReleaseFactory.create( + project=project, + version="3.1", + description=DescriptionFactory.create( + content_type=description_content_type + ), + published=False, + ) + for urlspec in project_urls: label, _, purl = urlspec.partition(",") db_session.add( diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index dc6ac4156dfd..a8ea5f6f805d 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -135,6 +135,19 @@ def test_only_yanked_release(self, monkeypatch, db_request): assert resp is response assert release_detail.calls == [pretend.call(release, db_request)] + def test_with_unpublished(self, monkeypatch, db_request): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + ReleaseFactory.create(project=project, version="1.1", published=False) + + response = pretend.stub() + release_detail = pretend.call_recorder(lambda ctx, request: response) + monkeypatch.setattr(views, "release_detail", release_detail) + + resp = views.project_detail(project, db_request) + assert resp is response + assert release_detail.calls == [pretend.call(release, db_request)] + class TestReleaseDetail: def test_normalizing_name_redirects(self, db_request): @@ -178,30 +191,45 @@ def test_normalizing_version_redirects(self, db_request): def test_detail_rendered(self, db_request): users = [UserFactory.create(), UserFactory.create(), UserFactory.create()] project = ProjectFactory.create() - releases = [ - ReleaseFactory.create( - project=project, - version=v, - description=DescriptionFactory.create( - raw="unrendered description", - html="rendered description", 
- content_type="text/html", - ), - ) - for v in ["1.0", "2.0", "3.0", "4.0.dev0"] - ] + [ - ReleaseFactory.create( - project=project, - version="5.0", - description=DescriptionFactory.create( - raw="plaintext description", - html="", - content_type="text/plain", - ), - yanked=True, - yanked_reason="plaintext yanked reason", - ) - ] + releases = ( + [ + ReleaseFactory.create( + project=project, + version=v, + description=DescriptionFactory.create( + raw="unrendered description", + html="rendered description", + content_type="text/html", + ), + ) + for v in ["1.0", "2.0", "3.0", "4.0.dev0"] + ] + + [ + ReleaseFactory.create( + project=project, + version="5.0", + description=DescriptionFactory.create( + raw="plaintext description", + html="", + content_type="text/plain", + ), + yanked=True, + yanked_reason="plaintext yanked reason", + ) + ] + + [ + ReleaseFactory.create( + project=project, + version="5.1", + description=DescriptionFactory.create( + raw="unrendered description", + html="rendered description", + content_type="text/html", + ), + published=False, + ) + ] + ) files = [ FileFactory.create( release=r, @@ -226,6 +254,7 @@ def test_detail_rendered(self, db_request): "bdists": [], "description": "rendered description", "latest_version": project.latest_version, + # Non published version are not listed here "all_versions": [ (r.version, r.created, r.is_prerelease, r.yanked, r.yanked_reason) for r in reversed(releases) diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py index 513699cbd665..e7b8652527af 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -62,7 +62,10 @@ def _json_data(request, project, release, *, all_releases): ) ) .outerjoin(File) - .filter(Release.project == project) + .filter( + Release.project == project, + Release.published.is_(True), + ) ) # If we're not looking for all_releases, then we'll filter this further @@ -206,7 +209,8 @@ def latest_release_factory(request): .filter( Project.lifecycle_status.is_distinct_from( LifecycleStatus.QuarantineEnter - ) + ), + Release.published.is_(True), ) .order_by( Release.yanked.asc(), diff --git a/warehouse/locale/messages.pot b/warehouse/locale/messages.pot index fcf8eabcf41e..c732859b809c 100644 --- a/warehouse/locale/messages.pot +++ b/warehouse/locale/messages.pot @@ -784,7 +784,7 @@ msgstr "" msgid "Provide an Inspector link to specific lines of code." msgstr "" -#: warehouse/packaging/views.py:352 +#: warehouse/packaging/views.py:355 msgid "Your report has been recorded. Thank you for your help." msgstr "" diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index dfb8c56edce0..c2d608ae8f0a 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -454,7 +454,11 @@ def latest_version(self): return ( orm.object_session(self) .query(Release.version, Release.created, Release.is_prerelease) - .filter(Release.project == self, Release.yanked.is_(False)) + .filter( + Release.project == self, + Release.yanked.is_(False), + Release.published.is_(True), + ) .order_by(Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()) .first() ) diff --git a/warehouse/packaging/utils.py b/warehouse/packaging/utils.py index 7397cf45a740..53bdc488a04a 100644 --- a/warehouse/packaging/utils.py +++ b/warehouse/packaging/utils.py @@ -53,9 +53,11 @@ def _simple_detail(project, request): .join(Release) .filter(Release.project == project) # Exclude projects that are in the `quarantine-enter` lifecycle status. 
+ # And exclude un-published releases from the index .join(Project) .filter( - Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter) + Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter), + Release.published.is_(True), ) .all(), key=lambda f: (packaging_legacy.version.parse(f.release.version), f.filename), diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py index 55f722c43c0b..d1c3998f293e 100644 --- a/warehouse/packaging/views.py +++ b/warehouse/packaging/views.py @@ -179,7 +179,10 @@ def project_detail(project, request): try: release = ( request.db.query(Release) - .filter(Release.project == project) + .filter( + Release.project == project, + Release.published.is_(True), + ) .order_by( Release.yanked, Release.is_prerelease.nullslast(), From 8fcb92e1e9966d2387eb0bfc87978cab7dabcf9f Mon Sep 17 00:00:00 2001 From: Alexis Date: Fri, 3 Jan 2025 12:03:09 +0100 Subject: [PATCH 18/26] Use a SQLAlchemy event --- tests/unit/packaging/test_views.py | 76 +++++++++++++++--------------- warehouse/legacy/api/json.py | 2 - warehouse/locale/messages.pot | 2 +- warehouse/packaging/models.py | 22 +++++++-- warehouse/packaging/utils.py | 1 - warehouse/packaging/views.py | 5 +- 6 files changed, 58 insertions(+), 50 deletions(-) diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index a8ea5f6f805d..f2139dc97a36 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -191,45 +191,43 @@ def test_normalizing_version_redirects(self, db_request): def test_detail_rendered(self, db_request): users = [UserFactory.create(), UserFactory.create(), UserFactory.create()] project = ProjectFactory.create() - releases = ( - [ - ReleaseFactory.create( - project=project, - version=v, - description=DescriptionFactory.create( - raw="unrendered description", - html="rendered description", - content_type="text/html", - ), - ) - for v in ["1.0", "2.0", "3.0", "4.0.dev0"] - ] - + [ - ReleaseFactory.create( - project=project, - version="5.0", - description=DescriptionFactory.create( - raw="plaintext description", - html="", - content_type="text/plain", - ), - yanked=True, - yanked_reason="plaintext yanked reason", - ) - ] - + [ - ReleaseFactory.create( - project=project, - version="5.1", - description=DescriptionFactory.create( - raw="unrendered description", - html="rendered description", - content_type="text/html", - ), - published=False, - ) - ] + releases = [ + ReleaseFactory.create( + project=project, + version=v, + description=DescriptionFactory.create( + raw="unrendered description", + html="rendered description", + content_type="text/html", + ), + ) + for v in ["1.0", "2.0", "3.0", "4.0.dev0"] + ] + [ + ReleaseFactory.create( + project=project, + version="5.0", + description=DescriptionFactory.create( + raw="plaintext description", + html="", + content_type="text/plain", + ), + yanked=True, + yanked_reason="plaintext yanked reason", + ) + ] + + # Add an unpublished version + staged_release = ReleaseFactory.create( + project=project, + version="5.1", + description=DescriptionFactory.create( + raw="unrendered description", + html="rendered description", + content_type="text/html", + ), + published=False, ) + files = [ FileFactory.create( release=r, @@ -237,7 +235,7 @@ def test_detail_rendered(self, db_request): python_version="source", packagetype="sdist", ) - for r in releases + for r in releases + [staged_release] ] # Create a role for each user diff --git a/warehouse/legacy/api/json.py 
b/warehouse/legacy/api/json.py index e7b8652527af..8a95b4ccbb6d 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -64,7 +64,6 @@ def _json_data(request, project, release, *, all_releases): .outerjoin(File) .filter( Release.project == project, - Release.published.is_(True), ) ) @@ -210,7 +209,6 @@ def latest_release_factory(request): Project.lifecycle_status.is_distinct_from( LifecycleStatus.QuarantineEnter ), - Release.published.is_(True), ) .order_by( Release.yanked.asc(), diff --git a/warehouse/locale/messages.pot b/warehouse/locale/messages.pot index c732859b809c..fcf8eabcf41e 100644 --- a/warehouse/locale/messages.pot +++ b/warehouse/locale/messages.pot @@ -784,7 +784,7 @@ msgstr "" msgid "Provide an Inspector link to specific lines of code." msgstr "" -#: warehouse/packaging/views.py:355 +#: warehouse/packaging/views.py:352 msgid "Your report has been recorded. Thank you for your help." msgstr "" diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index c2d608ae8f0a..3b9fa99c745d 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -52,10 +52,12 @@ from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import ( Mapped, + ORMExecuteState, attribute_keyed_dict, declared_attr, mapped_column, validates, + with_loader_criteria, ) from urllib3.exceptions import LocationParseError from urllib3.util import parse_url @@ -79,7 +81,7 @@ from warehouse.sitemap.models import SitemapMixin from warehouse.utils import dotted_navigator, wheel from warehouse.utils.attrs import make_repr -from warehouse.utils.db.types import bool_false, datetime_now +from warehouse.utils.db.types import bool_false, bool_true, datetime_now if typing.TYPE_CHECKING: from warehouse.oidc.models import OIDCPublisher @@ -457,7 +459,6 @@ def latest_version(self): .filter( Release.project == self, Release.yanked.is_(False), - Release.published.is_(True), ) .order_by(Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()) .first() @@ -616,7 +617,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() - published: Mapped[datetime_now | None] + published: Mapped[bool_true] = mapped_column() description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), @@ -1034,6 +1035,21 @@ def ensure_monotonic_journals(config, session, flush_context, instances): return +@db.listens_for(db.Session, "do_orm_execute") +def filter_staged_release(config, state: ORMExecuteState): + if ( + state.is_select + and not state.is_column_load + and not state.is_relationship_load + and not state.statement.get_execution_options().get("include_staged", False) + ): + state.statement = state.statement.options( + with_loader_criteria( + Release, lambda cls: cls.published, include_aliases=True + ) + ) + + class ProhibitedProjectName(db.Model): __tablename__ = "prohibited_project_names" __table_args__ = ( diff --git a/warehouse/packaging/utils.py b/warehouse/packaging/utils.py index 53bdc488a04a..b58e62610871 100644 --- a/warehouse/packaging/utils.py +++ b/warehouse/packaging/utils.py @@ -57,7 +57,6 @@ def _simple_detail(project, request): .join(Project) .filter( Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter), - Release.published.is_(True), ) .all(), key=lambda f: (packaging_legacy.version.parse(f.release.version), f.filename), diff --git 
a/warehouse/packaging/views.py b/warehouse/packaging/views.py index d1c3998f293e..55f722c43c0b 100644 --- a/warehouse/packaging/views.py +++ b/warehouse/packaging/views.py @@ -179,10 +179,7 @@ def project_detail(project, request): try: release = ( request.db.query(Release) - .filter( - Release.project == project, - Release.published.is_(True), - ) + .filter(Release.project == project) .order_by( Release.yanked, Release.is_prerelease.nullslast(), From cf2abc801bd9966b2eefb5ecb009c163f79fd9af Mon Sep 17 00:00:00 2001 From: Alexis Date: Wed, 22 Jan 2025 10:20:40 +0100 Subject: [PATCH 19/26] Remove field from PR --- .../bd2bf218e63f_add_published_field.py | 38 ------------------- warehouse/packaging/models.py | 1 - 2 files changed, 39 deletions(-) delete mode 100644 warehouse/migrations/versions/bd2bf218e63f_add_published_field.py diff --git a/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py b/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py deleted file mode 100644 index 313e65679ecd..000000000000 --- a/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -add published field - -Revision ID: bd2bf218e63f -Revises: f7720656a33c -Create Date: 2024-12-10 10:40:19.588606 -""" - -import sqlalchemy as sa - -from alembic import op - -revision = "bd2bf218e63f" -down_revision = "f7720656a33c" - - -def upgrade(): - op.add_column( - "releases", - sa.Column( - "published", sa.Boolean(), server_default=sa.text("true"), nullable=False - ), - ) - - -def downgrade(): - op.drop_column("releases", "published") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 3b9fa99c745d..0b5dcc5f8856 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -617,7 +617,6 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() - published: Mapped[bool_true] = mapped_column() description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), From eb025b68576712073e0ec6621429330468f5ffd9 Mon Sep 17 00:00:00 2001 From: Alexis Date: Wed, 22 Jan 2025 10:46:58 +0100 Subject: [PATCH 20/26] Resolve merge conflict --- warehouse/packaging/models.py | 114 +++++++++++++++++++++------------- 1 file changed, 70 insertions(+), 44 deletions(-) diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 0b5dcc5f8856..751657373a62 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -168,6 +168,7 @@ def __contains__(self, project): class LifecycleStatus(enum.StrEnum): QuarantineEnter = "quarantine-enter" QuarantineExit = "quarantine-exit" + Archived = "archived" class Project(SitemapMixin, HasEvents, HasObservations, db.Model): @@ -329,25 +330,36 @@ def __acl__(self): (Allow, Authenticated, Permissions.SubmitMalwareObservation), ] - # The 
project has zero or more OIDC publishers registered to it, - # each of which serves as an identity with the ability to upload releases. - for publisher in self.oidc_publishers: - acls.append((Allow, f"oidc:{publisher.id}", [Permissions.ProjectsUpload])) + if self.lifecycle_status != LifecycleStatus.Archived: + # The project has zero or more OIDC publishers registered to it, + # each of which serves as an identity with the ability to upload releases + # (only if the project is not archived) + for publisher in self.oidc_publishers: + acls.append( + (Allow, f"oidc:{publisher.id}", [Permissions.ProjectsUpload]) + ) # Get all of the users for this project. - query = session.query(Role).filter(Role.project == self) - query = query.options(orm.lazyload(Role.project)) - query = query.options(orm.lazyload(Role.user)) + user_query = ( + session.query(Role) + .filter(Role.project == self) + .options(orm.lazyload(Role.project), orm.lazyload(Role.user)) + ) permissions = { (role.user_id, "Administer" if role.role_name == "Owner" else "Upload") - for role in query.all() + for role in user_query.all() } # Add all of the team members for this project. - query = session.query(TeamProjectRole).filter(TeamProjectRole.project == self) - query = query.options(orm.lazyload(TeamProjectRole.project)) - query = query.options(orm.lazyload(TeamProjectRole.team)) - for role in query.all(): + team_query = ( + session.query(TeamProjectRole) + .filter(TeamProjectRole.project == self) + .options( + orm.lazyload(TeamProjectRole.project), + orm.lazyload(TeamProjectRole.team), + ) + ) + for role in team_query.all(): permissions |= { (user.id, "Administer" if role.role_name.value == "Owner" else "Upload") for user in role.team.members @@ -355,38 +367,41 @@ def __acl__(self): # Add all organization owners for this project. 
if self.organization: - query = session.query(OrganizationRole).filter( - OrganizationRole.organization == self.organization, - OrganizationRole.role_name == OrganizationRoleType.Owner, + org_query = ( + session.query(OrganizationRole) + .filter( + OrganizationRole.organization == self.organization, + OrganizationRole.role_name == OrganizationRoleType.Owner, + ) + .options( + orm.lazyload(OrganizationRole.organization), + orm.lazyload(OrganizationRole.user), + ) ) - query = query.options(orm.lazyload(OrganizationRole.organization)) - query = query.options(orm.lazyload(OrganizationRole.user)) - permissions |= {(role.user_id, "Administer") for role in query.all()} + permissions |= {(role.user_id, "Administer") for role in org_query.all()} for user_id, permission_name in sorted(permissions, key=lambda x: (x[1], x[0])): # Disallow Write permissions for Projects in quarantine, allow Upload if self.lifecycle_status == LifecycleStatus.QuarantineEnter: - acls.append( - ( - Allow, - f"user:{user_id}", - [Permissions.ProjectsRead, Permissions.ProjectsUpload], - ) - ) + current_permissions = [ + Permissions.ProjectsRead, + Permissions.ProjectsUpload, + ] elif permission_name == "Administer": - acls.append( - ( - Allow, - f"user:{user_id}", - [ - Permissions.ProjectsRead, - Permissions.ProjectsUpload, - Permissions.ProjectsWrite, - ], - ) - ) + current_permissions = [ + Permissions.ProjectsRead, + Permissions.ProjectsUpload, + Permissions.ProjectsWrite, + ] else: - acls.append((Allow, f"user:{user_id}", [Permissions.ProjectsUpload])) + current_permissions = [Permissions.ProjectsUpload] + + if self.lifecycle_status == LifecycleStatus.Archived: + # Disallow upload permissions for archived projects + current_permissions.remove(Permissions.ProjectsUpload) + + if current_permissions: + acls.append((Allow, f"user:{user_id}", current_permissions)) return acls @property @@ -456,10 +471,7 @@ def latest_version(self): return ( orm.object_session(self) .query(Release.version, Release.created, Release.is_prerelease) - .filter( - Release.project == self, - Release.yanked.is_(False), - ) + .filter(Release.project == self, Release.yanked.is_(False)) .order_by(Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()) .first() ) @@ -540,8 +552,8 @@ class ReleaseURL(db.Model): "Description", "Description-Content-Type", "Keywords", - "Home-Page", - "Download-Url", + "Home-Page", # Deprecated, but technically permitted by PEP 643 + "Download-Url", # Deprecated, but technically permitted by PEP 643 "Author", "Author-Email", "Maintainer", @@ -557,6 +569,12 @@ class ReleaseURL(db.Model): "Provides-Extra", "Provides-Dist", "Obsoletes-Dist", + # Although the following are deprecated fields, they are technically + # permitted as dynamic by PEP 643 + # https://github.com/pypa/setuptools/issues/4797#issuecomment-2589514950 + "Requires", + "Provides", + "Obsoletes", name="release_dynamic_fields", ) @@ -617,6 +635,7 @@ def __table_args__(cls): # noqa _pypi_ordering: Mapped[int | None] requires_python: Mapped[str | None] = mapped_column(Text) created: Mapped[datetime_now] = mapped_column() + published: Mapped[bool_true] = mapped_column() description_id: Mapped[UUID] = mapped_column( ForeignKey("release_descriptions.id", onupdate="CASCADE", ondelete="CASCADE"), @@ -764,7 +783,7 @@ def urls_by_verify_status(self, *, verified: bool): return _urls def verified_user_name_and_repo_name( - self, domains: set[str], reserved_names: typing.Sequence[str] | None = None + self, domains: set[str], reserved_names: typing.Collection[str] | 
None = None ): for _, url in self.urls_by_verify_status(verified=True).items(): try: @@ -991,6 +1010,13 @@ def __table_args__(cls): # noqa Index("journals_version_idx", "version"), Index("journals_submitted_by_idx", "submitted_by"), Index("journals_submitted_date_id_idx", cls.submitted_date, cls.id), + # Composite index for journals to be able to sort by + # `submitted_by`, and `submitted_date` in descending order. + Index( + "journals_submitted_by_and_reverse_date_idx", + cls._submitted_by, + cls.submitted_date.desc(), + ), ) id: Mapped[int] = mapped_column(primary_key=True) From dec62389b7df61b5b6089dbdd31d0ed752e45144 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 28 Jan 2025 10:33:03 +0100 Subject: [PATCH 21/26] Revert un-needed changes --- warehouse/legacy/api/json.py | 6 ++---- warehouse/packaging/utils.py | 3 +-- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py index 8a95b4ccbb6d..513699cbd665 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -62,9 +62,7 @@ def _json_data(request, project, release, *, all_releases): ) ) .outerjoin(File) - .filter( - Release.project == project, - ) + .filter(Release.project == project) ) # If we're not looking for all_releases, then we'll filter this further @@ -208,7 +206,7 @@ def latest_release_factory(request): .filter( Project.lifecycle_status.is_distinct_from( LifecycleStatus.QuarantineEnter - ), + ) ) .order_by( Release.yanked.asc(), diff --git a/warehouse/packaging/utils.py b/warehouse/packaging/utils.py index b58e62610871..7397cf45a740 100644 --- a/warehouse/packaging/utils.py +++ b/warehouse/packaging/utils.py @@ -53,10 +53,9 @@ def _simple_detail(project, request): .join(Release) .filter(Release.project == project) # Exclude projects that are in the `quarantine-enter` lifecycle status. - # And exclude un-published releases from the index .join(Project) .filter( - Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter), + Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter) ) .all(), key=lambda f: (packaging_legacy.version.parse(f.release.version), f.filename), From 56feeb07250e461a71cc318c02b8eb843e261f33 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 28 Jan 2025 11:06:59 +0100 Subject: [PATCH 22/26] Remove event handling --- warehouse/templates/manage/project/history.html | 8 -------- 1 file changed, 8 deletions(-) diff --git a/warehouse/templates/manage/project/history.html b/warehouse/templates/manage/project/history.html index 002d80500033..6ca65478a265 100644 --- a/warehouse/templates/manage/project/history.html +++ b/warehouse/templates/manage/project/history.html @@ -53,14 +53,6 @@

{% trans %}Security history{% endtrans %}

{{ event.additional.publisher_url }} {% endif %} - - {% trans %}Published:{% endtrans %} - {% if event.additional.published is defined and event.additional.published is false %} - {% trans %}No{% endtrans %} - {% else %} - {% trans %}Yes{% endtrans %} - {% endif %} - {% elif event.tag == EventTag.Project.ReleaseRemove %} {# No link to removed release #} From 69060967e90ad264199c2e21b830af3f9c51ce96 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 28 Jan 2025 14:42:06 +0100 Subject: [PATCH 23/26] Add more tests --- tests/unit/forklift/test_legacy.py | 1 - tests/unit/legacy/api/test_json.py | 78 +++++++++++++++++++++++++++++ tests/unit/packaging/test_models.py | 28 +++++++++++ warehouse/forklift/legacy.py | 1 - warehouse/packaging/models.py | 2 +- 5 files changed, 107 insertions(+), 3 deletions(-) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index 8177010fba1a..3ba814a87bef 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3586,7 +3586,6 @@ def test_upload_succeeds_creates_release( else None ), "uploaded_via_trusted_publisher": not test_with_user, - "published": True, } fileadd_event = { diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index 5edea5594161..1878fbe8cdf2 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -125,6 +125,84 @@ def test_with_unpublished(self, db_request): db_request.matchdict = {"name": project.normalized_name} assert json.latest_release_factory(db_request) == release + def test_only_unpublished(self, db_request): + project = ProjectFactory.create() + ReleaseFactory.create(project=project, version="1.0", published=False) + db_request.matchdict = {"name": project.normalized_name} + resp = json.latest_release_factory(db_request) + + assert isinstance(resp, HTTPNotFound) + _assert_has_cors_headers(resp.headers) + + @pytest.mark.parametrize( + ("release0_state", "release1_state", "release2_state", "latest_release"), + [ + ("published", "published", "published", 2), + ("published", "published", "unpublished", 1), + ("published", "published", "yanked", 1), + ("published", "unpublished", "published", 2), + ("published", "unpublished", "unpublished", 0), + ("published", "unpublished", "yanked", 0), + ("published", "yanked", "published", 2), + ("published", "yanked", "unpublished", 0), + ("published", "yanked", "yanked", 0), + ("unpublished", "published", "published", 2), + ("unpublished", "published", "unpublished", 1), + ("unpublished", "published", "yanked", 1), + ("unpublished", "unpublished", "published", 2), + ("unpublished", "unpublished", "unpublished", -1), + ("unpublished", "unpublished", "yanked", 2), # Same endpoint as none yanked + ("unpublished", "yanked", "published", 2), + ("unpublished", "yanked", "unpublished", 1), + ("unpublished", "yanked", "yanked", 2), + ("yanked", "published", "published", 2), + ("yanked", "published", "unpublished", 1), + ("yanked", "published", "yanked", 1), + ("yanked", "unpublished", "published", 2), + ("yanked", "unpublished", "unpublished", 0), + ("yanked", "unpublished", "yanked", 2), + ("yanked", "yanked", "published", 2), + ("yanked", "yanked", "unpublished", 1), + ("yanked", "yanked", "yanked", 2), + ], + ) + def test_with_mixed_states( + self, db_request, release0_state, release1_state, release2_state, latest_release + ): + project = ProjectFactory.create() + + releases = [] + for version, state in [ + ("1.0", release0_state), + ("1.1", release1_state), + ("2.0", 
release2_state), + ]: + if state == "published": + releases.append( + ReleaseFactory.create( + project=project, version=version, published=True + ) + ) + elif state == "unpublished": + releases.append( + ReleaseFactory.create( + project=project, version=version, published=False + ) + ) + else: + releases.append( + ReleaseFactory.create(project=project, version=version, yanked=True) + ) + + db_request.matchdict = {"name": project.normalized_name} + + resp = json.latest_release_factory(db_request) + if latest_release >= 0: + assert resp == releases[latest_release] + else: + assert isinstance(resp, HTTPNotFound) + _assert_has_cors_headers(resp.headers) + def test_project_quarantined(self, monkeypatch, db_request): project = ProjectFactory.create( lifecycle_status=LifecycleStatus.QuarantineEnter diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py index 8a80d58129e2..6904a0c38a91 100644 --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -28,6 +28,7 @@ Project, ProjectFactory, ProjectMacaroonWarningAssociation, + Release, ReleaseURL, ) @@ -1215,6 +1216,33 @@ def test_description_relationship(self, db_request): assert release in db_request.db.deleted assert description in db_request.db.deleted + def test_published(self, db_request): + release = DBReleaseFactory.create() + assert release.published + + def test_unpublished(self, db_request): + release = DBReleaseFactory.create(published=False) + assert not release.published + + +@pytest.mark.parametrize( + "published", + [ + True, + False, + ], +) +def test_filter_staged_releases(db_request, published): + DBReleaseFactory.create(published=published) + assert db_request.db.query(Release).count() == (1 if published else 0) + + +def test_filter_staged_releases_unpublished(db_request): + DBReleaseFactory.create(published=False) + assert ( + db_request.db.query(Release).execution_options(include_staged=True).count() == 1 + ) + class TestFile: def test_requires_python(self, db_session): diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 4e92034a86cd..1dbddefd09c5 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -970,7 +970,6 @@ def file_upload(request): else None ), "uploaded_via_trusted_publisher": bool(request.oidc_publisher), - "published": True, }, ) diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py index 751657373a62..0aea9d6241f2 100644 --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -1061,7 +1061,7 @@ def ensure_monotonic_journals(config, session, flush_context, instances): @db.listens_for(db.Session, "do_orm_execute") -def filter_staged_release(config, state: ORMExecuteState): +def filter_staged_release(_, state: ORMExecuteState): if ( state.is_select and not state.is_column_load From 9397ef202420686d873bb28ab14bea18b1e54992 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 28 Jan 2025 14:48:39 +0100 Subject: [PATCH 24/26] Rename from unpublished to staged --- tests/unit/legacy/api/test_json.py | 44 ++++++++++++++--------------- tests/unit/packaging/test_models.py | 10 +------ tests/unit/packaging/test_views.py | 4 +-- 3 files changed, 25 insertions(+), 33 deletions(-) diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index 1878fbe8cdf2..23973fa4bb73 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -118,14 +118,14 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): 
db_request.matchdict = {"name": project.normalized_name} assert json.latest_release_factory(db_request) == release - def test_with_unpublished(self, db_request): + def test_with_staged(self, db_request): project = ProjectFactory.create() release = ReleaseFactory.create(project=project, version="1.0") ReleaseFactory.create(project=project, version="2.0", published=False) db_request.matchdict = {"name": project.normalized_name} assert json.latest_release_factory(db_request) == release - def test_only_unpublished(self, db_request): + def test_only_staged(self, db_request): project = ProjectFactory.create() ReleaseFactory.create(project=project, version="1.0", published=False) db_request.matchdict = {"name": project.normalized_name} @@ -138,31 +138,31 @@ def test_only_unpublished(self, db_request): ("release0_state", "release1_state", "release2_state", "latest_release"), [ ("published", "published", "published", 2), - ("published", "published", "unpublished", 1), + ("published", "published", "staged", 1), ("published", "published", "yanked", 1), - ("published", "unpublished", "published", 2), - ("published", "unpublished", "unpublished", 0), - ("published", "unpublished", "yanked", 0), + ("published", "staged", "published", 2), + ("published", "staged", "staged", 0), + ("published", "staged", "yanked", 0), ("published", "yanked", "published", 2), - ("published", "yanked", "unpublished", 0), + ("published", "yanked", "staged", 0), ("published", "yanked", "yanked", 0), - ("unpublished", "published", "published", 2), - ("unpublished", "published", "unpublished", 1), - ("unpublished", "published", "yanked", 1), - ("unpublished", "unpublished", "published", 2), - ("unpublished", "unpublished", "unpublished", -1), - ("unpublished", "unpublished", "yanked", 2), # Same endpoint as none yanked - ("unpublished", "yanked", "published", 2), - ("unpublished", "yanked", "unpublished", 1), - ("unpublished", "yanked", "yanked", 2), + ("staged", "published", "published", 2), + ("staged", "published", "staged", 1), + ("staged", "published", "yanked", 1), + ("staged", "staged", "published", 2), + ("staged", "staged", "staged", -1), + ("staged", "staged", "yanked", 2), # Same endpoint as none yanked + ("staged", "yanked", "published", 2), + ("staged", "yanked", "staged", 1), + ("staged", "yanked", "yanked", 2), ("yanked", "published", "published", 2), - ("yanked", "published", "unpublished", 1), + ("yanked", "published", "staged", 1), ("yanked", "published", "yanked", 1), - ("yanked", "unpublished", "published", 2), - ("yanked", "unpublished", "unpublished", 0), - ("yanked", "unpublished", "yanked", 2), + ("yanked", "staged", "published", 2), + ("yanked", "staged", "staged", 0), + ("yanked", "staged", "yanked", 2), ("yanked", "yanked", "published", 2), - ("yanked", "yanked", "unpublished", 1), + ("yanked", "yanked", "staged", 1), ("yanked", "yanked", "yanked", 2), ], ) @@ -183,7 +183,7 @@ def test_with_mixed_states( project=project, version=version, published=True ) ) - elif state == "unpublished": + elif state == "staged": releases.append( ReleaseFactory.create( project=project, version=version, published=False diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py index 6904a0c38a91..46f5138b9a99 100644 --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -1216,14 +1216,6 @@ def test_description_relationship(self, db_request): assert release in db_request.db.deleted assert description in db_request.db.deleted - def test_published(self, 
db_request): - release = DBReleaseFactory.create() - assert release.published - - def test_unpublished(self, db_request): - release = DBReleaseFactory.create(published=False) - assert not release.published - @pytest.mark.parametrize( "published", @@ -1237,7 +1229,7 @@ def test_filter_staged_releases(db_request, published): assert db_request.db.query(Release).count() == (1 if published else 0) -def test_filter_staged_releases_unpublished(db_request): +def test_filter_staged_releases_with_staged(db_request): DBReleaseFactory.create(published=False) assert ( db_request.db.query(Release).execution_options(include_staged=True).count() == 1 diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index f2139dc97a36..8eba115cf851 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -135,7 +135,7 @@ def test_only_yanked_release(self, monkeypatch, db_request): assert resp is response assert release_detail.calls == [pretend.call(release, db_request)] - def test_with_unpublished(self, monkeypatch, db_request): + def test_with_staged(self, monkeypatch, db_request): project = ProjectFactory.create() release = ReleaseFactory.create(project=project, version="1.0") ReleaseFactory.create(project=project, version="1.1", published=False) @@ -216,7 +216,7 @@ def test_detail_rendered(self, db_request): ) ] - # Add an unpublished version + # Add a staged version staged_release = ReleaseFactory.create( project=project, version="5.1", From 76ad6b7ce9bbeeeca082b7ecbcc198c83bf3c002 Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 28 Jan 2025 17:55:31 +0100 Subject: [PATCH 25/26] Update PR --- tests/common/db/packaging.py | 3 +- tests/unit/forklift/test_legacy.py | 1 + tests/unit/legacy/api/test_json.py | 8 +--- tests/unit/packaging/test_views.py | 13 ------- warehouse/forklift/legacy.py | 1 + warehouse/legacy/api/json.py | 8 +--- .../bd2bf218e63f_add_published_field.py | 38 ------------------- warehouse/packaging/utils.py | 4 +- warehouse/packaging/views.py | 5 +-- 9 files changed, 8 insertions(+), 73 deletions(-) delete mode 100644 warehouse/migrations/versions/bd2bf218e63f_add_published_field.py diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py index 470f233a34c4..3b97b20cbd92 100644 --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -120,8 +120,7 @@ class Meta: lambda o: hashlib.blake2b(o.filename.encode("utf8"), digest_size=32).hexdigest() ) upload_time = factory.Faker( - "date_time_between_dates", - datetime_start=datetime.datetime(2008, 1, 1), + "date_time_between_dates", datetime_start=datetime.datetime(2008, 1, 1) ) path = factory.LazyAttribute( lambda o: "/".join( diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index c9abcfbbeb2c..e762fb7d4d49 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -43,6 +43,7 @@ from warehouse.events.tags import EventTag from warehouse.forklift import legacy, metadata from warehouse.macaroons import IMacaroonService, caveats, security_policy +from warehouse.metrics import IMetricsService from warehouse.oidc.interfaces import SignedClaims from warehouse.oidc.utils import PublisherTokenContext from warehouse.packaging.interfaces import IFileStorage, IProjectService diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py index 5edea5594161..17b39273f8db 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -118,13 +118,6 @@ def 
test_all_non_prereleases_yanked(self, monkeypatch, db_request): db_request.matchdict = {"name": project.normalized_name} assert json.latest_release_factory(db_request) == release - def test_with_unpublished(self, db_request): - project = ProjectFactory.create() - release = ReleaseFactory.create(project=project, version="1.0") - ReleaseFactory.create(project=project, version="2.0", published=False) - db_request.matchdict = {"name": project.normalized_name} - assert json.latest_release_factory(db_request) == release - def test_project_quarantined(self, monkeypatch, db_request): project = ProjectFactory.create( lifecycle_status=LifecycleStatus.QuarantineEnter @@ -164,6 +157,7 @@ def test_normalizing_redirects(self, db_request): def test_renders(self, pyramid_config, db_request, db_session): project = ProjectFactory.create(has_docs=True) description_content_type = "text/x-rst" + print("NOJ") url = "/the/fake/url/" project_urls = [ "url," + url, diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py index a8ea5f6f805d..65884e399c74 100644 --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -135,19 +135,6 @@ def test_only_yanked_release(self, monkeypatch, db_request): assert resp is response assert release_detail.calls == [pretend.call(release, db_request)] - def test_with_unpublished(self, monkeypatch, db_request): - project = ProjectFactory.create() - release = ReleaseFactory.create(project=project, version="1.0") - ReleaseFactory.create(project=project, version="1.1", published=False) - - response = pretend.stub() - release_detail = pretend.call_recorder(lambda ctx, request: response) - monkeypatch.setattr(views, "release_detail", release_detail) - - resp = views.project_detail(project, db_request) - assert resp is response - assert release_detail.calls == [pretend.call(release, db_request)] - class TestReleaseDetail: def test_normalizing_name_redirects(self, db_request): diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index 6b618af18e50..c752d8b10c95 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -659,6 +659,7 @@ def file_upload(request): ] ), ) + # Ensure that we have file data in the request. if "content" not in request.POST: request.metrics.increment("warehouse.upload.failed", tags=["reason:no-file"]) diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py index e7b8652527af..513699cbd665 100644 --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -62,10 +62,7 @@ def _json_data(request, project, release, *, all_releases): ) ) .outerjoin(File) - .filter( - Release.project == project, - Release.published.is_(True), - ) + .filter(Release.project == project) ) # If we're not looking for all_releases, then we'll filter this further @@ -209,8 +206,7 @@ def latest_release_factory(request): .filter( Project.lifecycle_status.is_distinct_from( LifecycleStatus.QuarantineEnter - ), - Release.published.is_(True), + ) ) .order_by( Release.yanked.asc(), diff --git a/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py b/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py deleted file mode 100644 index 313e65679ecd..000000000000 --- a/warehouse/migrations/versions/bd2bf218e63f_add_published_field.py +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -add published field - -Revision ID: bd2bf218e63f -Revises: f7720656a33c -Create Date: 2024-12-10 10:40:19.588606 -""" - -import sqlalchemy as sa - -from alembic import op - -revision = "bd2bf218e63f" -down_revision = "f7720656a33c" - - -def upgrade(): - op.add_column( - "releases", - sa.Column( - "published", sa.Boolean(), server_default=sa.text("true"), nullable=False - ), - ) - - -def downgrade(): - op.drop_column("releases", "published") diff --git a/warehouse/packaging/utils.py b/warehouse/packaging/utils.py index 53bdc488a04a..7397cf45a740 100644 --- a/warehouse/packaging/utils.py +++ b/warehouse/packaging/utils.py @@ -53,11 +53,9 @@ def _simple_detail(project, request): .join(Release) .filter(Release.project == project) # Exclude projects that are in the `quarantine-enter` lifecycle status. - # And exclude un-published releases from the index .join(Project) .filter( - Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter), - Release.published.is_(True), + Project.lifecycle_status.is_distinct_from(LifecycleStatus.QuarantineEnter) ) .all(), key=lambda f: (packaging_legacy.version.parse(f.release.version), f.filename), diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py index d1c3998f293e..55f722c43c0b 100644 --- a/warehouse/packaging/views.py +++ b/warehouse/packaging/views.py @@ -179,10 +179,7 @@ def project_detail(project, request): try: release = ( request.db.query(Release) - .filter( - Release.project == project, - Release.published.is_(True), - ) + .filter(Release.project == project) .order_by( Release.yanked, Release.is_prerelease.nullslast(), From 6d14433dd1b6e86617af86b87c442a2ddb82749e Mon Sep 17 00:00:00 2001 From: Alexis Date: Tue, 28 Jan 2025 19:12:15 +0100 Subject: [PATCH 26/26] Rebase PR on top of other changes --- tests/unit/forklift/test_legacy.py | 5 +++-- tests/unit/legacy/api/test_json.py | 1 - warehouse/forklift/legacy.py | 2 ++ 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py index e762fb7d4d49..f941d9080b6a 100644 --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -5362,6 +5362,7 @@ def test_upload_succeeds_with_stage_header( release = ( db_request.db.query(Release) .filter((Release.project == project) & (Release.version == "1.0")) + .execution_options(include_staged=True) .one() ) @@ -5426,8 +5427,7 @@ def test_upload_succeeds_on_staged_release( ) # Create a release and add a file - release = ReleaseFactory.create(project=project, version="1.0") - release.published = False + release = ReleaseFactory.create(project=project, version="1.0", published=False) FileFactory.create(release=release, packagetype="bdist_wheel") filename = "{}-{}.tar.gz".format( @@ -5579,6 +5579,7 @@ def test_upload_succeeds_on_staged_release_with_stage_header( release = ( db_request.db.query(Release) .filter((Release.project == project) & (Release.version == "1.0")) + .execution_options(include_staged=True) .one() ) assert not release.published diff --git a/tests/unit/legacy/api/test_json.py 
b/tests/unit/legacy/api/test_json.py index bff6a36ce632..23973fa4bb73 100644 --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -242,7 +242,6 @@ def test_normalizing_redirects(self, db_request): def test_renders(self, pyramid_config, db_request, db_session): project = ProjectFactory.create(has_docs=True) description_content_type = "text/x-rst" - print("NOJ") url = "/the/fake/url/" project_urls = [ "url," + url, diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py index c752d8b10c95..0fca7d687f92 100644 --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -60,6 +60,7 @@ from warehouse.forklift import metadata from warehouse.forklift.forms import UploadForm, _filetype_extension_mapping from warehouse.macaroons.models import Macaroon +from warehouse.metrics import IMetricsService from warehouse.packaging.interfaces import IFileStorage, IProjectService from warehouse.packaging.metadata_verification import verify_email, verify_url from warehouse.packaging.models import ( @@ -888,6 +889,7 @@ def file_upload(request): (Release.project == project) & (Release.canonical_version == canonical_version) ) + .execution_options(include_staged=True) .one() ) except MultipleResultsFound: