repo
stringclasses
358 values
pull_number
int64
6
67.9k
instance_id
stringlengths
12
49
issue_numbers
sequencelengths
1
7
base_commit
stringlengths
40
40
patch
stringlengths
87
101M
test_patch
stringlengths
72
22.3M
problem_statement
stringlengths
3
256k
hints_text
stringlengths
0
545k
created_at
stringlengths
20
20
PASS_TO_PASS
sequencelengths
0
0
FAIL_TO_PASS
sequencelengths
0
0
pypi/warehouse
6,119
pypi__warehouse-6119
[ "6114" ]
c76c0f9b230a28147965fd4dcdd63f7a561a9b23
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import base64 import io from collections import defaultdict @@ -323,6 +324,7 @@ def __init__(self, request): def default_response(self): totp_secret = self.request.session.get_totp_secret() return { + "provision_totp_secret": base64.b32encode(totp_secret).decode(), "provision_totp_form": ProvisionTOTPForm(totp_secret=totp_secret), "provision_totp_uri": otp.generate_totp_provisioning_uri( totp_secret,
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import base64 import datetime import uuid @@ -782,6 +783,7 @@ def test_totp_provision(self, monkeypatch): assert provision_totp_cls.calls == [pretend.call(totp_secret=b"secret")] assert result == { + "provision_totp_secret": base64.b32encode(b"secret").decode(), "provision_totp_form": provision_totp_obj, "provision_totp_uri": "not_a_real_uri", } @@ -912,7 +914,7 @@ def test_validate_totp_provision_invalid_form(self, monkeypatch): POST={}, session=pretend.stub( flash=pretend.call_recorder(lambda *a, **kw: None), - get_totp_secret=lambda: pretend.stub(), + get_totp_secret=lambda: b"secret", ), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -943,6 +945,7 @@ def test_validate_totp_provision_invalid_form(self, monkeypatch): assert request.session.flash.calls == [] assert result == { + "provision_totp_secret": base64.b32encode(b"secret").decode(), "provision_totp_form": provision_totp_obj, "provision_totp_uri": "not_a_real_uri", }
Expose TOTP provisioning code for manual entry I use the command line tool "oathtool" for my 2FA. But your 2FA enrollment page only shows a QR code. How do I get the key for oathtool use?
Hi @garyemiller thanks for opening this ticket. Currently we don't expose this information in the user interface, but we can take a look into this. @woodruffw I assume we can simply output this information on the provisioning page? If you want to make a PR adding this to the template, I can style it. > I assume we can simply output this information on the provisioning page? If you want to make a PR adding this to the template, I can style it. Yep, should be as simple as that. I'll open a PR in a bit. Edit: An idea: we might want to expose this as a clipboard action. @nlhkabu would that be possible?
2019-06-28T14:24:30Z
[]
[]
pypi/warehouse
6,142
pypi__warehouse-6142
[ "4470" ]
6113af1ecb523e46f4f6f434b67e551b8a8cf319
diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -517,9 +517,7 @@ def __eq__(self, other): @implementer(IPasswordBreachedService) class HaveIBeenPwnedPasswordBreachedService: - _failure_message_preamble = ( - "This password appears in a breach or has been compromised and cannot be used." - ) + _failure_message_preamble = "This password appears in a security breach or has been compromised and cannot be used." def __init__( self, @@ -603,7 +601,7 @@ def check_password(self, password, *, tags=None): # 0136E006E24E7D152139815FB0FC6A50B15:2 # ... # - # THat is, it is a line delimited textual data, where each line is a hash, a + # That is, it is a line delimited textual data, where each line is a hash, a # colon, and then the number of times that password has appeared in a breach. # For our uses, we're going to consider any password that has ever appeared in # a breach to be insecure, even if only once. diff --git a/warehouse/csp.py b/warehouse/csp.py --- a/warehouse/csp.py +++ b/warehouse/csp.py @@ -82,6 +82,7 @@ def includeme(config): "https://api.github.com/repos/", "*.fastly-insights.com", "sentry.io", + "https://api.pwnedpasswords.com", ] + [ item
diff --git a/tests/frontend/__mocks__/debounce.js b/tests/frontend/__mocks__/debounce.js new file mode 100644 --- /dev/null +++ b/tests/frontend/__mocks__/debounce.js @@ -0,0 +1,33 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + +Mock debounce module. Jest will import this file automatically +on all test suites that import debounce. For more information see: +https://jestjs.io/docs/en/manual-mocks + +*/ + +/* global jest, module */ + +const debounce = jest.genMockFromModule("debounce"); + +function mockDebounce(fn) { + // Return the wrapped function unchanged. + return fn; +} + +debounce.debounce = mockDebounce; +module.exports = debounce; diff --git a/tests/frontend/password_breach_controller_test.js b/tests/frontend/password_breach_controller_test.js new file mode 100644 --- /dev/null +++ b/tests/frontend/password_breach_controller_test.js @@ -0,0 +1,87 @@ +/* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* global expect, beforeEach, describe, it */ + +import { fireEvent } from "@testing-library/dom"; +import { Application } from "stimulus"; +import { delay } from "./utils"; +import PasswordBreachController from "../../warehouse/static/js/warehouse/controllers/password_breach_controller"; + +let application = null; + +describe("Password breach controller", () => { + beforeEach(() => { + document.body.innerHTML = ` + <div id="controller" data-controller="password-breach"> + <input id="password" data-target="password-breach.password" data-action="input->password-breach#check" placeholder="Your password" type="password" /> + <p id="message" data-target="password-breach.message" class="hidden">Password breached</p> + </div> + `; + + application = Application.start(); + application.register("password-breach", PasswordBreachController); + }); + + describe("initial state", () => { + describe("the message", () => { + it("is hidden", () => { + const message = document.getElementById("message"); + expect(message).toHaveClass("hidden"); + }); + }); + }); + + describe("functionality", () => { + beforeEach(() => { + fetch.resetMocks(); + }); + + describe("entering a password with less than 3 characters", () => { + it("does not call the HIBP API", async () => { + const passwordField = document.querySelector("#password"); + fireEvent.input(passwordField, { target: { value: "fo" } }); + + await delay(25); // arbitrary number of ms, too low may cause failures + expect(fetch.mock.calls.length).toEqual(0); + }); + }); + + describe("entering a breached password with more than 2 characters", () => { + it("calls the HIBP API and shows the message", async () => { + // The response must match the slice of the hashed password + fetch.mockResponse("7B5EA3F0FDBC95D0DD47F3C5BC275DA8A33:5270"); + const passwordField = document.querySelector("#password"); + fireEvent.input(passwordField, { target: { value: "foo" } }); + + await delay(25); + expect(fetch.mock.calls.length).toEqual(1); + expect(document.getElementById("message")).not.toHaveClass("hidden"); + }); + }); + + describe("entering a safe password with more than 2 characters", () => { + it("calls the HIBP API and does not show the message", async () => { + const verySecurePassword = "^woHw6w4j8zShVPyWtNKFn&DspydLQtIFPk97T@k$78H3pRsJ9RNB5SpLIux"; + // the response does not match the sliche of the hashed password + fetch.mockResponse("00DC70F3D981248DF52C620198328108406:4"); + const passwordField = document.querySelector("#password"); + fireEvent.input(passwordField, { target: { value: verySecurePassword } }); + + await delay(25); + expect(fetch.mock.calls.length).toEqual(1); + expect(document.getElementById("message")).toHaveClass("hidden"); + }); + }); + }); +}); diff --git a/tests/frontend/setup.js b/tests/frontend/setup.js --- a/tests/frontend/setup.js +++ b/tests/frontend/setup.js @@ -11,6 +11,8 @@ * limitations under the License. */ +/* global fetch */ + // Setup MutationObserver shim since jsdom doesn't // support it out of the box. 
@@ -36,3 +38,12 @@ import "@testing-library/jest-dom/extend-expect"; // Required to use async/await in tests import "@babel/polyfill"; + +// Monkeypatch the global fetch API +fetch = require("jest-fetch-mock"); // eslint-disable-line no-global-assign + +// Make TextEncoder and cryto available in the global scope +// in the same way as in a browser environment +window.TextEncoder = require("util").TextEncoder; +const WebCrypto = require("node-webcrypto-ossl"); +window.crypto = new WebCrypto(); diff --git a/tests/frontend/utils.js b/tests/frontend/utils.js new file mode 100644 --- /dev/null +++ b/tests/frontend/utils.js @@ -0,0 +1,27 @@ +/* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +export function domReady() { + return new Promise(resolve => { + if (document.readyState == "loading") { + document.addEventListener("DOMContentLoaded", resolve); + } else { + resolve(); + } + }); +} + +export function delay(ms) { + return new Promise(resolve => setTimeout(resolve, ms || 0)); +} diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py --- a/tests/unit/accounts/test_services.py +++ b/tests/unit/accounts/test_services.py @@ -904,15 +904,15 @@ def test_factory(self): ( None, ( - "This password appears in a breach or has been compromised and " - "cannot be used." + "This password appears in a security breach or has " + "been compromised and cannot be used." ), ), ( "http://localhost/help/#compromised-password", ( - "This password appears in a breach or has been compromised and " - "cannot be used. See " + "This password appears in a security breach or has been " + "compromised and cannot be used. See " '<a href="http://localhost/help/#compromised-password">' "this FAQ entry</a> for more information." ), @@ -939,15 +939,15 @@ def test_failure_message(self, help_url, expected): ( None, ( - "This password appears in a breach or has been compromised and " - "cannot be used." + "This password appears in a security breach or has been " + "compromised and cannot be used." ), ), ( "http://localhost/help/#compromised-password", ( - "This password appears in a breach or has been compromised and " - "cannot be used. See the FAQ entry at " + "This password appears in a security breach or has been " + "compromised and cannot be used. See the FAQ entry at " "http://localhost/help/#compromised-password for more information." ), ), diff --git a/tests/unit/test_csp.py b/tests/unit/test_csp.py --- a/tests/unit/test_csp.py +++ b/tests/unit/test_csp.py @@ -203,6 +203,7 @@ def test_includeme(): "https://api.github.com/repos/", "*.fastly-insights.com", "sentry.io", + "https://api.pwnedpasswords.com", "https://2p66nmmycsj3.statuspage.io", ], "default-src": ["'none'"],
Add javascript/frontend validation of breached passwords In https://github.com/pypa/warehouse/pull/4468 we've started blocking new passwords (via registration, reset, or password change) that have previously been in a data breach of a compromised website. However, that validation currently only occurs server side, it would be nice to also have it occur client side. This can happen in two ways: * Have the client JS hit the HIBP API directly, effectively re-implementing the logic that exists in the Python code. * Have the client JS hit a private API endpoint on Warehouse, that calls into the logic that exists there. * If we do this, what should this API look like? Do we want to just send the password over the wire (since that's going to happen on submit anyways?), do we want to prehash it with sha1 and send that over the wire? Or do we want to reimplement the HIBP api in Warehouse? (this would only prevent leaking our users PII via additional http requests to an external service). Of course, the other question is whether we want to do this or not. It's hard to do it without either reimplementing logic, or sending the password to Warehouse as the user is typing it, neither of which are ideal. Maybe having this validation happen only server side is ok?
I see value in having the registration form perform this check in the frontend alongside having the strength one using zxcvbn. Admittedtly the logic is simpler in that case but this one [doesn't look that difficult to replicate](https://github.com/pypa/warehouse/blob/99e6c7eaa60696098c06506366fc30613e484513/warehouse/accounts/services.py#L339-L391). Maybe we could call the HIBP API only when the user focuses away from the password field and/or when focusing on the confirmation input instead on every keypress? We'd be interested in having volunteers do this work and add this feature to Warehouse! @yeraydiazdiaz is this something you're interested in working on? Hi @brainwane, nice to have you back 🙂 Yeah, I could throw some cycles at this. @yeraydiazdiaz great! Have you had any time to poke at this? @brainwane I started some time ago but never got far with it, I may get some time to work on this soon though. That would be great if you could, @yeraydiazdiaz!
2019-07-03T17:34:08Z
[]
[]
pypi/warehouse
6,193
pypi__warehouse-6193
[ "6183" ]
5c569b08a675fb24ef7c30fa9e57840a39dd243b
diff --git a/warehouse/utils/webauthn.py b/warehouse/utils/webauthn.py --- a/warehouse/utils/webauthn.py +++ b/warehouse/utils/webauthn.py @@ -110,8 +110,9 @@ def verify_registration_response(response, challenge, *, rp_id, origin): # response's clientData.challenge is encoded twice: # first for the entire clientData payload, and then again # for the individual challenge. + encoded_challenge = _webauthn_b64encode(challenge.encode()).decode() response = pywebauthn.WebAuthnRegistrationResponse( - rp_id, origin, response, _webauthn_b64encode(challenge.encode()).decode() + rp_id, origin, response, encoded_challenge, self_attestation_permitted=True ) try: return response.verify() @@ -129,12 +130,13 @@ def verify_assertion_response(assertion, *, challenge, user, origin, icon_url, r """ webauthn_users = _get_webauthn_users(user, icon_url=icon_url, rp_id=rp_id) cred_ids = [cred.credential_id for cred in webauthn_users] + encoded_challenge = _webauthn_b64encode(challenge.encode()).decode() for webauthn_user in webauthn_users: response = pywebauthn.WebAuthnAssertionResponse( webauthn_user, assertion, - _webauthn_b64encode(challenge.encode()).decode(), + encoded_challenge, origin, allow_credentials=cred_ids, )
diff --git a/tests/unit/utils/test_webauthn.py b/tests/unit/utils/test_webauthn.py --- a/tests/unit/utils/test_webauthn.py +++ b/tests/unit/utils/test_webauthn.py @@ -46,6 +46,7 @@ def test_verify_registration_response(monkeypatch): "fake_origin", {}, webauthn._webauthn_b64encode("not_a_real_challenge".encode()).decode(), + self_attestation_permitted=True, ) ] assert resp == "not a real object"
2FA: Enrolling a TouchID sensor as a webauthn security key fails (Chrome, Mac) <!-- NOTE: This issue should be for problems with PyPI itself, including: * pypi.org * test.pypi.org * files.pythonhosted.org This issue should NOT be for a project installed from PyPI. If you are having an issue with a specific package, you should reach out to the maintainers of that project directly instead. Furthermore, this issue should NOT be for any non-PyPI properties (like python.org, docs.python.org, etc.) --> **Describe the bug** I'm trying to enroll a TouchID sensor as a webauthn device. PyPI and Chrome do let me select the sensor, and I do get prompted for a touch, but then PyPI throws an error: "Registration rejected. Error: Self attestation is not permitted.." **Expected behavior** I expect to be able to enroll a TouchID sensor. **To Reproduce** - PyPI --> Account Settings - Click "Add 2FA With Security Key" - Type a key name, click "Provision Key" - Chrome prompts to choose between a USB security key and a built-in sensor. Choose "Built-in sensor" - MacOS prompts to hit the TouchID sensor. Do so. - Chrome prompts, "Allow this site to see your security key?" Click "Allow" - PyPI displays an error: "Registration rejected. Error: Self attestation is not permitted.." **My Platform** - MacOS 10.14.5 - MacBook Air (2018 edition, with TouchID) - Chrome "75.0.3770.100 (Official Build) (64-bit)"
I can reproduce this with the same macOS/Chrome versions. Thanks! I'll take a look today.
2019-07-15T14:56:02Z
[]
[]
pypi/warehouse
6,195
pypi__warehouse-6195
[ "6172" ]
882974e75671910c7b120c6c5502691889b17182
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -337,7 +337,7 @@ def default_response(self): def generate_totp_qr(self): if not self.request.user.two_factor_provisioning_allowed: self.request.session.flash( - "Modifying 2FA requires a verified email.", queue="error" + "Verify your email to modify two factor authentication", queue="error" ) return Response(status=403) @@ -355,13 +355,17 @@ def generate_totp_qr(self): def totp_provision(self): if not self.request.user.two_factor_provisioning_allowed: self.request.session.flash( - "Modifying 2FA requires a verified email.", queue="error" + "Verify your email to modify two factor authentication", queue="error" ) return Response(status=403) totp_secret = self.user_service.get_totp_secret(self.request.user.id) if totp_secret: - self.request.session.flash("TOTP already provisioned.", queue="error") + self.request.session.flash( + "Account cannot be linked to more than one authentication " + "application at a time", + queue="error", + ) return HTTPSeeOther(self.request.route_path("manage.account")) return self.default_response @@ -370,13 +374,17 @@ def totp_provision(self): def validate_totp_provision(self): if not self.request.user.two_factor_provisioning_allowed: self.request.session.flash( - "Modifying 2FA requires a verified email.", queue="error" + "Verify your email to modify two factor authentication", queue="error" ) return Response(status=403) totp_secret = self.user_service.get_totp_secret(self.request.user.id) if totp_secret: - self.request.session.flash("TOTP already provisioned.", queue="error") + self.request.session.flash( + "Account cannot be linked to more than one authentication " + "application at a time", + queue="error", + ) return HTTPSeeOther(self.request.route_path("manage.account")) form = ProvisionTOTPForm( @@ -390,7 +398,7 @@ def validate_totp_provision(self): self.request.session.clear_totp_secret() self.request.session.flash( - "TOTP application successfully provisioned.", queue="success" + "Authentication application successfully set up", queue="success" ) return HTTPSeeOther(self.request.route_path("manage.account")) @@ -401,13 +409,15 @@ def validate_totp_provision(self): def delete_totp(self): if not self.request.user.two_factor_provisioning_allowed: self.request.session.flash( - "Modifying 2FA requires a verified email.", queue="error" + "Verify your email to modify two factor authentication", queue="error" ) return Response(status=403) totp_secret = self.user_service.get_totp_secret(self.request.user.id) if not totp_secret: - self.request.session.flash("No TOTP application to delete.", queue="error") + self.request.session.flash( + "There is no authentication application to delete", queue="error" + ) return HTTPSeeOther(self.request.route_path("manage.account")) form = DeleteTOTPForm( @@ -418,9 +428,13 @@ def delete_totp(self): if form.validate(): self.user_service.update_user(self.request.user.id, totp_secret=None) - self.request.session.flash("TOTP application deleted.", queue="success") + self.request.session.flash( + "Authentication application deleted from PyPI. 
" + "Remember to remove PyPI from your application.", + queue="success", + ) else: - self.request.session.flash("Invalid credentials.", queue="error") + self.request.session.flash("Invalid credentials", queue="error") return HTTPSeeOther(self.request.route_path("manage.account")) @@ -488,9 +502,9 @@ def validate_webauthn_provision(self): sign_count=form.validated_credential.sign_count, ) self.request.session.flash( - "WebAuthn successfully provisioned.", queue="success" + "Physical security key successfully set up", queue="success" ) - return {"success": "WebAuthn successfully provisioned"} + return {"success": "Physical security key successfully set up"} errors = [ str(error) for error_list in form.errors.values() for error in error_list @@ -504,7 +518,9 @@ def validate_webauthn_provision(self): ) def delete_webauthn(self): if len(self.request.user.webauthn) == 0: - self.request.session.flash("No WebAuthhn device to delete.", queue="error") + self.request.session.flash( + "There is no physical security key to delete", queue="error" + ) return HTTPSeeOther(self.request.route_path("manage.account")) form = DeleteWebAuthnForm( @@ -516,9 +532,9 @@ def delete_webauthn(self): if form.validate(): self.request.user.webauthn.remove(form.webauthn) - self.request.session.flash("WebAuthn device deleted.", queue="success") + self.request.session.flash("Physical security key deleted", queue="success") else: - self.request.session.flash("Invalid credentials.", queue="error") + self.request.session.flash("Invalid credentials", queue="error") return HTTPSeeOther(self.request.route_path("manage.account")) @@ -769,7 +785,7 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): elif user.primary_email is None or not user.primary_email.verified: request.session.flash( f"User '{username}' does not have a verified primary email " - f"address and cannot be added as a {role_name} for project.", + f"address and cannot be added as a {role_name} for project", queue="error", ) else:
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -744,7 +744,9 @@ def test_generate_totp_qr_two_factor_not_allowed(self): assert isinstance(result, Response) assert result.status_code == 403 assert request.session.flash.calls == [ - pretend.call("Modifying 2FA requires a verified email.", queue="error") + pretend.call( + "Verify your email to modify two factor authentication", queue="error" + ) ] def test_totp_provision(self, monkeypatch): @@ -812,7 +814,11 @@ def test_totp_provision_already_provisioned(self, monkeypatch): assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" assert request.session.flash.calls == [ - pretend.call("TOTP already provisioned.", queue="error") + pretend.call( + "Account cannot be linked to more than one authentication " + "application at a time", + queue="error", + ) ] def test_totp_provision_two_factor_not_allowed(self): @@ -831,7 +837,9 @@ def test_totp_provision_two_factor_not_allowed(self): assert isinstance(result, Response) assert result.status_code == 403 assert request.session.flash.calls == [ - pretend.call("Modifying 2FA requires a verified email.", queue="error") + pretend.call( + "Verify your email to modify two factor authentication", queue="error" + ) ] def test_validate_totp_provision(self, monkeypatch): @@ -872,7 +880,9 @@ def test_validate_totp_provision(self, monkeypatch): pretend.call(request.user.id, totp_secret=b"secret") ] assert request.session.flash.calls == [ - pretend.call("TOTP application successfully provisioned.", queue="success") + pretend.call( + "Authentication application successfully set up", queue="success" + ) ] def test_validate_totp_provision_already_provisioned(self, monkeypatch): @@ -902,7 +912,11 @@ def test_validate_totp_provision_already_provisioned(self, monkeypatch): assert user_service.update_user.calls == [] assert request.route_path.calls == [pretend.call("manage.account")] assert request.session.flash.calls == [ - pretend.call("TOTP already provisioned.", queue="error") + pretend.call( + "Account cannot be linked to more than one authentication " + "application at a time", + queue="error", + ) ] assert isinstance(result, HTTPSeeOther) @@ -966,7 +980,9 @@ def test_validate_totp_provision_two_factor_not_allowed(self): assert isinstance(result, Response) assert result.status_code == 403 assert request.session.flash.calls == [ - pretend.call("Modifying 2FA requires a verified email.", queue="error") + pretend.call( + "Verify your email to modify two factor authentication", queue="error" + ) ] def test_delete_totp(self, monkeypatch, db_request): @@ -1000,7 +1016,11 @@ def test_delete_totp(self, monkeypatch, db_request): pretend.call(request.user.id, totp_secret=None) ] assert request.session.flash.calls == [ - pretend.call("TOTP application deleted.", queue="success") + pretend.call( + "Authentication application deleted from PyPI. 
" + "Remember to remove PyPI from your application.", + queue="success", + ) ] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" @@ -1033,7 +1053,7 @@ def test_delete_totp_bad_username(self, monkeypatch, db_request): assert user_service.update_user.calls == [] assert request.session.flash.calls == [ - pretend.call("Invalid credentials.", queue="error") + pretend.call("Invalid credentials", queue="error") ] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" @@ -1066,7 +1086,9 @@ def test_delete_totp_not_provisioned(self, monkeypatch, db_request): assert user_service.update_user.calls == [] assert request.session.flash.calls == [ - pretend.call("No TOTP application to delete.", queue="error") + pretend.call( + "There is no authentication application to delete", queue="error" + ) ] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" @@ -1087,7 +1109,9 @@ def test_delete_totp_two_factor_not_allowed(self): assert isinstance(result, Response) assert result.status_code == 403 assert request.session.flash.calls == [ - pretend.call("Modifying 2FA requires a verified email.", queue="error") + pretend.call( + "Verify your email to modify two factor authentication", queue="error" + ) ] @@ -1182,9 +1206,9 @@ def test_validate_webauthn_provision(self, monkeypatch): ) ] assert request.session.flash.calls == [ - pretend.call("WebAuthn successfully provisioned.", queue="success") + pretend.call("Physical security key successfully set up", queue="success") ] - assert result == {"success": "WebAuthn successfully provisioned"} + assert result == {"success": "Physical security key successfully set up"} def test_validate_webauthn_provision_invalid_form(self, monkeypatch): user_service = pretend.stub( @@ -1251,7 +1275,7 @@ def test_delete_webauthn(self, monkeypatch): result = view.delete_webauthn() assert request.session.flash.calls == [ - pretend.call("WebAuthn device deleted.", queue="success") + pretend.call("Physical security key deleted", queue="success") ] assert request.route_path.calls == [pretend.call("manage.account")] assert isinstance(result, HTTPSeeOther) @@ -1269,7 +1293,7 @@ def test_delete_webauthn_not_provisioned(self): result = view.delete_webauthn() assert request.session.flash.calls == [ - pretend.call("No WebAuthhn device to delete.", queue="error") + pretend.call("There is no physical security key to delete", queue="error") ] assert request.route_path.calls == [pretend.call("manage.account")] assert isinstance(result, HTTPSeeOther) @@ -1296,7 +1320,7 @@ def test_delete_webauthn_invalid_form(self, monkeypatch): result = view.delete_webauthn() assert request.session.flash.calls == [ - pretend.call("Invalid credentials.", queue="error") + pretend.call("Invalid credentials", queue="error") ] assert request.route_path.calls == [pretend.call("manage.account")] assert isinstance(result, HTTPSeeOther) @@ -2017,7 +2041,7 @@ def test_post_unverified_email(self, db_request, with_email): assert db_request.session.flash.calls == [ pretend.call( "User 'testuser' does not have a verified primary email address " - "and cannot be added as a Owner for project.", + "and cannot be added as a Owner for project", queue="error", ) ]
Improve 2fa error and confirmation messages ## Change webauthn error texts Current: Your browser does not support WebAuthn New (admin page): You cannot set up a USB security key, as your browser does not work with WebAuthn (the standard supported by PyPI) New (login workflow): You cannot use a USB security key, as your browser does not work with WebAuthn (the standard supported by PyPI) Current: "WebAuthn cannot be used without JavaScript." New (admin page): "Enable JavaScript to setup 2FA with a USB security key" New (login page): "Enable JavaScript to login to PyPI with a USB security key" ### Audit confirmation and error flash messages - Go through [this file](https://github.com/pypa/warehouse/blob/master/warehouse/manage/views.py) to ensure flash messages are consistent with content copy, e.g. "WebAuthn successfully provisioned", should become "USB security key set up" - Remind users to delete their TOTP entry on their phone after disabling 2fa with TOTP.
2019-07-15T18:41:35Z
[]
[]
pypi/warehouse
6,200
pypi__warehouse-6200
[ "6171" ]
eaa4f2bb533b744c95add9f1d1db8634f8233532
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -429,7 +429,7 @@ def delete_totp(self): if form.validate(): self.user_service.update_user(self.request.user.id, totp_secret=None) self.request.session.flash( - "Authentication application deleted from PyPI. " + "Authentication application removed from PyPI. " "Remember to remove PyPI from your application.", queue="success", ) @@ -502,9 +502,9 @@ def validate_webauthn_provision(self): sign_count=form.validated_credential.sign_count, ) self.request.session.flash( - "Physical security key successfully set up", queue="success" + "Security device successfully set up", queue="success" ) - return {"success": "Physical security key successfully set up"} + return {"success": "Security device successfully set up"} errors = [ str(error) for error_list in form.errors.values() for error in error_list @@ -519,7 +519,7 @@ def validate_webauthn_provision(self): def delete_webauthn(self): if len(self.request.user.webauthn) == 0: self.request.session.flash( - "There is no physical security key to delete", queue="error" + "There is no security device to delete", queue="error" ) return HTTPSeeOther(self.request.route_path("manage.account")) @@ -532,7 +532,7 @@ def delete_webauthn(self): if form.validate(): self.request.user.webauthn.remove(form.webauthn) - self.request.session.flash("Physical security key deleted", queue="success") + self.request.session.flash("Security device removed", queue="success") else: self.request.session.flash("Invalid credentials", queue="error")
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -1017,7 +1017,7 @@ def test_delete_totp(self, monkeypatch, db_request): ] assert request.session.flash.calls == [ pretend.call( - "Authentication application deleted from PyPI. " + "Authentication application removed from PyPI. " "Remember to remove PyPI from your application.", queue="success", ) @@ -1206,9 +1206,9 @@ def test_validate_webauthn_provision(self, monkeypatch): ) ] assert request.session.flash.calls == [ - pretend.call("Physical security key successfully set up", queue="success") + pretend.call("Security device successfully set up", queue="success") ] - assert result == {"success": "Physical security key successfully set up"} + assert result == {"success": "Security device successfully set up"} def test_validate_webauthn_provision_invalid_form(self, monkeypatch): user_service = pretend.stub( @@ -1275,7 +1275,7 @@ def test_delete_webauthn(self, monkeypatch): result = view.delete_webauthn() assert request.session.flash.calls == [ - pretend.call("Physical security key deleted", queue="success") + pretend.call("Security device removed", queue="success") ] assert request.route_path.calls == [pretend.call("manage.account")] assert isinstance(result, HTTPSeeOther) @@ -1293,7 +1293,7 @@ def test_delete_webauthn_not_provisioned(self): result = view.delete_webauthn() assert request.session.flash.calls == [ - pretend.call("There is no physical security key to delete", queue="error") + pretend.call("There is no security device to delete", queue="error") ] assert request.route_path.calls == [pretend.call("manage.account")] assert isinstance(result, HTTPSeeOther)
Improve 2fa instructions ### On the webauthn provisioning page - "Webauthn" is not commonly understood. Instead, refer to webauthn as "phyiscal security key (e.g. USB)". Include examples of key brands (e.g. "Yubikey") in instructional text - Avoid using 'provisioning'. Instead use 'set up' - "Name key" is not a clear enough input label. Add an instruction above, or rename the label to "Enter key name to begin" - Remove 'How it works' session and replace with set up guidelines - Add 'troubleshooting' callout block - something like "Not working? You may be using an old USB security key that's not FIDO compatible (the standard supported by PyPI)." - Add/fix link to help page - Consider adding image of USB to page to give visual instruction ### On the TOTP provisioning page - "TOTP application" should be referred to as "Authentication application" - Avoid using 'provisioning'. Instead use 'set up' - Remove 'How it works' session and replace with set up guidelines - Add list of application names to page (e.g. "Authy", "Google Auth" etc.) - Consider adding image of phone/app to page to give visual instruction - Add/fix link to help page
2019-07-17T06:05:31Z
[]
[]
pypi/warehouse
6,207
pypi__warehouse-6207
[ "6157" ]
67bf46dcee5533a7e35377f63ea6479a7fb824e2
diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -111,11 +111,11 @@ def validate_totp_value(self, field): class DeleteWebAuthnForm(forms.Form): - __params__ = ["confirm_key_name"] + __params__ = ["confirm_device_name"] label = wtforms.StringField( validators=[ - wtforms.validators.DataRequired(message="Specify a label"), + wtforms.validators.DataRequired(message="Specify a device name"), wtforms.validators.Length( max=64, message=("Label must be 64 characters or less") ),
diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -293,7 +293,7 @@ def test_validate_label_missing(self): ) assert not form.validate() - assert form.label.errors.pop() == "Specify a label" + assert form.label.errors.pop() == "Specify a device name" def test_validate_label_not_in_use(self): user_service = pretend.stub(
Improve webauthn errors Problems: - We currently have two pieces of JS that control the display of webauthn errors - some in `index.js`, some in `webauthn.js` - The errors are not announced to the screenreader (via `role=alert`) - The errors are not associated with the webauthn label field (on the provisioning page) - we should use `aria-describedby` for this - The user is able to put text into the label field on the provisioning page - it should be disabled
@woodruffw as most of this JS is yours, it would be helpful if we could look into this together? Thanks Yep! Let's set aside some time for this before ending the WebAuthn beta.
2019-07-18T20:39:45Z
[]
[]
pypi/warehouse
6,274
pypi__warehouse-6274
[ "6266" ]
0ab3d80b1127f379e4f68dbad218c47e71c63a8a
diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -196,7 +196,7 @@ def __init__(self, *args, user_id, macaroon_service, project_names, **kwargs): description = wtforms.StringField( validators=[ - wtforms.validators.DataRequired(message="Specify a description"), + wtforms.validators.DataRequired(message="Specify a token name"), wtforms.validators.Length( max=100, message="Description must be 100 characters or less" ), @@ -204,7 +204,7 @@ def __init__(self, *args, user_id, macaroon_service, project_names, **kwargs): ) token_scope = wtforms.StringField( - validators=[wtforms.validators.DataRequired(message="Specify a token scope")] + validators=[wtforms.validators.DataRequired(message="Specify the token scope")] ) def validate_description(self, field): @@ -224,6 +224,9 @@ def validate_token_scope(self, field): except ValueError: raise wtforms.ValidationError(f"Unknown token scope: {scope}") + if scope_kind == "unspecified": + raise wtforms.ValidationError(f"Specify the token scope") + if scope_kind == "user": self.validated_scope = scope_kind return
diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -347,7 +347,7 @@ def test_validate_description_missing(self): ) assert not form.validate() - assert form.description.errors.pop() == "Specify a description" + assert form.description.errors.pop() == "Specify a token name" def test_validate_description_in_use(self): form = forms.CreateMacaroonForm( @@ -371,7 +371,18 @@ def test_validate_token_scope_missing(self): ) assert not form.validate() - assert form.token_scope.errors.pop() == "Specify a token scope" + assert form.token_scope.errors.pop() == "Specify the token scope" + + def test_validate_token_scope_unspecified(self): + form = forms.CreateMacaroonForm( + data={"description": "dummy", "token_scope": "scope:unspecified"}, + user_id=pretend.stub(), + macaroon_service=pretend.stub(get_macaroon_by_description=lambda *a: None), + project_names=pretend.stub(), + ) + + assert not form.validate() + assert form.token_scope.errors.pop() == "Specify the token scope" @pytest.mark.parametrize( ("scope"), ["not a real scope", "scope:project", "scope:foo:bar"]
API token default scope: user or project? Followup to #994: @brettcannon [asks](https://discuss.python.org/t/pypi-security-work-multifactor-auth-progress-help-needed/1042/33): > > @ewdurbin said: By default, newly created tokens will have “user” scope, meaning that they’ll behave exactly like your password. > Are there plans to change this default so that using such a strong token is not the default so that people have to opt into it? (I’m no security expert so this is more inquisitive.) As far as I know there are no such plans but I'd like @woodruffw and @nlhkabu to weigh in.
An idea: We could add some additional UI on creation of a user-scoped token, warning the user that their new token will have access to all of their projects. This would allow us to retain it as a default (which I think is sensible, at least insofar as it doesn't make sense to choose a random project from the user's list as a default) while also making the security properties clear.
2019-07-26T16:05:50Z
[]
[]
pypi/warehouse
6,294
pypi__warehouse-6294
[ "6240" ]
8d11fb45aba885249bc002dfa96145bcb064d6bc
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -252,6 +252,11 @@ def _validate_legacy_dist_req(requirement): "Can't have direct dependency: {!r}".format(requirement) ) + if any(packaging.version.Version(spec.version).local for spec in req.specifier): + raise wtforms.validators.ValidationError( + "Can't have dependency with local version: {!r}".format(requirement) + ) + def _validate_legacy_dist_req_list(form, field): for datum in field.data:
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -170,6 +170,7 @@ def test_validate_legacy_dist_req_valid(self, requirement): "_foo", "_foo (>=1.0)", "name @ https://github.com/pypa", + "test-pypi-version-specifier-dep==0.0.1+cuda9", ], ) def test_validate_legacy_dist_req_invalid(self, requirement):
PyPI accepts packages with dependencies on local versions (e.g., 0.1.0+local) PyPI accepts packages with dependencies on local versions (e.g., 0.1.0+local). I'm not sure if this is intentional or not, since PyPI will reject packages whose version is a local version. I tested this was the case using this test package: ``` import setuptools import os import re setuptools.setup( name="test-pypi-version-specifier-main", version="0.0.2", author="Edward Z. Yang", author_email="[email protected]", description="Testing package", long_description="Yarr", long_description_content_type="text/markdown", url="https://localhost/", packages=setuptools.find_packages(), include_package_data=True, classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], install_requires=[ "test-pypi-version-specifier-dep==0.0.1+cuda9", ], ) ```
2019-07-29T15:36:04Z
[]
[]
pypi/warehouse
6,301
pypi__warehouse-6301
[ "6262" ]
4620c88f035440ef3a4f05f1c11e0470b671f5bf
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -559,8 +559,7 @@ def __init__(self, request): @property def project_names(self): - projects = user_projects(self.request)["projects_owned"] - return [project.name for project in projects] + return sorted(project.name for project in self.request.user.projects) @property def default_response(self):
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -1363,6 +1363,41 @@ def test_default_response(self, monkeypatch): "delete_macaroon_form": delete_macaroon_obj, } + def test_project_names(self, db_request): + user = UserFactory.create() + another_user = UserFactory.create() + + db_request.user = user + db_request.find_service = lambda *a, **kw: pretend.stub() + + # A project with a sole owner that is the user + with_sole_owner = ProjectFactory.create(name="foo") + RoleFactory.create(user=user, project=with_sole_owner, role_name="Owner") + RoleFactory.create( + user=another_user, project=with_sole_owner, role_name="Maintainer" + ) + + # A project with multiple owners, including the user + with_multiple_owners = ProjectFactory.create(name="bar") + RoleFactory.create(user=user, project=with_multiple_owners, role_name="Owner") + RoleFactory.create( + user=another_user, project=with_multiple_owners, role_name="Owner" + ) + + # A project with a sole owner that is not the user + not_an_owner = ProjectFactory.create(name="baz") + RoleFactory.create(user=user, project=not_an_owner, role_name="Maintainer") + RoleFactory.create(user=another_user, project=not_an_owner, role_name="Owner") + + # A project that the user is neither owner nor maintainer of + neither_owner_nor_maintainer = ProjectFactory.create(name="quux") + RoleFactory.create( + user=another_user, project=neither_owner_nor_maintainer, role_name="Owner" + ) + + view = views.ProvisionMacaroonViews(db_request) + assert set(view.project_names) == {"foo", "bar", "baz"} + def test_manage_macaroons(self, monkeypatch): request = pretend.stub(find_service=lambda *a, **kw: pretend.stub()) @@ -1412,10 +1447,10 @@ def test_create_macaroon_invalid_form(self, monkeypatch): ) monkeypatch.setattr(views, "CreateMacaroonForm", create_macaroon_cls) - user_projects = pretend.call_recorder( - lambda r: {"projects_owned": [pretend.stub(name=pretend.stub())]} + project_names = [pretend.stub()] + monkeypatch.setattr( + views.ProvisionMacaroonViews, "project_names", project_names ) - monkeypatch.setattr(views, "user_projects", user_projects) default_response = {"default": "response"} monkeypatch.setattr( @@ -1458,11 +1493,10 @@ def test_create_macaroon(self, monkeypatch): ) monkeypatch.setattr(views, "CreateMacaroonForm", create_macaroon_cls) - project_name = pretend.stub() - user_projects = pretend.call_recorder( - lambda r: {"projects_owned": [pretend.stub(name=project_name)]} + project_names = [pretend.stub()] + monkeypatch.setattr( + views.ProvisionMacaroonViews, "project_names", project_names ) - monkeypatch.setattr(views, "user_projects", user_projects) default_response = {"default": "response"} monkeypatch.setattr(
[Project-scoped API tokens] aren't available to maintainers **Describe the bug** <!-- A clear and concise description the bug --> When I use a "bot" account with "Maintainer" level access to projects, there are no projects to select from in the form for the token creation. **Expected behavior** <!-- A clear and concise description of what you expected to happen --> Since this "bot" can upload dists using user/password auth, it should also have similar privileges set when using tokens. **To Reproduce** <!-- Steps to reproduce the bug, or a link to PyPI where the bug is visible --> Go to https://pypi.org/manage/account/token and try selecting a project where you have only "Maintainer"-level access, not "Owner". **My Platform** N/A **Additional context** <!-- Add any other context, links, etc. about the feature here. --> N/A
Yup, I can confirm this. If I am logged in as a user who has maintainer-level collaborator status on a project, that project does not appear in the Scope dropdown list at `manage/account/token/`. This is a bug IMO and we should fix it before further publicizing the feature (per rollout tracked in #5661). Thanks for finding and reporting this bug @webknjaz! :trophy: Hmm, yeah, this is probably just a matter of how/which projects we iterate over for the user. I'll check this out, thanks @webknjaz! > just a matter of how/which projects we iterate over That's my guess too. Yeah, here's the root-cause: We use the `user_projects` helper in `manage/views.py`, which returns projects based on just ownership: ```python def user_projects(request): """ Return all the projects for which the user is a sole owner """ projects_owned = ( request.db.query(Project.id) .join(Role.project) .filter(Role.role_name == "Owner", Role.user == request.user) .subquery() ) with_sole_owner = ( request.db.query(Role.project_id) .join(projects_owned) .filter(Role.role_name == "Owner") .group_by(Role.project_id) .having(func.count(Role.project_id) == 1) .subquery() ) return { "projects_owned": ( request.db.query(Project) .join(projects_owned, Project.id == projects_owned.c.id) .order_by(Project.name) .all() ), "projects_sole_owned": ( request.db.query(Project).join(with_sole_owner).order_by(Project.name).all() ), } ``` We could either amend that helper to include a sub-dict for maintainer roles as well, or add a new helper just for macaroons that returns just project names for those two roles.
2019-07-29T18:19:43Z
[]
[]
pypi/warehouse
6,337
pypi__warehouse-6337
[ "6336" ]
289c053522c48326207622bc31658606db20611b
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -252,11 +252,6 @@ def _validate_legacy_dist_req(requirement): "Can't have direct dependency: {!r}".format(requirement) ) - if any(packaging.version.Version(spec.version).local for spec in req.specifier): - raise wtforms.validators.ValidationError( - "Can't have dependency with local version: {!r}".format(requirement) - ) - def _validate_legacy_dist_req_list(form, field): for datum in field.data:
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -155,7 +155,8 @@ def test_validate_legacy_non_dist_req_list(self, monkeypatch): assert validator.calls == [pretend.call(datum) for datum in data] @pytest.mark.parametrize( - "requirement", ["foo (>=1.0)", "foo", "foo2", "foo-bar", "foo_bar"] + "requirement", + ["foo (>=1.0)", "foo", "foo2", "foo-bar", "foo_bar", "foo == 2.*"], ) def test_validate_legacy_dist_req_valid(self, requirement): legacy._validate_legacy_dist_req(requirement) @@ -170,7 +171,6 @@ def test_validate_legacy_dist_req_valid(self, requirement): "_foo", "_foo (>=1.0)", "name @ https://github.com/pypa", - "test-pypi-version-specifier-dep==0.0.1+cuda9", ], ) def test_validate_legacy_dist_req_invalid(self, requirement):
Upload to PyPI fails when dependency version contains * **Describe the bug** PyPI raises `400 Client error` when uploading a package that specifies requirement using `== 2.*`. This is a valid version specifier referred to [PEP 440](https://www.python.org/dev/peps/pep-0440/#compatible-release). The whole error is: ``` HTTPError: 400 Client Error: Invalid value for requires_dist. Error: Invalid version: '2.*' for url: https://test.pypi.org/legacy/ ``` **Expected behavior** Upload should pass with no errors. (Note that in the example below the expected behaviour would be to fail with authentication error, as you don't have sufficient permissions on the project.) **To Reproduce** This is a minimal reproducer: https://github.com/dblenkus/warehouse-requirements-issue Install twine with `pip install twine` and try to upload the package with `twine upload -r testpypi dist/*`. **My Platform** MacOS 10.14.6 twine 1.13.0 **Additional context** This has worked few days ago on July 26th 2019.
2019-08-01T10:18:37Z
[]
[]
pypi/warehouse
6,339
pypi__warehouse-6339
[ "5863" ]
de6eb8e51faaa49ee374fda6ddfa694b2f3b65eb
diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py --- a/warehouse/accounts/interfaces.py +++ b/warehouse/accounts/interfaces.py @@ -179,6 +179,14 @@ def get_webauthn_by_credential_id(user_id, credential_id): or None of the user doesn't have a credential with this ID. """ + def record_event(user_id, *, tag, ip_address, additional=None): + """ + Creates a new UserEvent for the given user with the given + tag, IP address, and additional metadata. + + Returns the event. + """ + class ITokenService(Interface): def dumps(data): diff --git a/warehouse/accounts/models.py b/warehouse/accounts/models.py --- a/warehouse/accounts/models.py +++ b/warehouse/accounts/models.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import enum from citext import CIText @@ -29,7 +30,7 @@ select, sql, ) -from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm.exc import NoResultFound @@ -95,6 +96,18 @@ class User(SitemapMixin, db.Model): "Macaroon", backref="user", cascade="all, delete-orphan", lazy=False ) + events = orm.relationship("UserEvent", backref="user", lazy=False) + + def record_event(self, *, tag, ip_address, additional): + session = orm.object_session(self) + event = UserEvent( + user=self, tag=tag, ip_address=ip_address, additional=additional + ) + session.add(event) + session.flush() + + return event + @property def primary_email(self): primaries = [x for x in self.emails if x.primary] @@ -122,6 +135,17 @@ def has_two_factor(self): def has_primary_verified_email(self): return self.primary_email is not None and self.primary_email.verified + @property + def recent_events(self): + session = orm.object_session(self) + last_fortnight = datetime.datetime.now() - datetime.timedelta(days=14) + return ( + session.query(UserEvent) + .filter((UserEvent.user_id == self.id) & (UserEvent.time >= last_fortnight)) + .order_by(UserEvent.time.desc()) + .all() + ) + class WebAuthn(db.Model): __tablename__ = "user_security_keys" @@ -140,6 +164,20 @@ class WebAuthn(db.Model): sign_count = Column(Integer, default=0) +class UserEvent(db.Model): + __tablename__ = "user_events" + + user_id = Column( + UUID(as_uuid=True), + ForeignKey("users.id", deferrable=True, initially="DEFERRED"), + nullable=False, + ) + tag = Column(String, nullable=False) + time = Column(DateTime, nullable=False, server_default=sql.func.now()) + ip_address = Column(String, nullable=False) + additional = Column(JSONB, nullable=True) + + class UnverifyReasons(enum.Enum): SpamComplaint = "spam complaint" diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -428,6 +428,16 @@ def get_webauthn_by_credential_id(self, user_id, credential_id): None, ) + def record_event(self, user_id, *, tag, ip_address, additional=None): + """ + Creates a new UserEvent for the given user with the given + tag, IP address, and additional metadata. + + Returns the event. 
+ """ + user = self.get_user(user_id) + return user.record_event(tag=tag, ip_address=ip_address, additional=additional) + @implementer(ITokenService) class TokenService: diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -138,7 +138,6 @@ def login(request, redirect_field_name=REDIRECT_FIELD_NAME, _form_class=LoginFor resp = HTTPSeeOther( request.route_path("accounts.two-factor", _query=token) ) - return resp else: # If the user-originating redirection url is not safe, then @@ -171,7 +170,6 @@ def login(request, redirect_field_name=REDIRECT_FIELD_NAME, _form_class=LoginFor .hexdigest() .lower(), ) - return resp return { @@ -216,7 +214,7 @@ def two_factor_and_totp_validate(request, _form_class=TOTPAuthenticationForm): if request.method == "POST": form = two_factor_state["totp_form"] if form.validate(): - _login_user(request, userid) + _login_user(request, userid, two_factor_method="totp") resp = HTTPSeeOther(redirect_to) resp.set_cookie( @@ -295,7 +293,7 @@ def webauthn_authentication_validate(request): webauthn = user_service.get_webauthn_by_credential_id(userid, credential_id) webauthn.sign_count = sign_count - _login_user(request, userid) + _login_user(request, userid, two_factor_method="webauthn") request.response.set_cookie( USER_ID_INSECURE_COOKIE, @@ -400,6 +398,12 @@ def register(request, _form_class=RegistrationForm): form.username.data, form.full_name.data, form.new_password.data ) email = user_service.add_email(user.id, form.email.data, primary=True) + user_service.record_event( + user.id, + tag="account:create", + ip_address=request.remote_addr, + additional={"email": form.email.data}, + ) send_email_verification_email(request, (user, email)) @@ -433,6 +437,11 @@ def request_password_reset(request, _form_class=RequestPasswordResetForm): ) send_password_reset_email(request, (user, email)) + user_service.record_event( + user.id, + tag="account:password:reset:request", + ip_address=request.remote_addr, + ) token_service = request.find_service(ITokenService, name="password") n_hours = token_service.max_age // 60 // 60 @@ -507,6 +516,9 @@ def _error(message): if request.method == "POST" and form.validate(): # Update password. user_service.update_user(user.id, password=form.new_password.data) + user_service.record_event( + user.id, tag="account:password:reset", ip_address=request.remote_addr + ) # Flash a success message request.session.flash("You have reset your password", queue="success") @@ -556,6 +568,11 @@ def _error(message): email.verified = True email.unverify_reason = None email.transient_bounces = 0 + email.user.record_event( + tag="account:email:verified", + ip_address=request.remote_addr, + additional={"email": email.email, "primary": email.primary}, + ) if not email.primary: confirm_message = "You can now set this email as your primary address" @@ -586,7 +603,7 @@ def _get_two_factor_data(request, _redirect_to="/"): return two_factor_data -def _login_user(request, userid): +def _login_user(request, userid, two_factor_method=None): # We have a session factory associated with this request, so in order # to protect against session fixation attacks we're going to make sure # that we create a new session (which for sessions with an identifier @@ -625,6 +642,12 @@ def _login_user(request, userid): # records when the last login was. 
user_service = request.find_service(IUserService, context=None) user_service.update_user(userid, last_login=datetime.datetime.utcnow()) + user_service.record_event( + userid, + tag="account:login:success", + ip_address=request.remote_addr, + additional={"two_factor_method": two_factor_method}, + ) return headers diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -937,6 +937,21 @@ def file_upload(request): ) ) + project.record_event( + tag="project:create", + ip_address=request.remote_addr, + additional={"created_by": request.user.username}, + ) + project.record_event( + tag="project:role:add", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": "Owner", + "target_user": request.user.username, + }, + ) + # Check that the user has permission to do things to this project, if this # is a new project this will act as a sanity check for the role we just # added above. @@ -1077,6 +1092,15 @@ def file_upload(request): ) ) + project.record_event( + tag="project:release:add", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "canonical_version": release.canonical_version, + }, + ) + # TODO: We need a better solution to this than to just do it inline inside # this method. Ideally the version field would just be sortable, but # at least this should be some sort of hook or trigger. diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -52,7 +52,14 @@ ProvisionWebAuthnForm, SaveAccountForm, ) -from warehouse.packaging.models import File, JournalEntry, Project, Release, Role +from warehouse.packaging.models import ( + File, + JournalEntry, + Project, + ProjectEvent, + Release, + Role, +) from warehouse.utils.http import is_safe_url from warehouse.utils.paginate import paginate_url_factory from warehouse.utils.project import confirm_project, destroy_docs, remove_project @@ -146,6 +153,12 @@ def add_email(self): if form.validate(): email = self.user_service.add_email(self.request.user.id, form.email.data) + self.user_service.record_event( + self.request.user.id, + tag="account:email:add", + ip_address=self.request.remote_addr, + additional={"email": email.email}, + ) send_email_verification_email(self.request, (self.request.user, email)) @@ -179,6 +192,12 @@ def delete_email(self): ) else: self.request.user.emails.remove(email) + self.user_service.record_event( + self.request.user.id, + tag="account:email:remove", + ip_address=self.request.remote_addr, + additional={"email": email.email}, + ) self.request.session.flash( f"Email address {email.email} removed", queue="success" ) @@ -206,6 +225,17 @@ def change_primary_email(self): ).update(values={"primary": False}) new_primary_email.primary = True + self.user_service.record_event( + self.request.user.id, + tag="account:email:primary:change", + ip_address=self.request.remote_addr, + additional={ + "old_primary": previous_primary_email.email + if previous_primary_email + else None, + "new_primary": new_primary_email.email, + }, + ) self.request.session.flash( f"Email address {new_primary_email.email} set as primary", queue="success" @@ -236,6 +266,11 @@ def reverify_email(self): self.request.session.flash("Email is already verified", queue="error") else: send_email_verification_email(self.request, (self.request.user, email)) + email.user.record_event( + tag="account:email:reverify", + 
ip_address=self.request.remote_addr, + additional={"email": email.email}, + ) self.request.session.flash( f"Verification email for {email.email} resent", queue="success" @@ -259,6 +294,11 @@ def change_password(self): self.user_service.update_user( self.request.user.id, password=form.new_password.data ) + self.user_service.record_event( + self.request.user.id, + tag="account:password:change", + ip_address=self.request.remote_addr, + ) send_password_change_email(self.request, self.request.user) self.request.session.flash("Password updated", queue="success") @@ -399,8 +439,13 @@ def validate_totp_provision(self): self.user_service.update_user( self.request.user.id, totp_secret=self.request.session.get_totp_secret() ) - self.request.session.clear_totp_secret() + self.user_service.record_event( + self.request.user.id, + tag="account:two_factor:method_added", + ip_address=self.request.remote_addr, + additional={"method": "totp"}, + ) self.request.session.flash( "Authentication application successfully set up", queue="success" ) @@ -432,6 +477,12 @@ def delete_totp(self): if form.validate(): self.user_service.update_user(self.request.user.id, totp_secret=None) + self.user_service.record_event( + self.request.user.id, + tag="account:two_factor:method_removed", + ip_address=self.request.remote_addr, + additional={"method": "totp"}, + ) self.request.session.flash( "Authentication application removed from PyPI. " "Remember to remove PyPI from your application.", @@ -502,6 +553,12 @@ def validate_webauthn_provision(self): public_key=form.validated_credential.public_key.decode(), sign_count=form.validated_credential.sign_count, ) + self.user_service.record_event( + self.request.user.id, + tag="account:two_factor:method_added", + ip_address=self.request.remote_addr, + additional={"method": "webauthn", "label": form.label.data}, + ) self.request.session.flash( "Security device successfully set up", queue="success" ) @@ -533,6 +590,12 @@ def delete_webauthn(self): if form.validate(): self.request.user.webauthn.remove(form.webauthn) + self.user_service.record_event( + self.request.user.id, + tag="account:two_factor:method_removed", + ip_address=self.request.remote_addr, + additional={"method": "webauthn", "label": form.label.data}, + ) self.request.session.flash("Security device removed", queue="success") else: self.request.session.flash("Invalid credentials", queue="error") @@ -593,12 +656,42 @@ def create_macaroon(self): response = {**self.default_response} if form.validate(): + macaroon_caveats = {"permissions": form.validated_scope, "version": 1} serialized_macaroon, macaroon = self.macaroon_service.create_macaroon( location=self.request.domain, user_id=self.request.user.id, description=form.description.data, - caveats={"permissions": form.validated_scope, "version": 1}, + caveats=macaroon_caveats, + ) + self.user_service.record_event( + self.request.user.id, + tag="account:api_token:added", + ip_address=self.request.remote_addr, + additional={ + "description": form.description.data, + "caveats": macaroon_caveats, + }, ) + if "projects" in form.validated_scope: + projects = [ + project + for project in self.request.user.projects + if project.normalized_name in form.validated_scope["projects"] + ] + for project in projects: + # NOTE: We don't disclose the full caveats for this token + # to the project event log, since the token could also + # have access to projects that this project's owner + # isn't aware of. 
+ project.record_event( + tag="project:api_token:added", + ip_address=self.request.remote_addr, + additional={ + "description": form.description.data, + "user": self.request.user.username, + }, + ) + response.update(serialized_macaroon=serialized_macaroon, macaroon=macaroon) return {**response, "create_macaroon_form": form} @@ -610,12 +703,32 @@ def delete_macaroon(self): ) if form.validate(): - description = self.macaroon_service.find_macaroon( - form.macaroon_id.data - ).description + macaroon = self.macaroon_service.find_macaroon(form.macaroon_id.data) self.macaroon_service.delete_macaroon(form.macaroon_id.data) + self.user_service.record_event( + self.request.user.id, + tag="account:api_token:removed", + ip_address=self.request.remote_addr, + additional={"macaroon_id": form.macaroon_id.data}, + ) + if "projects" in macaroon.caveats["permissions"]: + projects = [ + project + for project in self.request.user.projects + if project.normalized_name + in macaroon.caveats["permissions"]["projects"] + ] + for project in projects: + project.record_event( + tag="project:api_token:removed", + ip_address=self.request.remote_addr, + additional={ + "description": macaroon.description, + "user": self.request.user.username, + }, + ) self.request.session.flash( - f"Deleted API token '{description}'.", queue="success" + f"Deleted API token '{macaroon.description}'.", queue="success" ) redirect_to = self.request.referer @@ -764,6 +877,15 @@ def delete_project_release(self): ) ) + self.release.project.record_event( + tag="project:release:remove", + ip_address=self.request.remote_addr, + additional={ + "submitted_by": self.request.user.username, + "canonical_version": self.release.canonical_version, + }, + ) + self.request.db.delete(self.release) self.request.session.flash( @@ -823,6 +945,16 @@ def _error(message): ) ) + self.release.project.record_event( + tag="project:release:file:remove", + ip_address=self.request.remote_addr, + additional={ + "submitted_by": self.request.user.username, + "canonical_version": self.release.canonical_version, + "filename": release_file.filename, + }, + ) + self.request.db.delete(release_file) self.request.session.flash( @@ -885,6 +1017,15 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): submitted_from=request.remote_addr, ) ) + project.record_event( + tag="project:role:add", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": role_name, + "target_user": username, + }, + ) owner_roles = ( request.db.query(Role) @@ -980,6 +1121,15 @@ def change_project_role(project, request, _form_class=ChangeRoleForm): submitted_from=request.remote_addr, ) ) + project.record_event( + tag="project:role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": role.role_name, + "target_user": role.user.username, + }, + ) request.session.flash("Changed role", queue="success") else: # This user only has one role, so get it and change the type. 
@@ -1008,6 +1158,15 @@ def change_project_role(project, request, _form_class=ChangeRoleForm): ) ) role.role_name = form.role_name.data + project.record_event( + tag="project:role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": form.role_name.data, + "target_user": role.user.username, + }, + ) request.session.flash("Changed role", queue="success") except NoResultFound: request.session.flash("Could not find role", queue="error") @@ -1053,6 +1212,15 @@ def delete_project_role(project, request): submitted_from=request.remote_addr, ) ) + project.record_event( + tag="project:role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": role.role_name, + "target_user": role.user.username, + }, + ) request.session.flash("Removed role", queue="success") return HTTPSeeOther( @@ -1073,6 +1241,39 @@ def manage_project_history(project, request): except ValueError: raise HTTPBadRequest("'page' must be an integer.") + events_query = ( + request.db.query(ProjectEvent) + .join(ProjectEvent.project) + .filter(ProjectEvent.project_id == project.id) + .order_by(ProjectEvent.time.desc()) + ) + + events = SQLAlchemyORMPage( + events_query, + page=page_num, + items_per_page=25, + url_maker=paginate_url_factory(request), + ) + + if events.page_count and page_num > events.page_count: + raise HTTPNotFound + + return {"project": project, "events": events} + + +@view_config( + route_name="manage.project.journal", + context=Project, + renderer="manage/journal.html", + uses_session=True, + permission="manage:project", +) +def manage_project_journal(project, request): + try: + page_num = int(request.params.get("page", 1)) + except ValueError: + raise HTTPBadRequest("'page' must be an integer.") + journals_query = ( request.db.query(JournalEntry) .options(joinedload("submitted_by")) diff --git a/warehouse/migrations/versions/0ac2f506ef2e_user_and_project_event_models.py b/warehouse/migrations/versions/0ac2f506ef2e_user_and_project_event_models.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/0ac2f506ef2e_user_and_project_event_models.py @@ -0,0 +1,74 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +User and Project event models + +Revision ID: 0ac2f506ef2e +Revises: d83f20495c10 +Create Date: 2019-07-31 21:50:43.407231 +""" + +import sqlalchemy as sa + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "0ac2f506ef2e" +down_revision = "d83f20495c10" + + +def upgrade(): + op.create_table( + "project_events", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("tag", sa.String(), nullable=False), + sa.Column( + "time", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("ip_address", sa.String(), nullable=False), + sa.Column("additional", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.ForeignKeyConstraint( + ["project_id"], ["projects.id"], initially="DEFERRED", deferrable=True + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "user_events", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("tag", sa.String(), nullable=False), + sa.Column( + "time", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("ip_address", sa.String(), nullable=False), + sa.Column("additional", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.ForeignKeyConstraint( + ["user_id"], ["users.id"], initially="DEFERRED", deferrable=True + ), + sa.PrimaryKeyConstraint("id"), + ) + + +def downgrade(): + op.drop_table("user_events") + op.drop_table("project_events") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -31,13 +31,14 @@ ForeignKey, Index, Integer, + String, Table, Text, func, orm, sql, ) -from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.dialects.postgresql import JSONB, UUID from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.ext.hybrid import hybrid_property @@ -135,6 +136,8 @@ class Project(SitemapMixin, db.Model): passive_deletes=True, ) + events = orm.relationship("ProjectEvent", backref="project", lazy=False) + def __getitem__(self, version): session = orm.object_session(self) canonical_version = packaging.utils.canonicalize_version(version) @@ -187,6 +190,16 @@ def __acl__(self): acls.append((Allow, str(role.user.id), ["upload"])) return acls + def record_event(self, *, tag, ip_address, additional=None): + session = orm.object_session(self) + event = ProjectEvent( + project=self, tag=tag, ip_address=ip_address, additional=additional + ) + session.add(event) + session.flush() + + return event + @property def documentation_url(self): # TODO: Move this into the database and eliminate the use of the @@ -220,6 +233,20 @@ def latest_version(self): ) +class ProjectEvent(db.Model): + __tablename__ = "project_events" + + project_id = Column( + UUID(as_uuid=True), + ForeignKey("projects.id", deferrable=True, initially="DEFERRED"), + nullable=False, + ) + tag = Column(String, nullable=False) + time = Column(DateTime, nullable=False, server_default=sql.func.now()) + ip_address = Column(String, nullable=False) + additional = Column(JSONB, nullable=True) + + class DependencyKind(enum.IntEnum): requires = 1 diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py 
+++ b/warehouse/routes.py @@ -232,6 +232,13 @@ def includeme(config): traverse="/{project_name}", domain=warehouse, ) + config.add_route( + "manage.project.journal", + "/manage/project/{project_name}/journal/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) # Packaging config.add_redirect("/p/{name}/", "/project/{name}/", domain=warehouse)
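The patch above attaches an append-only `record_event` helper to both `User` and `Project` and stores each event's tag, timestamp, IP address, and free-form `additional` metadata. As a quick orientation before the accompanying test changes below, here is a minimal, runnable sketch of the same pattern; it uses SQLite-friendly `Integer`/`JSON` columns as stand-ins for the PostgreSQL-specific `UUID`/`JSONB` columns the patch uses, so the names here are illustrative rather than warehouse's actual models:

```python
# Minimal sketch of the append-only event pattern from the patch above.
# Column types are simplified so this runs against in-memory SQLite.
import datetime

from sqlalchemy import (
    JSON,
    Column,
    DateTime,
    ForeignKey,
    Integer,
    String,
    create_engine,
)
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    username = Column(String, nullable=False)
    events = relationship("UserEvent", backref="user")

    def record_event(self, *, tag, ip_address, additional=None):
        # Events are only ever inserted, so the table doubles as an audit log.
        event = UserEvent(
            user=self, tag=tag, ip_address=ip_address, additional=additional
        )
        Session.object_session(self).add(event)
        return event


class UserEvent(Base):
    __tablename__ = "user_events"

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    tag = Column(String, nullable=False)
    time = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
    ip_address = Column(String, nullable=False)
    additional = Column(JSON, nullable=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    user = User(username="example")
    session.add(user)
    user.record_event(
        tag="account:login:success",
        ip_address="127.0.0.1",
        additional={"two_factor_method": None},
    )
    session.commit()
    print([(event.tag, event.additional) for event in user.events])
```

Because rows are only ever inserted, the table accumulates an append-only history, and the nullable `additional` JSON column leaves room for per-tag metadata without further schema churn.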
diff --git a/tests/common/db/accounts.py b/tests/common/db/accounts.py --- a/tests/common/db/accounts.py +++ b/tests/common/db/accounts.py @@ -15,7 +15,7 @@ import factory import factory.fuzzy -from warehouse.accounts.models import Email, User +from warehouse.accounts.models import Email, User, UserEvent from .base import FuzzyEmail, WarehouseFactory @@ -36,6 +36,13 @@ class Meta: last_login = factory.fuzzy.FuzzyNaiveDateTime(datetime.datetime(2011, 1, 1)) +class UserEventFactory(WarehouseFactory): + class Meta: + model = UserEvent + + user = factory.SubFactory(User) + + class EmailFactory(WarehouseFactory): class Meta: model = Email diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -26,6 +26,7 @@ File, JournalEntry, Project, + ProjectEvent, Release, Role, ) @@ -43,6 +44,13 @@ class Meta: name = factory.fuzzy.FuzzyText(length=12) +class ProjectEventFactory(WarehouseFactory): + class Meta: + model = ProjectEvent + + project = factory.SubFactory(ProjectFactory) + + class DescriptionFactory(WarehouseFactory): class Meta: model = Description diff --git a/tests/unit/accounts/test_models.py b/tests/unit/accounts/test_models.py --- a/tests/unit/accounts/test_models.py +++ b/tests/unit/accounts/test_models.py @@ -10,12 +10,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime + import pytest from warehouse.accounts.models import Email, User, UserFactory from ...common.db.accounts import ( EmailFactory as DBEmailFactory, + UserEventFactory as DBUserEventFactory, UserFactory as DBUserFactory, ) @@ -83,3 +86,18 @@ def test_query_by_email_when_not_primary(self, db_session): result = db_session.query(User).filter(User.email == email.email).first() assert result is None + + def test_recent_events(self, db_session): + user = DBUserFactory.create() + recent_event = DBUserEventFactory(user=user, tag="foo", ip_address="0.0.0.0") + stale_event = DBUserEventFactory( + user=user, + tag="bar", + ip_address="0.0.0.0", + time=datetime.datetime.now() - datetime.timedelta(days=15), + ) + + assert len(user.events) == 2 + assert len(user.recent_events) == 1 + assert user.events == [recent_event, stale_event] + assert user.recent_events == [recent_event] diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -159,6 +159,7 @@ def test_post_validate_redirects( find_userid=pretend.call_recorder(lambda username: user_id), update_user=pretend.call_recorder(lambda *a, **kw: None), has_two_factor=lambda userid: False, + record_event=pretend.call_recorder(lambda *a, **kw: None), ) breach_service = pretend.stub(check_password=lambda password, tags=None: False) @@ -174,6 +175,7 @@ def test_post_validate_redirects( invalidate=pretend.call_recorder(lambda: None), new_csrf_token=pretend.call_recorder(lambda: None), ) + pyramid_request.remote_addr = "0.0.0.0" pyramid_request.set_property( lambda r: str(uuid.uuid4()) if with_user else None, @@ -214,6 +216,14 @@ def test_post_validate_redirects( assert user_service.find_userid.calls == [pretend.call("theuser")] assert user_service.update_user.calls == [pretend.call(user_id, last_login=now)] + assert user_service.record_event.calls == [ + pretend.call( + user_id, + tag="account:login:success", + ip_address="0.0.0.0", + additional={"two_factor_method": None}, + ) + ] if with_user: assert new_session == {} @@ 
-237,6 +247,7 @@ def test_post_validate_no_redirects( find_userid=pretend.call_recorder(lambda username: 1), update_user=lambda *a, **k: None, has_two_factor=lambda userid: False, + record_event=pretend.call_recorder(lambda *a, **kw: None), ) breach_service = pretend.stub(check_password=lambda password, tags=None: False) @@ -247,6 +258,7 @@ def test_post_validate_no_redirects( pyramid_request.method = "POST" pyramid_request.POST["next"] = expected_next_url + pyramid_request.remote_addr = "0.0.0.0" form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -259,6 +271,14 @@ def test_post_validate_no_redirects( assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == observed_next_url + assert user_service.record_event.calls == [ + pretend.call( + 1, + tag="account:login:success", + ip_address="0.0.0.0", + additional={"two_factor_method": None}, + ) + ] def test_redirect_authenticated_user(self): pyramid_request = pretend.stub(authenticated_userid=1) @@ -275,6 +295,7 @@ def test_two_factor_auth(self, pyramid_request, redirect_url, token_service): find_userid=pretend.call_recorder(lambda username: 1), update_user=lambda *a, **k: None, has_two_factor=lambda userid: True, + record_event=pretend.call_recorder(lambda *a, **kw: None), ) breach_service = pretend.stub(check_password=lambda pw: False) @@ -288,6 +309,7 @@ def test_two_factor_auth(self, pyramid_request, redirect_url, token_service): pyramid_request.method = "POST" if redirect_url: pyramid_request.POST["next"] = redirect_url + pyramid_request.remote_addr = "0.0.0.0" form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -312,6 +334,7 @@ def test_two_factor_auth(self, pyramid_request, redirect_url, token_service): ("Content-Length", "0"), ("Location", "/account/two-factor"), ] + assert user_service.record_event.calls == [] class TestTwoFactor: @@ -406,6 +429,7 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): has_totp=lambda userid: True, has_webauthn=lambda userid: False, check_totp_value=lambda userid, totp_value: True, + record_event=pretend.call_recorder(lambda *a, **kw: None), ) new_session = {} @@ -416,6 +440,7 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): }[interface] pyramid_request.method = "POST" + pyramid_request.remote_addr = "0.0.0.0" pyramid_request.session = pretend.stub( items=lambda: [("a", "b"), ("foo", "bar")], update=new_session.update, @@ -451,52 +476,14 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): assert remember.calls == [pretend.call(pyramid_request, str(1))] assert pyramid_request.session.invalidate.calls == [pretend.call()] assert pyramid_request.session.new_csrf_token.calls == [pretend.call()] - - @pytest.mark.parametrize("redirect_url", ["test_redirect_url", None]) - def test_totp_auth_invalid(self, pyramid_request, redirect_url): - query_params = {"userid": str(1)} - if redirect_url: - query_params["redirect_to"] = redirect_url - - token_service = pretend.stub( - loads=pretend.call_recorder(lambda s: query_params) - ) - - user_service = pretend.stub( - find_userid=pretend.call_recorder(lambda username: 1), - update_user=lambda *a, **k: None, - has_totp=lambda userid: True, - has_webauthn=lambda userid: False, - check_totp_value=lambda userid, totp_value: False, - ) - - pyramid_request.find_service = lambda interface, **kwargs: { - ITokenService: token_service, - IUserService: user_service, - }[interface] - - pyramid_request.method = "POST" - - form_obj = pretend.stub( - 
validate=pretend.call_recorder(lambda: True), - totp_value=pretend.stub(data="test-otp-secret"), - ) - form_class = pretend.call_recorder(lambda d, user_service, **kw: form_obj) - pyramid_request.route_path = pretend.call_recorder( - lambda a: "/account/two-factor" - ) - pyramid_request.params = pretend.stub( - get=pretend.call_recorder(lambda k: query_params.get(k)) - ) - result = views.two_factor_and_totp_validate( - pyramid_request, _form_class=form_class - ) - - token_expected_data = {"userid": str(1)} - if redirect_url: - token_expected_data["redirect_to"] = redirect_url - - assert isinstance(result, HTTPSeeOther) + assert user_service.record_event.calls == [ + pretend.call( + "1", + tag="account:login:success", + ip_address="0.0.0.0", + additional={"two_factor_method": "totp"}, + ) + ] def test_totp_auth_already_authed(self): request = pretend.stub( @@ -698,7 +685,7 @@ def test_webauthn_validate(self, monkeypatch): ) monkeypatch.setattr(views, "_get_two_factor_data", _get_two_factor_data) - _login_user = pretend.call_recorder(lambda req, uid: pretend.stub()) + _login_user = pretend.call_recorder(lambda *a, **kw: pretend.stub()) monkeypatch.setattr(views, "_login_user", _login_user) user = pretend.stub(webauthn=pretend.stub(sign_count=pretend.stub())) @@ -738,7 +725,9 @@ def test_webauthn_validate(self, monkeypatch): result = views.webauthn_authentication_validate(request) assert _get_two_factor_data.calls == [pretend.call(request)] - assert _login_user.calls == [pretend.call(request, 1)] + assert _login_user.calls == [ + pretend.call(request, 1, two_factor_method="webauthn") + ] assert request.session.get_webauthn_challenge.calls == [pretend.call()] assert request.session.clear_webauthn_challenge.calls == [pretend.call()] @@ -858,6 +847,7 @@ def test_register_redirect(self, db_request, monkeypatch): email = pretend.stub() create_user = pretend.call_recorder(lambda *args, **kwargs: user) add_email = pretend.call_recorder(lambda *args, **kwargs: email) + record_event = pretend.call_recorder(lambda *a, **kw: None) db_request.find_service = pretend.call_recorder( lambda *args, **kwargs: pretend.stub( csp_policy={}, @@ -870,9 +860,11 @@ def test_register_redirect(self, db_request, monkeypatch): create_user=create_user, add_email=add_email, check_password=lambda pw, tags=None: False, + record_event=record_event, ) ) db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "0.0.0.0" db_request.POST.update( { "username": "username_value", @@ -894,6 +886,20 @@ def test_register_redirect(self, db_request, monkeypatch): ] assert add_email.calls == [pretend.call(user.id, "[email protected]", primary=True)] assert send_email.calls == [pretend.call(db_request, (user, email))] + assert record_event.calls == [ + pretend.call( + user.id, + tag="account:create", + ip_address=db_request.remote_addr, + additional={"email": "[email protected]"}, + ), + pretend.call( + user.id, + tag="account:login:success", + ip_address=db_request.remote_addr, + additional={"two_factor_method": None}, + ), + ] def test_register_fails_with_admin_flag_set(self, db_request): # This flag was already set via migration, just need to enable it @@ -951,10 +957,12 @@ def test_request_password_reset( self, monkeypatch, pyramid_request, pyramid_config, user_service, token_service ): - stub_user = pretend.stub(username=pretend.stub()) + stub_user = pretend.stub(id=pretend.stub(), username=pretend.stub()) pyramid_request.method = "POST" + pyramid_request.remote_addr = "0.0.0.0" token_service.dumps = 
pretend.call_recorder(lambda a: "TOK") user_service.get_user_by_username = pretend.call_recorder(lambda a: stub_user) + user_service.record_event = pretend.call_recorder(lambda *a, **kw: None) pyramid_request.find_service = pretend.call_recorder( lambda interface, **kw: { IUserService: user_service, @@ -991,18 +999,29 @@ def test_request_password_reset( assert send_password_reset_email.calls == [ pretend.call(pyramid_request, (stub_user, None)) ] + assert user_service.record_event.calls == [ + pretend.call( + stub_user.id, + tag="account:password:reset:request", + ip_address=pyramid_request.remote_addr, + ) + ] def test_request_password_reset_with_email( self, monkeypatch, pyramid_request, pyramid_config, user_service, token_service ): stub_user = pretend.stub( - email="[email protected]", emails=[pretend.stub(email="[email protected]")] + id=pretend.stub(), + email="[email protected]", + emails=[pretend.stub(email="[email protected]")], ) pyramid_request.method = "POST" + pyramid_request.remote_addr = "0.0.0.0" token_service.dumps = pretend.call_recorder(lambda a: "TOK") user_service.get_user_by_username = pretend.call_recorder(lambda a: None) user_service.get_user_by_email = pretend.call_recorder(lambda a: stub_user) + user_service.record_event = pretend.call_recorder(lambda *a, **kw: None) pyramid_request.find_service = pretend.call_recorder( lambda interface, **kw: { IUserService: user_service, @@ -1040,12 +1059,20 @@ def test_request_password_reset_with_email( assert send_password_reset_email.calls == [ pretend.call(pyramid_request, (stub_user, stub_user.emails[0])) ] + assert user_service.record_event.calls == [ + pretend.call( + stub_user.id, + tag="account:password:reset:request", + ip_address=pyramid_request.remote_addr, + ) + ] def test_request_password_reset_with_non_primary_email( self, monkeypatch, pyramid_request, pyramid_config, user_service, token_service ): stub_user = pretend.stub( + id=pretend.stub(), email="[email protected]", emails=[ pretend.stub(email="[email protected]"), @@ -1053,9 +1080,11 @@ def test_request_password_reset_with_non_primary_email( ], ) pyramid_request.method = "POST" + pyramid_request.remote_addr = "0.0.0.0" token_service.dumps = pretend.call_recorder(lambda a: "TOK") user_service.get_user_by_username = pretend.call_recorder(lambda a: None) user_service.get_user_by_email = pretend.call_recorder(lambda a: stub_user) + user_service.record_event = pretend.call_recorder(lambda *a, **kw: None) pyramid_request.find_service = pretend.call_recorder( lambda interface, **kw: { IUserService: user_service, @@ -1095,6 +1124,13 @@ def test_request_password_reset_with_non_primary_email( assert send_password_reset_email.calls == [ pretend.call(pyramid_request, (stub_user, stub_user.emails[1])) ] + assert user_service.record_event.calls == [ + pretend.call( + stub_user.id, + tag="account:password:reset:request", + ip_address=pyramid_request.remote_addr, + ) + ] def test_redirect_authenticated_user(self): pyramid_request = pretend.stub(authenticated_userid=1) @@ -1163,6 +1199,7 @@ def test_reset_password(self, db_request, user_service, token_service): breach_service = pretend.stub(check_password=lambda pw: False) db_request.route_path = pretend.call_recorder(lambda name: "/account/login") + db_request.remote_addr = "0.0.0.0" token_service.loads = pretend.call_recorder( lambda token: { "action": "password-reset", @@ -1380,6 +1417,7 @@ def test_verify_email( db_request.user = user db_request.GET.update({"token": "RANDOM_KEY"}) db_request.route_path = 
pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "0.0.0.0" token_service.loads = pretend.call_recorder( lambda token: {"action": "email-verify", "email.id": str(email.id)} ) diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -29,7 +29,14 @@ from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage import views -from warehouse.packaging.models import File, JournalEntry, Project, Role, User +from warehouse.packaging.models import ( + File, + JournalEntry, + Project, + ProjectEvent, + Role, + User, +) from warehouse.utils.paginate import paginate_url_factory from warehouse.utils.project import remove_documentation @@ -37,6 +44,7 @@ from ...common.db.packaging import ( FileFactory, JournalEntryFactory, + ProjectEventFactory, ProjectFactory, ReleaseFactory, RoleFactory, @@ -186,7 +194,8 @@ def test_add_email(self, monkeypatch, pyramid_config): email_address = "[email protected]" email = pretend.stub(id=pretend.stub(), email=email_address) user_service = pretend.stub( - add_email=pretend.call_recorder(lambda *a, **kw: email) + add_email=pretend.call_recorder(lambda *a, **kw: email), + record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( POST={"email": email_address}, @@ -197,6 +206,7 @@ def test_add_email(self, monkeypatch, pyramid_config): emails=[], username="username", name="Name", id=pretend.stub() ), task=pretend.call_recorder(lambda *args, **kwargs: send_email), + remote_addr="0.0.0.0", ) monkeypatch.setattr( views, @@ -226,6 +236,14 @@ def test_add_email(self, monkeypatch, pyramid_config): ) ] assert send_email.calls == [pretend.call(request, (request.user, email))] + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:email:add", + ip_address=request.remote_addr, + additional={"email": email_address}, + ) + ] def test_add_email_validation_fails(self, monkeypatch): email_address = "[email protected]" @@ -262,6 +280,9 @@ def test_add_email_validation_fails(self, monkeypatch): def test_delete_email(self, monkeypatch): email = pretend.stub(id=pretend.stub(), primary=False, email=pretend.stub()) some_other_email = pretend.stub() + user_service = pretend.stub( + record_event=pretend.call_recorder(lambda *a, **kw: None) + ) request = pretend.stub( POST={"delete_email_id": email.id}, user=pretend.stub( @@ -272,8 +293,9 @@ def test_delete_email(self, monkeypatch): filter=lambda *a: pretend.stub(one=lambda: email) ) ), - find_service=lambda *a, **kw: pretend.stub(), + find_service=lambda *a, **kw: user_service, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + remote_addr="0.0.0.0", ) monkeypatch.setattr( views.ManageAccountViews, "default_response", {"_": pretend.stub()} @@ -285,6 +307,14 @@ def test_delete_email(self, monkeypatch): pretend.call(f"Email address {email.email} removed", queue="success") ] assert request.user.emails == [some_other_email] + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:email:remove", + ip_address=request.remote_addr, + additional={"email": email.email}, + ) + ] def test_delete_email_not_found(self, monkeypatch): email = pretend.stub() @@ -341,13 +371,17 @@ def test_delete_email_is_primary(self, monkeypatch): def test_change_primary_email(self, monkeypatch, db_request): user = UserFactory() - 
old_primary = EmailFactory(primary=True, user=user) - new_primary = EmailFactory(primary=False, verified=True, user=user) + old_primary = EmailFactory(primary=True, user=user, email="old") + new_primary = EmailFactory(primary=False, verified=True, user=user, email="new") db_request.user = user - db_request.find_service = lambda *a, **kw: pretend.stub() + user_service = pretend.stub( + record_event=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.find_service = lambda *a, **kw: user_service db_request.POST = {"primary_email_id": new_primary.id} + db_request.remote_addr = "0.0.0.0" db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) monkeypatch.setattr( views.ManageAccountViews, "default_response", {"_": pretend.stub()} @@ -367,6 +401,14 @@ def test_change_primary_email(self, monkeypatch, db_request): ] assert not old_primary.primary assert new_primary.primary + assert user_service.record_event.calls == [ + pretend.call( + user.id, + tag="account:email:primary:change", + ip_address=db_request.remote_addr, + additional={"old_primary": "old", "new_primary": "new"}, + ) + ] def test_change_primary_email_without_current(self, monkeypatch, db_request): user = UserFactory() @@ -374,8 +416,12 @@ def test_change_primary_email_without_current(self, monkeypatch, db_request): db_request.user = user - db_request.find_service = lambda *a, **kw: pretend.stub() + user_service = pretend.stub( + record_event=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.find_service = lambda *a, **kw: user_service db_request.POST = {"primary_email_id": new_primary.id} + db_request.remote_addr = "0.0.0.0" db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) monkeypatch.setattr( views.ManageAccountViews, "default_response", {"_": pretend.stub()} @@ -392,6 +438,14 @@ def test_change_primary_email_without_current(self, monkeypatch, db_request): ) ] assert new_primary.primary + assert user_service.record_event.calls == [ + pretend.call( + user.id, + tag="account:email:primary:change", + ip_address=db_request.remote_addr, + additional={"old_primary": None, "new_primary": new_primary.email}, + ) + ] def test_change_primary_email_not_found(self, monkeypatch, db_request): user = UserFactory() @@ -414,7 +468,13 @@ def test_change_primary_email_not_found(self, monkeypatch, db_request): assert old_primary.primary def test_reverify_email(self, monkeypatch): - email = pretend.stub(verified=False, email="email_address") + email = pretend.stub( + verified=False, + email="email_address", + user=pretend.stub( + record_event=pretend.call_recorder(lambda *a, **kw: None) + ), + ) request = pretend.stub( POST={"reverify_email_id": pretend.stub()}, @@ -426,6 +486,7 @@ def test_reverify_email(self, monkeypatch): session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: pretend.stub(), user=pretend.stub(id=pretend.stub(), username="username", name="Name"), + remote_addr="0.0.0.0", ) send_email = pretend.call_recorder(lambda *a: None) monkeypatch.setattr(views, "send_email_verification_email", send_email) @@ -439,6 +500,13 @@ def test_reverify_email(self, monkeypatch): pretend.call("Verification email for email_address resent", queue="success") ] assert send_email.calls == [pretend.call(request, (request.user, email))] + assert email.user.record_event.calls == [ + pretend.call( + tag="account:email:reverify", + ip_address=request.remote_addr, + additional={"email": email.email}, + ) + ] def test_reverify_email_not_found(self, 
monkeypatch): def raise_no_result(): @@ -499,7 +567,8 @@ def test_change_password(self, monkeypatch): old_password = "0ld_p455w0rd" new_password = "n3w_p455w0rd" user_service = pretend.stub( - update_user=pretend.call_recorder(lambda *a, **kw: None) + update_user=pretend.call_recorder(lambda *a, **kw: None), + record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( POST={ @@ -515,6 +584,7 @@ def test_change_password(self, monkeypatch): email=pretend.stub(), name=pretend.stub(), ), + remote_addr="0.0.0.0", ) change_pwd_obj = pretend.stub( validate=lambda: True, new_password=pretend.stub(data=new_password) @@ -540,6 +610,13 @@ def test_change_password(self, monkeypatch): assert user_service.update_user.calls == [ pretend.call(request.user.id, password=new_password) ] + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:password:change", + ip_address=request.remote_addr, + ) + ] def test_change_password_validation_fails(self, monkeypatch): old_password = "0ld_p455w0rd" @@ -847,6 +924,7 @@ def test_validate_totp_provision(self, monkeypatch): user_service = pretend.stub( get_totp_secret=lambda id: None, update_user=pretend.call_recorder(lambda *a, **kw: None), + record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( POST={"totp_value": "123456"}, @@ -866,6 +944,7 @@ def test_validate_totp_provision(self, monkeypatch): has_primary_verified_email=True, ), route_path=lambda *a, **kw: "/foo/bar/", + remote_addr="0.0.0.0", ) provision_totp_obj = pretend.stub(validate=lambda: True) @@ -885,6 +964,14 @@ def test_validate_totp_provision(self, monkeypatch): "Authentication application successfully set up", queue="success" ) ] + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:two_factor:method_added", + ip_address=request.remote_addr, + additional={"method": "totp"}, + ) + ] def test_validate_totp_provision_already_provisioned(self, monkeypatch): user_service = pretend.stub( @@ -990,6 +1077,7 @@ def test_delete_totp(self, monkeypatch, db_request): user_service = pretend.stub( get_totp_secret=lambda id: b"secret", update_user=pretend.call_recorder(lambda *a, **kw: None), + record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( POST={"confirm_username": pretend.stub()}, @@ -1004,6 +1092,7 @@ def test_delete_totp(self, monkeypatch, db_request): has_primary_verified_email=True, ), route_path=lambda *a, **kw: "/foo/bar/", + remote_addr="0.0.0.0", ) delete_totp_obj = pretend.stub(validate=lambda: True) @@ -1025,6 +1114,14 @@ def test_delete_totp(self, monkeypatch, db_request): ] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:two_factor:method_removed", + ip_address=request.remote_addr, + additional={"method": "totp"}, + ) + ] def test_delete_totp_bad_username(self, monkeypatch, db_request): user_service = pretend.stub( @@ -1157,7 +1254,8 @@ def test_get_webauthn_options(self): def test_validate_webauthn_provision(self, monkeypatch): user_service = pretend.stub( - add_webauthn=pretend.call_recorder(lambda *a, **kw: pretend.stub()) + add_webauthn=pretend.call_recorder(lambda *a, **kw: pretend.stub()), + record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( POST={}, @@ -1170,6 +1268,7 @@ def test_validate_webauthn_provision(self, monkeypatch): find_service=lambda *a, **kw: 
user_service, domain="fake_domain", host_url="fake_host_url", + remote_addr="0.0.0.0", ) provision_webauthn_obj = pretend.stub( @@ -1204,6 +1303,17 @@ def test_validate_webauthn_provision(self, monkeypatch): pretend.call("Security device successfully set up", queue="success") ] assert result == {"success": "Security device successfully set up"} + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:two_factor:method_added", + ip_address=request.remote_addr, + additional={ + "method": "webauthn", + "label": provision_webauthn_obj.label.data, + }, + ) + ] def test_validate_webauthn_provision_invalid_form(self, monkeypatch): user_service = pretend.stub( @@ -1242,6 +1352,9 @@ def test_validate_webauthn_provision_invalid_form(self, monkeypatch): assert result == {"fail": {"errors": ["Not a real error"]}} def test_delete_webauthn(self, monkeypatch): + user_service = pretend.stub( + record_event=pretend.call_recorder(lambda *a, **kw: None) + ) request = pretend.stub( POST={}, user=pretend.stub( @@ -1255,11 +1368,14 @@ def test_delete_webauthn(self, monkeypatch): ), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), route_path=pretend.call_recorder(lambda x: "/foo/bar"), - find_service=lambda *a, **kw: pretend.stub(), + find_service=lambda *a, **kw: user_service, + remote_addr="0.0.0.0", ) delete_webauthn_obj = pretend.stub( - validate=lambda: True, webauthn=pretend.stub() + validate=lambda: True, + webauthn=pretend.stub(), + label=pretend.stub(data="fake label"), ) delete_webauthn_cls = pretend.call_recorder( lambda *a, **kw: delete_webauthn_obj @@ -1275,6 +1391,17 @@ def test_delete_webauthn(self, monkeypatch): assert request.route_path.calls == [pretend.call("manage.account")] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar" + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:two_factor:method_removed", + ip_address=request.remote_addr, + additional={ + "method": "webauthn", + "label": delete_webauthn_obj.label.data, + }, + ) + ] def test_delete_webauthn_not_provisioned(self): request = pretend.stub( @@ -1467,20 +1594,107 @@ def test_create_macaroon(self, monkeypatch): lambda *a, **kw: ("not a real raw macaroon", macaroon) ) ) + user_service = pretend.stub( + record_event=pretend.call_recorder(lambda *a, **kw: None) + ) request = pretend.stub( POST={}, domain=pretend.stub(), user=pretend.stub(id=pretend.stub(), has_primary_verified_email=True), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, - IUserService: pretend.stub(), + IUserService: user_service, + }[interface], + remote_addr="0.0.0.0", + ) + + create_macaroon_obj = pretend.stub( + validate=lambda: True, + description=pretend.stub(data=pretend.stub()), + validated_scope="foobar", + ) + create_macaroon_cls = pretend.call_recorder( + lambda *a, **kw: create_macaroon_obj + ) + monkeypatch.setattr(views, "CreateMacaroonForm", create_macaroon_cls) + + project_names = [pretend.stub()] + monkeypatch.setattr( + views.ProvisionMacaroonViews, "project_names", project_names + ) + + default_response = {"default": "response"} + monkeypatch.setattr( + views.ProvisionMacaroonViews, "default_response", default_response + ) + + view = views.ProvisionMacaroonViews(request) + result = view.create_macaroon() + + assert macaroon_service.create_macaroon.calls == [ + pretend.call( + location=request.domain, + user_id=request.user.id, + description=create_macaroon_obj.description.data, + 
caveats={ + "permissions": create_macaroon_obj.validated_scope, + "version": 1, + }, + ) + ] + assert result == { + **default_response, + "serialized_macaroon": "not a real raw macaroon", + "macaroon": macaroon, + "create_macaroon_form": create_macaroon_obj, + } + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:api_token:added", + ip_address=request.remote_addr, + additional={ + "description": create_macaroon_obj.description.data, + "caveats": { + "permissions": create_macaroon_obj.validated_scope, + "version": 1, + }, + }, + ) + ] + + def test_create_macaroon_records_events_for_each_project(self, monkeypatch): + macaroon = pretend.stub() + macaroon_service = pretend.stub( + create_macaroon=pretend.call_recorder( + lambda *a, **kw: ("not a real raw macaroon", macaroon) + ) + ) + record_event = pretend.call_recorder(lambda *a, **kw: None) + user_service = pretend.stub(record_event=record_event) + request = pretend.stub( + POST={}, + domain=pretend.stub(), + user=pretend.stub( + id=pretend.stub(), + has_primary_verified_email=True, + username=pretend.stub(), + projects=[ + pretend.stub(normalized_name="foo", record_event=record_event), + pretend.stub(normalized_name="bar", record_event=record_event), + ], + ), + find_service=lambda interface, **kw: { + IMacaroonService: macaroon_service, + IUserService: user_service, }[interface], + remote_addr="0.0.0.0", ) create_macaroon_obj = pretend.stub( validate=lambda: True, description=pretend.stub(data=pretend.stub()), - validated_scope=pretend.stub(), + validated_scope={"projects": ["foo", "bar"]}, ) create_macaroon_cls = pretend.call_recorder( lambda *a, **kw: create_macaroon_obj @@ -1517,6 +1731,36 @@ def test_create_macaroon(self, monkeypatch): "macaroon": macaroon, "create_macaroon_form": create_macaroon_obj, } + assert record_event.calls == [ + pretend.call( + request.user.id, + tag="account:api_token:added", + ip_address=request.remote_addr, + additional={ + "description": create_macaroon_obj.description.data, + "caveats": { + "permissions": create_macaroon_obj.validated_scope, + "version": 1, + }, + }, + ), + pretend.call( + tag="project:api_token:added", + ip_address=request.remote_addr, + additional={ + "description": create_macaroon_obj.description.data, + "user": request.user.username, + }, + ), + pretend.call( + tag="project:api_token:added", + ip_address=request.remote_addr, + additional={ + "description": create_macaroon_obj.description.data, + "user": request.user.username, + }, + ), + ] def test_delete_macaroon_invalid_form(self, monkeypatch): macaroon_service = pretend.stub( @@ -1577,22 +1821,29 @@ def test_delete_macaroon_dangerous_redirect(self, monkeypatch): assert macaroon_service.delete_macaroon.calls == [] def test_delete_macaroon(self, monkeypatch): + macaroon = pretend.stub( + description="fake macaroon", caveats={"version": 1, "permissions": "user"} + ) macaroon_service = pretend.stub( delete_macaroon=pretend.call_recorder(lambda id: pretend.stub()), - find_macaroon=pretend.call_recorder( - lambda id: pretend.stub(description="fake macaroon") - ), + find_macaroon=pretend.call_recorder(lambda id: macaroon), + ) + record_event = pretend.call_recorder( + pretend.call_recorder(lambda *a, **kw: None) ) + user_service = pretend.stub(record_event=record_event) request = pretend.stub( POST={}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, - IUserService: pretend.stub(), + IUserService: 
user_service, }[interface], session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), referer="/fake/safe/route", host=None, + user=pretend.stub(id=pretend.stub()), + remote_addr="0.0.0.0", ) delete_macaroon_obj = pretend.stub( @@ -1618,6 +1869,96 @@ def test_delete_macaroon(self, monkeypatch): assert request.session.flash.calls == [ pretend.call("Deleted API token 'fake macaroon'.", queue="success") ] + assert record_event.calls == [ + pretend.call( + request.user.id, + tag="account:api_token:removed", + ip_address=request.remote_addr, + additional={"macaroon_id": delete_macaroon_obj.macaroon_id.data}, + ) + ] + + def test_delete_macaroon_records_events_for_each_project(self, monkeypatch): + macaroon = pretend.stub( + description="fake macaroon", + caveats={"version": 1, "permissions": {"projects": ["foo", "bar"]}}, + ) + macaroon_service = pretend.stub( + delete_macaroon=pretend.call_recorder(lambda id: pretend.stub()), + find_macaroon=pretend.call_recorder(lambda id: macaroon), + ) + record_event = pretend.call_recorder( + pretend.call_recorder(lambda *a, **kw: None) + ) + user_service = pretend.stub(record_event=record_event) + request = pretend.stub( + POST={}, + route_path=pretend.call_recorder(lambda x: pretend.stub()), + find_service=lambda interface, **kw: { + IMacaroonService: macaroon_service, + IUserService: user_service, + }[interface], + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + referer="/fake/safe/route", + host=None, + user=pretend.stub( + id=pretend.stub(), + username=pretend.stub(), + projects=[ + pretend.stub(normalized_name="foo", record_event=record_event), + pretend.stub(normalized_name="bar", record_event=record_event), + ], + ), + remote_addr="0.0.0.0", + ) + + delete_macaroon_obj = pretend.stub( + validate=lambda: True, macaroon_id=pretend.stub(data=pretend.stub()) + ) + delete_macaroon_cls = pretend.call_recorder( + lambda *a, **kw: delete_macaroon_obj + ) + monkeypatch.setattr(views, "DeleteMacaroonForm", delete_macaroon_cls) + + view = views.ProvisionMacaroonViews(request) + result = view.delete_macaroon() + + assert request.route_path.calls == [] + assert isinstance(result, HTTPSeeOther) + assert result.location == "/fake/safe/route" + assert macaroon_service.delete_macaroon.calls == [ + pretend.call(delete_macaroon_obj.macaroon_id.data) + ] + assert macaroon_service.find_macaroon.calls == [ + pretend.call(delete_macaroon_obj.macaroon_id.data) + ] + assert request.session.flash.calls == [ + pretend.call("Deleted API token 'fake macaroon'.", queue="success") + ] + assert record_event.calls == [ + pretend.call( + request.user.id, + tag="account:api_token:removed", + ip_address=request.remote_addr, + additional={"macaroon_id": delete_macaroon_obj.macaroon_id.data}, + ), + pretend.call( + tag="project:api_token:removed", + ip_address=request.remote_addr, + additional={ + "description": "fake macaroon", + "user": request.user.username, + }, + ), + pretend.call( + tag="project:api_token:removed", + ip_address=request.remote_addr, + additional={ + "description": "fake macaroon", + "user": request.user.username, + }, + ), + ] class TestManageProjects: @@ -1832,7 +2173,13 @@ def test_manage_project_release(self): } def test_delete_project_release(self, monkeypatch): - release = pretend.stub(version="1.2.3", project=pretend.stub(name="foobar")) + release = pretend.stub( + version="1.2.3", + canonical_version="1.2.3", + project=pretend.stub( + name="foobar", record_event=pretend.call_recorder(lambda *a, **kw: None) + ), + 
) request = pretend.stub( POST={"confirm_version": release.version}, method="POST", @@ -1842,7 +2189,7 @@ def test_delete_project_release(self, monkeypatch): ), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), - user=pretend.stub(), + user=pretend.stub(username=pretend.stub()), remote_addr=pretend.stub(), ) journal_obj = pretend.stub() @@ -1873,6 +2220,16 @@ def test_delete_project_release(self, monkeypatch): assert request.route_path.calls == [ pretend.call("manage.project.releases", project_name=release.project.name) ] + assert release.project.record_event.calls == [ + pretend.call( + tag="project:release:remove", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "canonical_version": release.canonical_version, + }, + ) + ] def test_delete_project_release_no_confirm(self): release = pretend.stub(version="1.2.3", project=pretend.stub(name="foobar")) @@ -2597,6 +2954,129 @@ def test_delete_own_owner_role(self, db_request): class TestManageProjectHistory: + def test_get(self, db_request): + project = ProjectFactory.create() + older_event = ProjectEventFactory.create( + project=project, + tag="fake:event", + ip_address="0.0.0.0", + time=datetime.datetime(2017, 2, 5, 17, 18, 18, 462_634), + ) + newer_event = ProjectEventFactory.create( + project=project, + tag="fake:event", + ip_address="0.0.0.0", + time=datetime.datetime(2018, 2, 5, 17, 18, 18, 462_634), + ) + + assert views.manage_project_history(project, db_request) == { + "project": project, + "events": [newer_event, older_event], + } + + def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request): + params = MultiDict({"page": "abc"}) + db_request.params = params + + events_query = pretend.stub() + db_request.events_query = pretend.stub( + events_query=lambda *a, **kw: events_query + ) + + page_obj = pretend.stub(page_count=10, item_count=1000) + page_cls = pretend.call_recorder(lambda *a, **kw: page_obj) + monkeypatch.setattr(views, "SQLAlchemyORMPage", page_cls) + + url_maker = pretend.stub() + url_maker_factory = pretend.call_recorder(lambda request: url_maker) + monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory) + + project = ProjectFactory.create() + with pytest.raises(HTTPBadRequest): + views.manage_project_history(project, db_request) + + assert page_cls.calls == [] + + def test_first_page(self, db_request): + page_number = 1 + params = MultiDict({"page": page_number}) + db_request.params = params + + project = ProjectFactory.create() + items_per_page = 25 + total_items = items_per_page + 2 + for _ in range(total_items): + ProjectEventFactory.create( + project=project, tag="fake:event", ip_address="0.0.0.0" + ) + events_query = ( + db_request.db.query(ProjectEvent) + .join(ProjectEvent.project) + .filter(ProjectEvent.project_id == project.id) + .order_by(ProjectEvent.time.desc()) + ) + + events_page = SQLAlchemyORMPage( + events_query, + page=page_number, + items_per_page=items_per_page, + item_count=total_items, + url_maker=paginate_url_factory(db_request), + ) + assert views.manage_project_history(project, db_request) == { + "project": project, + "events": events_page, + } + + def test_last_page(self, db_request): + page_number = 2 + params = MultiDict({"page": page_number}) + db_request.params = params + + project = ProjectFactory.create() + items_per_page = 25 + total_items = items_per_page + 2 + for _ in range(total_items): + ProjectEventFactory.create( + 
project=project, tag="fake:event", ip_address="0.0.0.0" + ) + events_query = ( + db_request.db.query(ProjectEvent) + .join(ProjectEvent.project) + .filter(ProjectEvent.project_id == project.id) + .order_by(ProjectEvent.time.desc()) + ) + + events_page = SQLAlchemyORMPage( + events_query, + page=page_number, + items_per_page=items_per_page, + item_count=total_items, + url_maker=paginate_url_factory(db_request), + ) + assert views.manage_project_history(project, db_request) == { + "project": project, + "events": events_page, + } + + def test_raises_404_with_out_of_range_page(self, db_request): + page_number = 3 + params = MultiDict({"page": page_number}) + db_request.params = params + + project = ProjectFactory.create() + items_per_page = 25 + total_items = items_per_page + 2 + for _ in range(total_items): + ProjectEventFactory.create( + project=project, tag="fake:event", ip_address="0.0.0.0" + ) + + with pytest.raises(HTTPNotFound): + assert views.manage_project_history(project, db_request) + + +class TestManageProjectJournal: def test_get(self, db_request): project = ProjectFactory.create() older_journal = JournalEntryFactory.create( @@ -2608,7 +3088,7 @@ def test_get(self, db_request): submitted_date=datetime.datetime(2018, 2, 5, 17, 18, 18, 462_634), ) - assert views.manage_project_history(project, db_request) == { + assert views.manage_project_journal(project, db_request) == { "project": project, "journals": [newer_journal, older_journal], } @@ -2632,7 +3112,7 @@ def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request): project = ProjectFactory.create() with pytest.raises(HTTPBadRequest): - views.manage_project_history(project, db_request) + views.manage_project_journal(project, db_request) assert page_cls.calls == [] @@ -2662,7 +3142,7 @@ def test_first_page(self, db_request): item_count=total_items, url_maker=paginate_url_factory(db_request), ) - assert views.manage_project_history(project, db_request) == { + assert views.manage_project_journal(project, db_request) == { "project": project, "journals": journals_page, } @@ -2693,7 +3173,7 @@ def test_last_page(self, db_request): item_count=total_items, url_maker=paginate_url_factory(db_request), ) - assert views.manage_project_history(project, db_request) == { + assert views.manage_project_journal(project, db_request) == { "project": project, "journals": journals_page, } @@ -2712,4 +3192,4 @@ def test_raises_404_with_out_of_range_page(self, db_request): ) with pytest.raises(HTTPNotFound): - assert views.manage_project_history(project, db_request) + assert views.manage_project_journal(project, db_request) diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -257,6 +257,13 @@ def add_policy(name, filename): traverse="/{project_name}", domain=warehouse, ), + pretend.call( + "manage.project.journal", + "/manage/project/{project_name}/journal/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), pretend.call( "packaging.project", "/project/{name}/",
Audit trail: implement auditable event logging for sensitive actions
Warehouse is adding an advanced audit trail of user actions, beyond the existing journal. This will, for instance, allow publishers to track all actions taken by third-party services on their behalf.

- [x] Add auditing for user actions in PyPI
- [x] Add auditing for project actions in PyPI
- [x] Implement a User view for User auditing, allowing publishers to track all actions taken by third-party services on their behalf
- [x] Implement a Project view for Project auditing, so project maintainers can audit actions similarly
- [x] Implement an Admin view for PyPI.org administrators to audit actions similarly

So:

- Each user will be able to view a log of sensitive actions performed that are relevant to their user account.
- Each user who maintains at least one project on PyPI will be able to view a log of sensitive actions (performed by ANY user) relevant to projects they act in the Owner Role on.
- And PyPI administrators will be able to view the full audit log.

We'll be working on this in 2019. The [Packaging Working Group](https://wiki.python.org/psf/PackagingWG), seeking [donations](http://donate.pypi.org/) and further grants to fund more work, got [some new funding](https://pyfound.blogspot.com/2018/12/upcoming-pypi-improvements-for-2019.html) from the Open Technology Fund, and [the audit log is part of the current grant-funded project](https://pyfound.blogspot.com/2019/03/commencing-security-accessibility-and.html).
Noting here that sensitive actions worth including in the event log would include _renaming_ a project, per #1919. Ok, @woodruffw gave me a clue, and asked me to look at all the metrics calls, like so: ```bash $ git grep -C1 self._metrics warehouse/accounts/services.py- ) warehouse/accounts/services.py: self._metrics = metrics warehouse/accounts/services.py- -- warehouse/accounts/services.py- warehouse/accounts/services.py: self._metrics.increment("warehouse.authentication.start", tags=tags) warehouse/accounts/services.py- -- warehouse/accounts/services.py- logger.warning("Global failed login threshold reached.") warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.ratelimited", -- warehouse/accounts/services.py- if not self.ratelimiters["user"].test(user.id): warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.ratelimited", -- warehouse/accounts/services.py- warehouse/accounts/services.py: self._metrics.increment("warehouse.authentication.ok", tags=tags) warehouse/accounts/services.py- -- warehouse/accounts/services.py- else: warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.failure", -- warehouse/accounts/services.py- else: warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.failure", tags=tags + ["failure_reason:user"] -- warehouse/accounts/services.py- tags = tags if tags is not None else [] warehouse/accounts/services.py: self._metrics.increment("warehouse.authentication.two_factor.start", tags=tags) warehouse/accounts/services.py- -- warehouse/accounts/services.py- logger.warning("Global failed login threshold reached.") warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.two_factor.ratelimited", -- warehouse/accounts/services.py- if not self.ratelimiters["user"].test(user_id): warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.two_factor.ratelimited", -- warehouse/accounts/services.py- if totp_secret is None: warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.two_factor.failure", -- warehouse/accounts/services.py- if valid: warehouse/accounts/services.py: self._metrics.increment("warehouse.authentication.two_factor.ok", tags=tags) warehouse/accounts/services.py- else: warehouse/accounts/services.py: self._metrics.increment( warehouse/accounts/services.py- "warehouse.authentication.two_factor.failure", -- warehouse/accounts/services.py- self._api_base = api_base warehouse/accounts/services.py: self._metrics = metrics warehouse/accounts/services.py- self._help_url = help_url -- warehouse/accounts/services.py- def _metrics_increment(self, *args, **kwargs): warehouse/accounts/services.py: self._metrics.increment(*args, **kwargs) warehouse/accounts/services.py- -- warehouse/accounts/services.py- warehouse/accounts/services.py: self._metrics_increment("warehouse.compromised_password_check.start", tags=tags) warehouse/accounts/services.py- -- warehouse/accounts/services.py- logger.warning("Error contacting HaveIBeenPwned: %r", exc) warehouse/accounts/services.py: self._metrics_increment( warehouse/accounts/services.py- "warehouse.compromised_password_check.error", tags=tags -- warehouse/accounts/services.py- if hashed_password[5:] == possible.lower(): 
warehouse/accounts/services.py:                self._metrics_increment(
warehouse/accounts/services.py-                    "warehouse.compromised_password_check.compromised", tags=tags
--
warehouse/accounts/services.py-        # If we made it to this point, then the password is safe.
warehouse/accounts/services.py:        self._metrics_increment("warehouse.compromised_password_check.ok", tags=tags)
warehouse/accounts/services.py-        return False
--
warehouse/rate_limiting/__init__.py-            logging.warning("Error computing rate limits: %r", exc)
warehouse/rate_limiting/__init__.py:            self._metrics.increment(
warehouse/rate_limiting/__init__.py-                "warehouse.ratelimiter.error", tags=[f"call:{fn.__name__}"]
--
warehouse/rate_limiting/__init__.py-        self._identifiers = identifiers
warehouse/rate_limiting/__init__.py:        self._metrics = metrics
warehouse/rate_limiting/__init__.py-
```

From the metrics available so far, I will say that:

1. Maintainers should not see compromised password checks for Owners and other Maintainers, only themselves.
2. Maintainers should not see rate limit messages for Owners and other Maintainers, only themselves.
3. Maintainers should not see MFA authentication messages for Owners and other Maintainers, only themselves.

I'm not saying the following is necessarily correct, but they should provide a way to reason about this. Hope this helps!

![image](https://user-images.githubusercontent.com/33133073/59150773-04aec200-89f7-11e9-87d3-efa393abbdd2.png)
![image](https://user-images.githubusercontent.com/33133073/59150775-08dadf80-89f7-11e9-99fc-6e81bd0e630b.png)

([Here's a textual version of that image above.](https://docs.google.com/document/d/15QfDOy0dlZn5-ic6Gmcq35SxPLacYPbvuZ_RBUhA7E8/edit))

I want to propose also letting the community flag projects as spam/malware. Self-regulation is a good thing, and we can take some rules from the Stack Overflow community. This would greatly help the administrators.

@eirnym Thanks for the suggestion -- we're tracking that feature request as #3896.

This work is part of the [milestone of work we're doing for the Open Tech Fund-supported security work](https://github.com/pypa/warehouse/milestone/13). Right now, as I understand it, @woodruffw and @nlhkabu are working on the API key work, but after that, they'll be working on this issue.

User exarkun in IRC just pointed out that the public might want to know _who_ uploaded a particular release, and that info perhaps should be in the ownership history/audit log for a project.
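As a closing note on this record: the tests in the diff above show the shape of the event-recording call the audit trail introduces. The sketch below is a minimal, self-contained stand-in -- the `Project` class and all data here are fakes, not Warehouse's models; only the `record_event(tag=..., ip_address=..., additional=...)` call shape is taken from the test diff.

```python
# Minimal sketch of the event-recording call shape exercised by this PR's
# tests. Only the keyword arguments mirror the test diff above; the class
# and data below are stand-ins, not Warehouse's actual models.
class Project:
    def __init__(self):
        self.events = []

    def record_event(self, *, tag, ip_address, additional):
        # Warehouse persists a ProjectEvent row here; we just collect a dict.
        self.events.append({"tag": tag, "ip_address": ip_address, **additional})


project = Project()
project.record_event(
    tag="project:release:remove",
    ip_address="0.0.0.0",
    additional={"submitted_by": "example-user", "canonical_version": "1.2.3"},
)
print(project.events)
```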
2019-08-01T14:25:13Z
[]
[]
pypi/warehouse
6,342
pypi__warehouse-6342
[ "6287" ]
c31a3cf1d3669122a4761ba071f5d4f3d23d80a7
diff --git a/warehouse/macaroons/auth_policy.py b/warehouse/macaroons/auth_policy.py --- a/warehouse/macaroons/auth_policy.py +++ b/warehouse/macaroons/auth_policy.py @@ -38,7 +38,8 @@ def _extract_basic_macaroon(auth): except ValueError: return None - if auth_method != "@token": + # TODO: Remove @token as an acceptable token username (GH-6345) + if auth_method != "@token" and auth_method != "__token__": return None return auth diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -31,7 +31,7 @@ class DatabaseMacaroonService: def __init__(self, db_session): self.db = db_session - def _extract_raw_macaroon(self, raw_macaroon): + def _extract_raw_macaroon(self, prefixed_macaroon): """ Returns the base64-encoded macaroon component of a PyPI macaroon, dropping the prefix. @@ -39,13 +39,13 @@ def _extract_raw_macaroon(self, raw_macaroon): Returns None if the macaroon is None, has no prefix, or has the wrong prefix. """ - if raw_macaroon is None: + if prefixed_macaroon is None: return None - try: - prefix, raw_macaroon = raw_macaroon.split(":", 1) - except ValueError: - return None + prefix, split, raw_macaroon = prefixed_macaroon.partition("-") + # TODO: Remove ':' as an acceptable delimiter for tokens (GH-6345) + if prefix != "pypi" or not split: + prefix, _, raw_macaroon = prefixed_macaroon.partition(":") if prefix != "pypi": return None @@ -129,7 +129,7 @@ def create_macaroon(self, location, user_id, description, caveats): version=pymacaroons.MACAROON_V2, ) m.add_first_party_caveat(json.dumps(caveats)) - serialized_macaroon = f"pypi:{m.serialize()}" + serialized_macaroon = f"pypi-{m.serialize()}" return serialized_macaroon, dm def delete_macaroon(self, macaroon_id):
diff --git a/tests/unit/macaroons/test_auth_policy.py b/tests/unit/macaroons/test_auth_policy.py --- a/tests/unit/macaroons/test_auth_policy.py +++ b/tests/unit/macaroons/test_auth_policy.py @@ -32,6 +32,7 @@ ("maybeafuturemethod foobar", None), ("token foobar", "foobar"), ("basic QHRva2VuOmZvb2Jhcg==", "foobar"), # "@token:foobar" + ("basic X190b2tlbl9fOmZvb2Jhcg==", "foobar"), # "__token__:foobar" ], ) def test_extract_http_macaroon(auth, result): @@ -49,6 +50,7 @@ def test_extract_http_macaroon(auth, result): ("bm90YXJlYWx0b2tlbg==", None), # "notarealtoken" ("QGJhZHVzZXI6Zm9vYmFy", None), # "@baduser:foobar" ("QHRva2VuOmZvb2Jhcg==", "foobar"), # "@token:foobar" + ("X190b2tlbl9fOmZvb2Jhcg==", "foobar"), # "__token__:foobar" ], ) def test_extract_basic_macaroon(auth, result): diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -45,6 +45,7 @@ def test_creation(self): ("noprefixhere", None), ("invalid:prefix", None), ("pypi:validprefix", "validprefix"), + ("pypi-validprefix", "validprefix"), ], ) def test_extract_raw_macaroon(self, macaroon_service, raw_macaroon, result): @@ -74,7 +75,7 @@ def test_find_userid_invalid_macaroon(self, macaroon_service): key=b"fake key", version=pymacaroons.MACAROON_V2, ).serialize() - raw_macaroon = f"pypi:{raw_macaroon}" + raw_macaroon = f"pypi-{raw_macaroon}" assert macaroon_service.find_userid(raw_macaroon) is None @@ -107,7 +108,7 @@ def test_verify_no_macaroon(self, macaroon_service): key=b"fake key", version=pymacaroons.MACAROON_V2, ).serialize() - raw_macaroon = f"pypi:{raw_macaroon}" + raw_macaroon = f"pypi-{raw_macaroon}" with pytest.raises(services.InvalidMacaroon): macaroon_service.verify(
Change username & API token prefix, to make Travis auth easier Just wanted to share a potential pain point for folks using the new PyPI API tokens for authentication via a travis.yaml. The @token username needs to be wrapped in quotes and the `:` after `pypi` needs to be escaped to work, otherwise you hit a 403 error (thanks for the help tracking this down @ewdurbin). If you're using the environment variables through Travis' UI, the following works: ``` deploy: provider: pypi user: "@token" # quotes required for travis # server: https://test.pypi.org/legacy/ # uncomment to do a test deploy password: secure: $PASSWORD # stored in travis env var, with `:` after `pypi` escaped (pypi\:) on: branch: master skip_existing: true ``` If you're encrypting the token via the command line, you can just wrap your token in quotes: ```travis encrypt "<your-pypi_token>" --add deploy.password [--com]```
A bit unfortunate that we chose something that requires quoting as the username. It'd be nice to not have this friction when folks migrate to API keys.

Since we're still in the beta period, I think it would be reasonable to change this (but still support `@token` for a bit for backwards compatibility).

Some options that would not require quoting:

* `$token`
* `^token`
* `=token`

@dstufft any thoughts here?

I'm +1 for updating the keys to a `pypi-` or `pypi_` prefix and changing the username to something without an `@`. I'm -1 on backward compatibility for longer than a few days. We know who has provisioned API tokens and can email them to give them a heads-up 24 hours before disabling the older grammar.

Yeah, I should be clear: I meant backwards compatibility until ~the end of the beta period.

I'm +1 for updating keys & username, and -1 on backward compatibility for longer than a few days.

@woodruffw heads-up.

Thanks for the ping! I'll make a PR for these changes now.
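For reference, the parsing this PR lands (see the diff above) accepts the new `pypi-` delimiter and falls back to the legacy `pypi:` form during the transition window. A runnable sketch, with the method pulled out of its service class for illustration:

```python
# Standalone sketch of _extract_raw_macaroon from the diff above: try the
# new "pypi-" delimiter first, then fall back to the legacy "pypi:" form
# (the fallback is removed later, in GH-6368).
def extract_raw_macaroon(prefixed_macaroon):
    if prefixed_macaroon is None:
        return None

    prefix, split, raw_macaroon = prefixed_macaroon.partition("-")
    if prefix != "pypi" or not split:
        prefix, _, raw_macaroon = prefixed_macaroon.partition(":")

    if prefix != "pypi":
        return None

    return raw_macaroon


assert extract_raw_macaroon("pypi-notarealtoken") == "notarealtoken"
assert extract_raw_macaroon("pypi:notarealtoken") == "notarealtoken"  # legacy form
assert extract_raw_macaroon("invalid:prefix") is None
```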
2019-08-01T17:42:50Z
[]
[]
pypi/warehouse
6,346
pypi__warehouse-6346
[ "6343" ]
ac7ee19af9f6a55296e84d4fcbf0f23e3cf7f837
diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py --- a/warehouse/accounts/forms.py +++ b/warehouse/accounts/forms.py @@ -268,11 +268,10 @@ def validate_totp_value(self, field): class WebAuthnAuthenticationForm(WebAuthnCredentialMixin, _TwoFactorAuthenticationForm): __params__ = ["credential"] - def __init__(self, *args, challenge, origin, icon_url, rp_id, **kwargs): + def __init__(self, *args, challenge, origin, rp_id, **kwargs): super().__init__(*args, **kwargs) self.challenge = challenge self.origin = origin - self.icon_url = icon_url self.rp_id = rp_id def validate_credential(self, field): @@ -289,7 +288,6 @@ def validate_credential(self, field): assertion_dict, challenge=self.challenge, origin=self.origin, - icon_url=self.icon_url, rp_id=self.rp_id, ) diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py --- a/warehouse/accounts/interfaces.py +++ b/warehouse/accounts/interfaces.py @@ -137,15 +137,13 @@ def add_webauthn(user_id, **kwargs): Returns None if the user already has this credential. """ - def get_webauthn_credential_options( - user_id, *, challenge, rp_name, rp_id, icon_url - ): + def get_webauthn_credential_options(user_id, *, challenge, rp_name, rp_id): """ Returns a dictionary of credential options suitable for beginning the WebAuthn provisioning process for the given user. """ - def get_webauthn_assertion_options(user_id, *, challenge, icon_url, rp_id): + def get_webauthn_assertion_options(user_id, *, challenge, rp_id): """ Returns a dictionary of assertion options suitable for beginning the WebAuthn authentication process for the given user. @@ -160,9 +158,7 @@ def verify_webauthn_credential(credential, *, challenge, rp_id, origin): webauthn.RegistrationRejectedException on failure. """ - def verify_webauthn_assertion( - user_id, assertion, *, challenge, origin, icon_url, rp_id - ): + def verify_webauthn_assertion(user_id, assertion, *, challenge, origin, rp_id): """ Checks whether the given assertion was produced by the given user's WebAuthn device. diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -327,9 +327,7 @@ def check_totp_value(self, user_id, totp_value, *, tags=None): return valid - def get_webauthn_credential_options( - self, user_id, *, challenge, rp_name, rp_id, icon_url - ): + def get_webauthn_credential_options(self, user_id, *, challenge, rp_name, rp_id): """ Returns a dictionary of credential options suitable for beginning the WebAuthn provisioning process for the given user. @@ -337,19 +335,17 @@ def get_webauthn_credential_options( user = self.get_user(user_id) return webauthn.get_credential_options( - user, challenge=challenge, rp_name=rp_name, rp_id=rp_id, icon_url=icon_url + user, challenge=challenge, rp_name=rp_name, rp_id=rp_id ) - def get_webauthn_assertion_options(self, user_id, *, challenge, icon_url, rp_id): + def get_webauthn_assertion_options(self, user_id, *, challenge, rp_id): """ Returns a dictionary of assertion options suitable for beginning the WebAuthn authentication process for the given user. 
""" user = self.get_user(user_id) - return webauthn.get_assertion_options( - user, challenge=challenge, icon_url=icon_url, rp_id=rp_id - ) + return webauthn.get_assertion_options(user, challenge=challenge, rp_id=rp_id) def verify_webauthn_credential(self, credential, *, challenge, rp_id, origin): """ @@ -375,7 +371,7 @@ def verify_webauthn_credential(self, credential, *, challenge, rp_id, origin): return validated_credential def verify_webauthn_assertion( - self, user_id, assertion, *, challenge, origin, icon_url, rp_id + self, user_id, assertion, *, challenge, origin, rp_id ): """ Checks whether the given assertion was produced by the given user's WebAuthn @@ -387,12 +383,7 @@ def verify_webauthn_assertion( user = self.get_user(user_id) return webauthn.verify_assertion_response( - assertion, - challenge=challenge, - user=user, - origin=origin, - icon_url=icon_url, - rp_id=rp_id, + assertion, challenge=challenge, user=user, origin=origin, rp_id=rp_id ) def add_webauthn(self, user_id, **kwargs): diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -252,10 +252,7 @@ def webauthn_authentication_options(request): userid = two_factor_data.get("userid") user_service = request.find_service(IUserService, context=None) return user_service.get_webauthn_assertion_options( - userid, - challenge=request.session.get_webauthn_challenge(), - icon_url=request.registry.settings.get("warehouse.domain", request.domain), - rp_id=request.domain, + userid, challenge=request.session.get_webauthn_challenge(), rp_id=request.domain ) @@ -288,7 +285,6 @@ def webauthn_authentication_validate(request): user_service=user_service, challenge=request.session.get_webauthn_challenge(), origin=request.host_url, - icon_url=request.registry.settings.get("warehouse.domain", request.domain), rp_id=request.domain, ) diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -474,9 +474,6 @@ def webauthn_provision_options(self): challenge=self.request.session.get_webauthn_challenge(), rp_name=self.request.registry.settings["site.name"], rp_id=self.request.domain, - icon_url=self.request.registry.settings.get( - "warehouse.domain", self.request.domain - ), ) @view_config( diff --git a/warehouse/utils/webauthn.py b/warehouse/utils/webauthn.py --- a/warehouse/utils/webauthn.py +++ b/warehouse/utils/webauthn.py @@ -32,7 +32,7 @@ class RegistrationRejectedException(Exception): WebAuthnCredential = pywebauthn.WebAuthnCredential -def _get_webauthn_users(user, *, icon_url, rp_id): +def _get_webauthn_users(user, *, rp_id): """ Returns a webauthn.WebAuthnUser instance corresponding to the given user model, with properties suitable for @@ -43,7 +43,7 @@ def _get_webauthn_users(user, *, icon_url, rp_id): str(user.id), user.username, user.name, - icon_url, + None, credential.credential_id, credential.public_key, credential.sign_count, @@ -74,25 +74,25 @@ def generate_webauthn_challenge(): return _webauthn_b64encode(os.urandom(32)).decode() -def get_credential_options(user, *, challenge, rp_name, rp_id, icon_url): +def get_credential_options(user, *, challenge, rp_name, rp_id): """ Returns a dictionary of options for credential creation on the client side. 
""" options = pywebauthn.WebAuthnMakeCredentialOptions( - challenge, rp_name, rp_id, str(user.id), user.username, user.name, icon_url + challenge, rp_name, rp_id, str(user.id), user.username, user.name, None ) return options.registration_dict -def get_assertion_options(user, *, challenge, icon_url, rp_id): +def get_assertion_options(user, *, challenge, rp_id): """ Returns a dictionary of options for assertion retrieval on the client side. """ options = pywebauthn.WebAuthnAssertionOptions( - _get_webauthn_users(user, icon_url=icon_url, rp_id=rp_id), challenge + _get_webauthn_users(user, rp_id=rp_id), challenge ) return options.assertion_dict @@ -120,7 +120,7 @@ def verify_registration_response(response, challenge, *, rp_id, origin): raise RegistrationRejectedException(str(e)) -def verify_assertion_response(assertion, *, challenge, user, origin, icon_url, rp_id): +def verify_assertion_response(assertion, *, challenge, user, origin, rp_id): """ Validates the challenge and assertion information sent from the client during authentication. @@ -128,7 +128,7 @@ def verify_assertion_response(assertion, *, challenge, user, origin, icon_url, r Returns an updated signage count on success. Raises AuthenticationRejectedException on failure. """ - webauthn_users = _get_webauthn_users(user, icon_url=icon_url, rp_id=rp_id) + webauthn_users = _get_webauthn_users(user, rp_id=rp_id) cred_ids = [cred.credential_id for cred in webauthn_users] encoded_challenge = _webauthn_b64encode(challenge.encode()).decode()
diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py --- a/tests/unit/accounts/test_forms.py +++ b/tests/unit/accounts/test_forms.py @@ -615,7 +615,6 @@ def test_creation(self): user_service = pretend.stub() challenge = pretend.stub() origin = pretend.stub() - icon_url = pretend.stub() rp_id = pretend.stub() form = forms.WebAuthnAuthenticationForm( @@ -623,7 +622,6 @@ def test_creation(self): user_service=user_service, challenge=challenge, origin=origin, - icon_url=icon_url, rp_id=rp_id, ) @@ -636,7 +634,6 @@ def test_credential_bad_payload(self): user_service=pretend.stub(), challenge=pretend.stub(), origin=pretend.stub(), - icon_url=pretend.stub(), rp_id=pretend.stub(), ) assert not form.validate() @@ -653,7 +650,6 @@ def test_credential_invalid(self): ), challenge=pretend.stub(), origin=pretend.stub(), - icon_url=pretend.stub(), rp_id=pretend.stub(), ) assert not form.validate() @@ -670,7 +666,6 @@ def test_credential_valid(self): ), challenge=pretend.stub(), origin=pretend.stub(), - icon_url=pretend.stub(), rp_id=pretend.stub(), ) assert form.validate() diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py --- a/tests/unit/accounts/test_services.py +++ b/tests/unit/accounts/test_services.py @@ -426,22 +426,15 @@ def test_check_totp_value_user_rate_limited(self, user_service, metrics): ] @pytest.mark.parametrize( - ("challenge", "rp_name", "rp_id", "icon_url"), - ( - ["fake_challenge", "fake_rp_name", "fake_rp_id", "fake_icon_url"], - [None, None, None, None], - ), + ("challenge", "rp_name", "rp_id"), + (["fake_challenge", "fake_rp_name", "fake_rp_id"], [None, None, None]), ) def test_get_webauthn_credential_options( - self, user_service, challenge, rp_name, rp_id, icon_url + self, user_service, challenge, rp_name, rp_id ): user = UserFactory.create() options = user_service.get_webauthn_credential_options( - user.id, - challenge=challenge, - rp_name=rp_name, - rp_id=rp_id, - icon_url=icon_url, + user.id, challenge=challenge, rp_name=rp_name, rp_id=rp_id ) assert options["user"]["id"] == str(user.id) @@ -450,11 +443,7 @@ def test_get_webauthn_credential_options( assert options["challenge"] == challenge assert options["rp"]["name"] == rp_name assert options["rp"]["id"] == rp_id - - if icon_url: - assert options["user"]["icon"] == icon_url - else: - assert "icon" not in options["user"] + assert "icon" not in options["user"] def test_get_webauthn_assertion_options(self, user_service): user = UserFactory.create() @@ -467,10 +456,7 @@ def test_get_webauthn_assertion_options(self, user_service): ) options = user_service.get_webauthn_assertion_options( - user.id, - challenge="fake_challenge", - icon_url="fake_icon_url", - rp_id="fake_rp_id", + user.id, challenge="fake_challenge", rp_id="fake_rp_id" ) assert options["challenge"] == "fake_challenge" @@ -550,7 +536,6 @@ def test_verify_webauthn_assertion(self, user_service, monkeypatch): pretend.stub(), challenge=pretend.stub(), origin=pretend.stub(), - icon_url=pretend.stub(), rp_id=pretend.stub(), ) assert updated_sign_count == 2 diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -1138,12 +1138,7 @@ def test_get_webauthn_options(self): get_webauthn_challenge=pretend.call_recorder(lambda: "fake_challenge") ), find_service=lambda *a, **kw: user_service, - registry=pretend.stub( - settings={ - "site.name": "fake_site_name", - "warehouse.domain": "fake_domain", - } - ), + 
registry=pretend.stub(settings={"site.name": "fake_site_name"}), domain="fake_domain", ) @@ -1157,7 +1152,6 @@ def test_get_webauthn_options(self): challenge="fake_challenge", rp_name=request.registry.settings["site.name"], rp_id=request.domain, - icon_url=request.registry.settings["warehouse.domain"], ) ] diff --git a/tests/unit/utils/test_webauthn.py b/tests/unit/utils/test_webauthn.py --- a/tests/unit/utils/test_webauthn.py +++ b/tests/unit/utils/test_webauthn.py @@ -80,12 +80,11 @@ def test_verify_assertion_response(monkeypatch): challenge="not_a_real_challenge", user=not_a_real_user, origin="fake_origin", - icon_url="fake_icon_url", rp_id="fake_rp_id", ) assert get_webauthn_users.calls == [ - pretend.call(not_a_real_user, icon_url="fake_icon_url", rp_id="fake_rp_id") + pretend.call(not_a_real_user, rp_id="fake_rp_id") ] assert assertion_cls.calls == [ pretend.call( @@ -117,6 +116,5 @@ def test_verify_assertion_response_failure(monkeypatch): challenge="not_a_real_challenge", user=pretend.stub(), origin="fake_origin", - icon_url="fake_icon_url", rp_id="fake_rp_id", )
"set up security device" doesn't work, with error message "'user.icon' should be a secure URL" **Describe the bug** The "set up security device" button on https://pypi.org/manage/account/webauthn-provision shows an error message: ![image](https://user-images.githubusercontent.com/716529/62319181-4174c380-b452-11e9-9b2c-85668deb4dd9.png) **Expected behavior** Prompt me to plug in my YubiKey, I would hope? **To Reproduce** Click the button. **My Platform** Browser: Google Chrome Version 76.0.3809.87 (Official Build) (64-bit) Operating System: ProductName: Mac OS X ProductVersion: 10.14.6 BuildVersion: 18G84
Thanks for the report @glyph! I was able to repro, checking out now.
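The root cause, per the diff above, is that an `icon_url` was being passed through to the WebAuthn user options, and pywebauthn requires any icon it is given to be a secure URL; the fix simply stops sending one (`None`). Below is a simplified, self-contained illustration of that constraint -- the dict shape and the check are stand-ins for this write-up, not pywebauthn's actual code.

```python
# Simplified stand-in for the icon check that rejected registration: an
# icon, when present, must be an https:// URL, so the fix passes None and
# omits the field entirely.
def make_user_entry(user_id, username, icon_url=None):
    entry = {"id": user_id, "name": username, "displayName": username}
    if icon_url is not None:
        if not icon_url.startswith("https://"):
            raise ValueError("'user.icon' should be a secure URL")
        entry["icon"] = icon_url
    return entry


try:
    make_user_entry("42", "glyph", icon_url="pypi.org")  # bare domain, as in the bug
except ValueError as exc:
    print(exc)

print(make_user_entry("42", "glyph"))  # after the fix: no icon at all
```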
2019-08-01T20:51:46Z
[]
[]
pypi/warehouse
6,353
pypi__warehouse-6353
[ "6232" ]
164d0b332af4e0d0da4324c7d5d9c05784030bda
diff --git a/warehouse/errors.py b/warehouse/errors.py --- a/warehouse/errors.py +++ b/warehouse/errors.py @@ -11,7 +11,15 @@ # limitations under the License. from pyramid.httpexceptions import HTTPUnauthorized +from pyramid.security import Denied class BasicAuthBreachedPassword(HTTPUnauthorized): pass + + +class WarehouseDenied(Denied): + def __new__(cls, s, *args, reason=None, **kwargs): + inner = super().__new__(cls, s, *args, **kwargs) + inner.reason = reason + return inner diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -973,19 +973,22 @@ def file_upload(request): # Check that the user has permission to do things to this project, if this # is a new project this will act as a sanity check for the role we just # added above. - if not request.has_permission("upload", project): - raise _exc_with_message( - HTTPForbidden, + allowed = request.has_permission("upload", project) + if not allowed: + reason = getattr(allowed, "reason", None) + msg = ( ( - "The credential associated with user '{0}' " - "isn't allowed to upload to project '{1}'. " + "The user '{0}' isn't allowed to upload to project '{1}'. " "See {2} for more information." ).format( request.user.username, project.name, request.help_url(_anchor="project-name"), - ), + ) + if reason is None + else allowed.msg ) + raise _exc_with_message(HTTPForbidden, msg) # Update name if it differs but is still equivalent. We don't need to check if # they are equivalent when normalized because that's already been done when we diff --git a/warehouse/macaroons/auth_policy.py b/warehouse/macaroons/auth_policy.py --- a/warehouse/macaroons/auth_policy.py +++ b/warehouse/macaroons/auth_policy.py @@ -14,11 +14,11 @@ from pyramid.authentication import CallbackAuthenticationPolicy from pyramid.interfaces import IAuthenticationPolicy, IAuthorizationPolicy -from pyramid.security import Denied from pyramid.threadlocal import get_current_request from zope.interface import implementer from warehouse.cache.http import add_vary_callback +from warehouse.errors import WarehouseDenied from warehouse.macaroons.interfaces import IMacaroonService from warehouse.macaroons.services import InvalidMacaroon @@ -119,7 +119,9 @@ def permits(self, context, principals, permission): # that case we're going to always deny, because without a request, we can't # determine if this request is authorized or not. 
if request is None: - return Denied("There was no active request.") + return WarehouseDenied( + "There was no active request.", reason="no_active_request" + ) # Re-extract our Macaroon from the request, it sucks to have to do this work # twice, but I believe it is inevitable unless we pass the Macaroon back as @@ -136,7 +138,9 @@ def permits(self, context, principals, permission): try: macaroon_service.verify(macaroon, context, principals, permission) except InvalidMacaroon as exc: - return Denied(f"The supplied token was invalid: {str(exc)!r}") + return WarehouseDenied( + f"Invalid API Token: {exc}!r", reason="invalid_api_token" + ) # If our Macaroon is verified, and for a valid permission then we'll pass # this request to our underlying Authorization policy, so it can handle its @@ -144,7 +148,10 @@ def permits(self, context, principals, permission): if permission in valid_permissions: return self.policy.permits(context, principals, permission) else: - return Denied(f"API tokens are not valid for permission: {permission}!") + return WarehouseDenied( + f"API tokens are not valid for permission: {permission}!", + reason="invalid_permission", + ) else: return self.policy.permits(context, principals, permission) diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py --- a/warehouse/macaroons/caveats.py +++ b/warehouse/macaroons/caveats.py @@ -45,7 +45,9 @@ def verify_projects(self, projects): if project.normalized_name in projects: return True - raise InvalidMacaroon("project-scoped token matches no projects") + raise InvalidMacaroon( + f"project-scoped token is not valid for project '{project.name}'" + ) def verify(self, predicate): try:
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -2234,7 +2234,7 @@ def test_upload_fails_without_permission(self, pyramid_config, db_request): assert db_request.help_url.calls == [pretend.call(_anchor="project-name")] assert resp.status_code == 403 assert resp.status == ( - "403 The credential associated with user '{0}' " + "403 The user '{0}' " "isn't allowed to upload to project '{1}'. " "See /the/help/url/ for more information." ).format(user2.username, project.name)
Improved error reporting for token permissions
As a follow-on to #6084, we should improve the message returned to users when their provided token isn't acceptable in the given context (e.g., if it doesn't match the package, or doesn't have the right permissions at all).

Current `master` provides this message, which is misleading:

> `The user 'brainwane' isn't allowed to upload to project 'Forms990-analysis'`

#6084 changes that message to the more generic:

> `The credential associated with user 'brainwane' isn't allowed to upload to project 'Forms990-analysis'`

But ideally we'd have something like this:

> `This API token is not valid for project 'Forms990-analysis'`

cc @nlhkabu @brainwane
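The diff above accomplishes this by subclassing Pyramid's `Denied` so the denial can carry a machine-readable `reason`, letting the upload view pick a token-specific message. A rough, self-contained sketch of that pattern follows; the `str`-based base class below only mimics the shape and is not Pyramid's actual `Denied`.

```python
# Rough sketch of a Denied result that carries a machine-readable reason.
# The str base class is a demo stand-in for Pyramid's Denied, which is richer.
class Denied(str):
    pass


class WarehouseDenied(Denied):
    def __new__(cls, s, *args, reason=None, **kwargs):
        inner = super().__new__(cls, s, *args, **kwargs)
        inner.reason = reason
        return inner


allowed = WarehouseDenied(
    "project-scoped token is not valid for project 'sampleproject'",
    reason="invalid_api_token",
)

# The view can now distinguish token failures from role failures:
if getattr(allowed, "reason", None) is not None:
    print(allowed)  # surface the token-specific message, not the generic one
```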
2019-08-02T20:58:07Z
[]
[]
pypi/warehouse
6,368
pypi__warehouse-6368
[ "6345" ]
37c2e183e6d26bd0534d452cfcc97d014d160d5d
diff --git a/warehouse/macaroons/auth_policy.py b/warehouse/macaroons/auth_policy.py --- a/warehouse/macaroons/auth_policy.py +++ b/warehouse/macaroons/auth_policy.py @@ -38,8 +38,7 @@ def _extract_basic_macaroon(auth): except ValueError: return None - # TODO: Remove @token as an acceptable token username (GH-6345) - if auth_method != "@token" and auth_method != "__token__": + if auth_method != "__token__": return None return auth diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -43,12 +43,8 @@ def _extract_raw_macaroon(self, prefixed_macaroon): if prefixed_macaroon is None: return None - prefix, split, raw_macaroon = prefixed_macaroon.partition("-") - # TODO: Remove ':' as an acceptable delimiter for tokens (GH-6345) - if prefix != "pypi" or not split: - prefix, _, raw_macaroon = prefixed_macaroon.partition(":") - - if prefix != "pypi": + prefix, _, raw_macaroon = prefixed_macaroon.partition("-") + if prefix != "pypi" or not raw_macaroon: return None return raw_macaroon
diff --git a/tests/unit/macaroons/test_auth_policy.py b/tests/unit/macaroons/test_auth_policy.py --- a/tests/unit/macaroons/test_auth_policy.py +++ b/tests/unit/macaroons/test_auth_policy.py @@ -31,7 +31,6 @@ ("notarealtoken", None), ("maybeafuturemethod foobar", None), ("token foobar", "foobar"), - ("basic QHRva2VuOmZvb2Jhcg==", "foobar"), # "@token:foobar" ("basic X190b2tlbl9fOmZvb2Jhcg==", "foobar"), # "__token__:foobar" ], ) @@ -49,7 +48,6 @@ def test_extract_http_macaroon(auth, result): ("notbase64", None), ("bm90YXJlYWx0b2tlbg==", None), # "notarealtoken" ("QGJhZHVzZXI6Zm9vYmFy", None), # "@baduser:foobar" - ("QHRva2VuOmZvb2Jhcg==", "foobar"), # "@token:foobar" ("X190b2tlbl9fOmZvb2Jhcg==", "foobar"), # "__token__:foobar" ], ) diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -44,7 +44,6 @@ def test_creation(self): (None, None), ("noprefixhere", None), ("invalid:prefix", None), - ("pypi:validprefix", "validprefix"), ("pypi-validprefix", "validprefix"), ], )
API tokens: Remove @token and pypi: cases At some point in the future (perhaps at the end of the API token beta?), support for `@token` as a token username and `pypi:` as a token prefix should end in favor of `__token__` and `pypi-`, respectively. See #6287, #6342. cc @brainwane @di @ewdurbin @dstufft
I'm in favor of doing this before the end of the beta, so that we can tell beta testers to try the new token usernames and prefixes, and find out during the beta whether the new ones cause any glitches.

Would it be too much to just reserve the "token" username? Is it already in use? GitHub does that for things like "issues" etc.

Looks like the `token` username is unused: https://pypi.org/user/token/

@fschulze @hugovk you want something that other PyPIs (e.g. devpi) can use, so you want something that was impossible to register previously.

I've gone ahead and squatted that name. Obviously, I'm cool with just removing that account or letting PyPI admins do whatever they deem necessary with that name.

How about '🎟' as the username? e.g. "\N{ADMISSION TICKETS}"

@graingert what does the pypi.org username have to do with devpi? The ``devpi push`` command needs the PyPI credentials anyway when used for a PyPI release.

@fschulze devpi would probably want to support this API token authentication scheme too. If an install has a user called "token", it would break that user for them.

I'm thinking downstream "twine upload -r devpi", not upstream PyPI.

@graingert it would use separate credentials anyway, as it already does now. I don't see any reason accounts on PyPI would interfere with devpi or the other way around.

Yes, but the "token" username is part of the API token auth protocol.

@graingert yes, and it doesn't matter in devpi, because the API is specific to pypi.org. Otherwise all users in devpi would conflict with pypi.org already. If we implement upload tokens in devpi, then we would handle that on the devpi side. It is pretty easy to determine whether a token was used or not. Any ``token`` user in devpi would not interfere, because we can check whether we got a token or not by inspecting or validating it. So twine would work with devpi as soon as we added token support, and we wouldn't even have to block the ``token`` user name.

It's ultimately the maintainers' call, but I'm 👎 on plain `token` (or similar) for a few reasons:

1. Semantically, tokens are not passwords, so it doesn't make sense for them to have a valid username associated with them (apart from their relationship to the bearing user). The only reason we have a "username" at all in the scheme is backwards compatibility with `Authorization: basic` -- IIUC, the eventual plan is to update tools like `twine` and `setuptools` to use `Authorization: token <macaroon>` directly.
1. Similarly, it's _especially_ confusing to have a valid, _different_ username (e.g., the squatted `token`) built into the validation scheme for all users' tokens.
1. `^token` or similar is a lot easier to search for than `token`.

Is there a regex of currently valid PyPI usernames so we can be more informed when picking an intentionally invalid one?

https://github.com/pypa/warehouse/blob/fe4b61d3e9f9bea4fa037de4aa17fac6599b63c2/warehouse/accounts/models.py#L60-L66

So some examples: `_token`, `-token`, `.token`, `token_`, `token-`, `token.` and my personal favorite: `__token__`

`tokentokentokentokentokentokentokentokentokentokentoken` is also an invalid username because it is too long.

`__token__` me likey.
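For the curious: the constraint behind the linked `models.py` lines boils down to a length cap plus a pattern that forbids leading/trailing `.`, `_`, and `-`, which is why `__token__` can never collide with a real account. The regex below is quoted from the repository around the time of this thread, so treat it as illustrative rather than authoritative:

```python
import re

# Approximation of the users_valid_username check constraint linked above:
# at most 50 characters, and no leading/trailing '.', '_' or '-'.
USERNAME_RE = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE)


def is_registerable(name):
    return len(name) <= 50 and USERNAME_RE.match(name) is not None


for candidate in ["token", "_token", "token_", "__token__", "token" * 11]:
    print(f"{candidate!r}: {is_registerable(candidate)}")
# 'token' is registerable (and was squatted above); the rest are not.
```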
2019-08-05T20:29:04Z
[]
[]
pypi/warehouse
6,408
pypi__warehouse-6408
[ "5825" ]
7b7588d23eb7b173ce7f604c83c2b237a87b2a3b
diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -87,9 +87,9 @@ def __init__(self, *args, user_service, **kwargs): self.user_service = user_service -class DeleteTOTPForm(UsernameMixin, forms.Form): +class DeleteTOTPForm(UsernameMixin, PasswordMixin, forms.Form): - __params__ = ["confirm_username"] + __params__ = ["confirm_password"] def __init__(self, *args, user_service, **kwargs): super().__init__(*args, **kwargs) @@ -246,15 +246,16 @@ def validate_token_scope(self, field): self.validated_scope = {"projects": [scope_value]} -class DeleteMacaroonForm(forms.Form): - __params__ = ["macaroon_id"] +class DeleteMacaroonForm(UsernameMixin, PasswordMixin, forms.Form): + __params__ = ["confirm_password", "macaroon_id"] macaroon_id = wtforms.StringField( validators=[wtforms.validators.DataRequired(message="Identifier required")] ) - def __init__(self, *args, macaroon_service, **kwargs): + def __init__(self, *args, macaroon_service, user_service, **kwargs): super().__init__(*args, **kwargs) + self.user_service = user_service self.macaroon_service = macaroon_service def validate_macaroon_id(self, field): diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -470,7 +470,7 @@ def delete_totp(self): return HTTPSeeOther(self.request.route_path("manage.account")) form = DeleteTOTPForm( - **self.request.POST, + password=self.request.POST["confirm_password"], username=self.request.user.username, user_service=self.user_service, ) @@ -489,7 +489,7 @@ def delete_totp(self): queue="success", ) else: - self.request.session.flash("Invalid credentials", queue="error") + self.request.session.flash("Invalid credentials. Try again", queue="error") return HTTPSeeOther(self.request.route_path("manage.account")) @@ -631,7 +631,9 @@ def default_response(self): project_names=self.project_names, ), "delete_macaroon_form": DeleteMacaroonForm( - macaroon_service=self.macaroon_service + username=self.request.user.username, + user_service=self.user_service, + macaroon_service=self.macaroon_service, ), } @@ -699,7 +701,11 @@ def create_macaroon(self): @view_config(request_method="POST", request_param=DeleteMacaroonForm.__params__) def delete_macaroon(self): form = DeleteMacaroonForm( - **self.request.POST, macaroon_service=self.macaroon_service + password=self.request.POST["confirm_password"], + macaroon_id=self.request.POST["macaroon_id"], + macaroon_service=self.macaroon_service, + username=self.request.user.username, + user_service=self.user_service, ) if form.validate(): @@ -730,6 +736,8 @@ def delete_macaroon(self): self.request.session.flash( f"Deleted API token '{macaroon.description}'.", queue="success" ) + else: + self.request.session.flash("Invalid credentials. Try again", queue="error") redirect_to = self.request.referer if not is_safe_url(redirect_to, host=self.request.host):
diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -167,6 +167,19 @@ def test_creation(self): assert form.user_service is user_service + def test_validate_confirm_password(self): + user_service = pretend.stub( + find_userid=pretend.call_recorder(lambda userid: 1), + check_password=pretend.call_recorder( + lambda userid, password, tags=None: True + ), + ) + form = forms.DeleteTOTPForm( + username="username", user_service=user_service, password="password" + ) + + assert form.validate() + class TestProvisionWebAuthnForm: def test_creation(self): @@ -433,16 +446,26 @@ def test_validate_token_scope_valid_project(self): class TestDeleteMacaroonForm: def test_creation(self): macaroon_service = pretend.stub() - form = forms.DeleteMacaroonForm(macaroon_service=macaroon_service) + user_service = pretend.stub() + form = forms.DeleteMacaroonForm( + macaroon_service=macaroon_service, user_service=user_service + ) assert form.macaroon_service is macaroon_service + assert form.user_service is user_service def test_validate_macaroon_id_invalid(self): macaroon_service = pretend.stub( find_macaroon=pretend.call_recorder(lambda id: None) ) + user_service = pretend.stub( + find_userid=lambda *a, **kw: 1, check_password=lambda *a, **kw: True + ) form = forms.DeleteMacaroonForm( - data={"macaroon_id": pretend.stub()}, macaroon_service=macaroon_service + data={"macaroon_id": pretend.stub(), "password": "password"}, + macaroon_service=macaroon_service, + user_service=user_service, + username="username", ) assert not form.validate() @@ -452,8 +475,14 @@ def test_validate_macaroon_id(self): macaroon_service = pretend.stub( find_macaroon=pretend.call_recorder(lambda id: pretend.stub()) ) + user_service = pretend.stub( + find_userid=lambda *a, **kw: 1, check_password=lambda *a, **kw: True + ) form = forms.DeleteMacaroonForm( - data={"macaroon_id": pretend.stub()}, macaroon_service=macaroon_service + data={"macaroon_id": pretend.stub(), "password": "password"}, + macaroon_service=macaroon_service, + username="username", + user_service=user_service, ) assert form.validate() diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -1080,7 +1080,7 @@ def test_delete_totp(self, monkeypatch, db_request): record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( - POST={"confirm_username": pretend.stub()}, + POST={"confirm_password": pretend.stub()}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -1123,13 +1123,13 @@ def test_delete_totp(self, monkeypatch, db_request): ) ] - def test_delete_totp_bad_username(self, monkeypatch, db_request): + def test_delete_totp_bad_password(self, monkeypatch, db_request): user_service = pretend.stub( get_totp_secret=lambda id: b"secret", update_user=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( - POST={"confirm_username": pretend.stub()}, + POST={"confirm_password": pretend.stub()}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -1151,7 +1151,7 @@ def test_delete_totp_bad_username(self, monkeypatch, db_request): assert user_service.update_user.calls == [] assert request.session.flash.calls == [ - pretend.call("Invalid credentials", queue="error") + 
pretend.call("Invalid credentials. Try again", queue="error") ] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" @@ -1162,7 +1162,7 @@ def test_delete_totp_not_provisioned(self, monkeypatch, db_request): update_user=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( - POST={"confirm_username": pretend.stub()}, + POST={"confirm_password": pretend.stub()}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -1469,7 +1469,7 @@ def test_default_response(self, monkeypatch): ) request = pretend.stub( - user=pretend.stub(id=pretend.stub()), + user=pretend.stub(id=pretend.stub(), username=pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: pretend.stub(), IUserService: pretend.stub(), @@ -1767,7 +1767,7 @@ def test_delete_macaroon_invalid_form(self, monkeypatch): delete_macaroon=pretend.call_recorder(lambda id: pretend.stub()) ) request = pretend.stub( - POST={}, + POST={"confirm_password": "password", "macaroon_id": "macaroon_id"}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, @@ -1775,6 +1775,8 @@ def test_delete_macaroon_invalid_form(self, monkeypatch): }[interface], referer="/fake/safe/route", host=None, + user=pretend.stub(username=pretend.stub()), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) delete_macaroon_obj = pretend.stub(validate=lambda: False) @@ -1790,13 +1792,16 @@ def test_delete_macaroon_invalid_form(self, monkeypatch): assert isinstance(result, HTTPSeeOther) assert result.location == "/fake/safe/route" assert macaroon_service.delete_macaroon.calls == [] + assert request.session.flash.calls == [ + pretend.call("Invalid credentials. 
Try again", queue="error") + ] def test_delete_macaroon_dangerous_redirect(self, monkeypatch): macaroon_service = pretend.stub( delete_macaroon=pretend.call_recorder(lambda id: pretend.stub()) ) request = pretend.stub( - POST={}, + POST={"confirm_password": "password", "macaroon_id": "macaroon_id"}, route_path=pretend.call_recorder(lambda x: "/safe/route"), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, @@ -1804,6 +1809,8 @@ def test_delete_macaroon_dangerous_redirect(self, monkeypatch): }[interface], referer="http://google.com/", host=None, + user=pretend.stub(username=pretend.stub()), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) delete_macaroon_obj = pretend.stub(validate=lambda: False) @@ -1833,7 +1840,7 @@ def test_delete_macaroon(self, monkeypatch): ) user_service = pretend.stub(record_event=record_event) request = pretend.stub( - POST={}, + POST={"confirm_password": "password", "macaroon_id": "macaroon_id"}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, @@ -1842,7 +1849,7 @@ def test_delete_macaroon(self, monkeypatch): session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), referer="/fake/safe/route", host=None, - user=pretend.stub(id=pretend.stub()), + user=pretend.stub(id=pretend.stub(), username=pretend.stub()), remote_addr="0.0.0.0", ) @@ -1892,7 +1899,7 @@ def test_delete_macaroon_records_events_for_each_project(self, monkeypatch): ) user_service = pretend.stub(record_event=record_event) request = pretend.stub( - POST={}, + POST={"confirm_password": pretend.stub(), "macaroon_id": pretend.stub()}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service,
2FA Ask for password not username when disabling 2FA
**Describe the bug**
When disabling 2FA for an account, it would make sense to ask for the user's password and not the username for confirmation, as that would make it much harder to do drive-by disabling on unattended computers.

**Expected behavior**
Ask for the password when disabling 2FA.

**To Reproduce**
Try disabling 2FA on https://pypi.org/manage/account/.
Yeah, I thought this was a bit odd. Separate ticket, but possibly related: should Warehouse require a password to enable 2FA? To prevent similar drive-by enabling of 2FA on unattended computers. This was a subject of some discussion in #5567 (although I'm currently having trouble finding the exact comment). As far as I know, requiring a password (or just a second factor) when disabling 2FA is not a consistent practice across major services. Both Google and GitHub will prompt you for some kind of authentication if you haven't accessed their security pages in a while but both retain a cookie that allows future visits without re-authentication, eliminating the drive-by protection. Google requires the user's password, while GitHub allows the user to use one of their second factor methods. Twitter requires the user's password twice: once to access the security page, and again to actually disable 2FA. The theory behind the current behavior is that (1) the user is already in an authenticated context, one where they have (almost) absolute control over the state of their account, (2) anything that strictly decreases the security of their account should require confirmation, but **not** require them to jump through hoops. Therefore, we make them confirm their intent by typing their username out, but don't encourage security fatigue by re-prompting for their password. On a more general note, a drive-by attacker can currently do far worse than disable the user's 2FA method: they can transfer all packages to a temporary account, delete the current account (only the username is required), and then create a new, identically named account that they control completely. As far as I know (I might be wrong!), none of that would be visible from an package consumer's perspective. In summary: I think we should treat drive-by attacks as black swan events and mitigate them in other ways (audit logging, email alerts when an account's security posture changes, &c). That isn't to say that changing the behavior to require a password would be _bad_; I just think it's unnecessary and attempts to cover an attack that I think is unlikely (why reduce the account's security to a single factor when you can own it entirely?) We could take an approach like Google or GitHub, but I see relatively little payoff: the two are still vulnerable to drive-bys once the user is authenticated. > This was a subject of some discussion in #5567 (although I'm currently having trouble finding the exact comment). > > As far as I know, requiring a password (or just a second factor) when disabling 2FA is not a consistent practice across major services. Both Google and GitHub will prompt you for some kind of authentication if you haven't accessed their security pages in a while but both retain a cookie that allows future visits without re-authentication, eliminating the drive-by protection. Google requires the user's password, while GitHub allows the user to use one of their second factor methods. Twitter requires the user's password twice: once to access the security page, and again to actually disable 2FA. > > The theory behind the current behavior is that (1) the user is already in an authenticated context, one where they have (almost) absolute control over the state of their account, (2) anything that strictly decreases the security of their account should require confirmation, but **not** require them to jump through hoops. 
Therefore, we make them confirm their intent by typing their username out, but don't encourage security fatigue by re-prompting for their password. > I understand (1) for sure, but don't follow you regarding (2) to be honest. It seems to me that confirmation via a password is not an unreasonable amount of effort, while using the username effectively is no effort at all. IOW entering a username to authorize a possibly dangerous change is not an expected UX pattern (to me!), in contrast to entering a password. > On a more general note, a drive-by attacker can currently do far worse than disable the user's 2FA method: they can transfer all packages to a temporary account, delete the current account (only the username is required), and then create a new, identically named account that they control completely. As far as I know (I might be wrong!), none of that would be visible from an package consumer's perspective. > Sure, although I didn't raise this issue for any complex drive-by scenario, but about one that is relatively easy to achieve with very little exposure to a victim's computer, which would reduce the risk for an attack. Additional means to inform the user *when* 2FA is disabled would of course be useful. (Another idea for a more complex attack: a compromised browser extension can disable 2FA without user interaction since it can parse the username from the account settings page) > In summary: I think we should treat drive-by attacks as black swan events and mitigate them in other ways (audit logging, email alerts when an account's security posture changes, &c). That isn't to say that changing the behavior to require a password would be _bad_; I just think it's unnecessary and attempts to cover an attack that I think is unlikely (why reduce the account's security to a single factor when you can own it entirely?) > FWIW I gave feedback to a feature that I'm beta-testing, so this seems like a case of an unfinished product feature. What struck me as odd about the use of a username for disabling 2FA was basically me not having seen the pattern before (at all) and the chance to simply use the established pattern to use the user's password for confirming a possibly dangerous action. If in doubt I think it makes sense to not use the username but the password, basically. > We could take an approach like Google or GitHub, but I see relatively little payoff: the two are still vulnerable to drive-bys once the user is authenticated. *shrug* I haven't done any user research to proof my experience, so I let you decide what to do with my feedback. I too think asking for the username is odd, and asking for the password is more appropriate for an action like removing 2FA. Asking for a username is functionally equivalent to a confirmation dialog but requiring slightly more typing/effort for nothing much here. I don't think asking for the username for a destructive action is a pattern I've seen elsewhere and switching to either a confirmation dialog or to asking for the password are both wins in this case IMHO. @nlhkabu I have a suggestion here, which is: perhaps you could skim https://simplysecure.org/knowledge-base/ to check whether there's research or advice there on what to require of a user in this scenario? Thanks for elaborating on your thinking here, @jezdez -- I appreciate the detail! @brainwane thank you for this link - looks like a great resource, however, unfortunately, I couldn't find anything specific to this scenario. 
My preference here is to change this to a password validation pattern, based on this feedback. I don't think it _matters_ if it is no more secure (although obviously it would matter if it were _less_ secure) -> if it is more familiar and/or makes people feel more comfortable, that's enough to justify the change from a UX perspective. @woodruffw I've added this to the OTF security work milestone. Let's make a PR for this once the webauthn PR is done? The new modal should look like this: ![Screenshot from 2019-05-23 06-35-57](https://user-images.githubusercontent.com/3323703/58228026-29334a80-7d25-11e9-89d8-90f5d7e940a4.png) Additional features: - The user should be able to toggle the visibility of the password, as on our other password fields - The button should be disabled until a password is entered (as per the current behaviour) > Let's make a PR for this once the webauthn PR is done? Sounds good to me! @brainwane 💯 agreed that as in with other websites (say GitHub or Google), and assuming the user is logged in, the password should be required to edit any sensitive security setting, not just 2FA. Maybe a decorator would help? As discussed today with @woodruffw, we plan to update this UI to: - Ask for password on TOTP modal - Ask for password and key label on webauthn modal > The button should be disabled until the correct password is entered (as per the current behaviour) I'll flag that this seems a little odd and tricky to achieve to me; since the password checking should always happen on the backend. >> The button should be disabled until the correct password is entered (as per the current behaviour) > > I'll flag that this seems a little odd and tricky to achieve to me; since the password checking should always happen on the backend. Agreed. I wonder if this should actually be: > The button should be disabled until **a non-empty** password is entered --- Two notes: * **Elsewhere in the UI, the button is disabled until you enter *any* password, regardless of correctness.** I made a todo note a while back to poke at this a bit and see what’s going on. But I went to look just now and I couldn’t find anywhere in the settings panels where the user-side code is checking the *correctness* of the password. Closest I could find was the “Change Password” section: ![Screenshot 2019-06-19 at 22 21 12](https://user-images.githubusercontent.com/301220/59802292-a16b3c80-92e0-11e9-894d-c12d53e13acc.png) Here, the button is disabled until you enter *something* in the “Old password” field (my password is not three characters 😉 ) and both the passwords entered in the “New password” field match (which is a reasonably check to perform client-side). If I go ahead and click “Update password”, I get an error: ![Screenshot 2019-06-19 at 22 23 50](https://user-images.githubusercontent.com/301220/59802435-f9a23e80-92e0-11e9-8c45-c1bd7fbb8756.png) * **The current “disable 2FA” screen checks for correctness of username.** This is a PyPI username (I think), but it's not my username: ![Screenshot 2019-06-19 at 22 26 09](https://user-images.githubusercontent.com/301220/59802626-48e86f00-92e1-11e9-894d-00b2b84e1ce2.png) The button doesn’t get enabled until I enter my username – which isn’t an unreasonable check to perform client-side, because my username isn’t secret information. Is this what @nlhkabu meant by “as per the current behaviour”? Yes, you're right @alexwlchan - I'll update my comment above :) This requires some JavaScript work. @yeraydiazdiaz or @di do you have time to help with this? 
I can have a go at it. That would be great, thank you, @yeraydiazdiaz! This is an issue we want to resolve before we can declare the WebAuthn beta finished, so I particularly appreciate your help with this.
2019-08-10T18:49:50Z
[]
[]
pypi/warehouse
6,419
pypi__warehouse-6419
[ "6380" ]
11938380f8431bb0aa6f14eeb3445622b21df3bf
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -284,6 +284,7 @@ def configure(settings=None): # We'll want to configure some filters for Jinja2 as well. filters = config.get_settings().setdefault("jinja2.filters", {}) filters.setdefault("format_classifiers", "warehouse.filters:format_classifiers") + filters.setdefault("classifier_id", "warehouse.filters:classifier_id") filters.setdefault("format_tags", "warehouse.filters:format_tags") filters.setdefault("json", "warehouse.filters:tojson") filters.setdefault("camoify", "warehouse.filters:camoify") diff --git a/warehouse/filters.py b/warehouse/filters.py --- a/warehouse/filters.py +++ b/warehouse/filters.py @@ -144,6 +144,10 @@ def format_classifiers(classifiers): return structured +def classifier_id(classifier): + return classifier.replace(" ", "_").replace("::", ".") + + def contains_valid_uris(items): """Returns boolean representing whether the input list contains any valid URIs
diff --git a/tests/functional/test_templates.py b/tests/functional/test_templates.py --- a/tests/functional/test_templates.py +++ b/tests/functional/test_templates.py @@ -40,6 +40,7 @@ def test_templates_for_empty_titles(): "format_rfc822_datetime": "warehouse.i18n.filters:format_rfc822_datetime", "format_number": "warehouse.i18n.filters:format_number", "format_classifiers": "warehouse.filters:format_classifiers", + "classifier_id": "warehouse.filters:classifier_id", "format_tags": "warehouse.filters:format_tags", "json": "warehouse.filters:tojson", "camoify": "warehouse.filters:camoify", diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py --- a/tests/unit/test_filters.py +++ b/tests/unit/test_filters.py @@ -152,6 +152,13 @@ def test_format_classifiers(inp, expected): assert list(filters.format_classifiers(inp).items()) == expected [email protected]( + ("inp", "expected"), [("Foo", "Foo"), ("Foo :: Foo", "Foo_._Foo")] +) +def test_classifier_id(inp, expected): + assert filters.classifier_id(inp) == expected + + @pytest.mark.parametrize( ("inp", "expected"), [
Invalid classifier IDs on search results page Each classifier on the search results page displays in the sidebar like so: ```html <li> <input name="c" type="checkbox" id="Framework :: AsyncIO" class="-js-form-submit-trigger checkbox-tree__checkbox" value="Framework :: AsyncIO"> <label class="checkbox-tree__label" for="Framework :: AsyncIO">AsyncIO</label> </li> ``` The ID here (e.g. `"Framework :: AsyncIO"`) is invalid HTML, as it contains spaces. See https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes/id We should also check that we are following MDN's recommendation: > Note: Using characters except ASCII letters, digits, '_', '-' and '.' may cause compatibility problems, as they weren't allowed in HTML 4. Though this restriction has been lifted in HTML5, an ID should start with a letter for compatibility.
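For reference, the patch above fixes this with a tiny Jinja2 filter. A runnable sketch of that `classifier_id` transformation — the function body is taken from the diff, the surrounding assertions are illustrative:

```python
def classifier_id(classifier):
    # Replace the characters that make the classifier an invalid HTML id:
    # spaces become underscores and the "::" separators become dots, so the
    # result contains only letters, digits, "_", "-" and "." per MDN's note.
    return classifier.replace(" ", "_").replace("::", ".")


# "Framework :: AsyncIO" -> "Framework_._AsyncIO"
assert classifier_id("Framework :: AsyncIO") == "Framework_._AsyncIO"
assert classifier_id("Foo") == "Foo"
```

The expected outputs match the `test_classifier_id` cases in the test patch above.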
2019-08-13T16:26:10Z
[]
[]
pypi/warehouse
6,518
pypi__warehouse-6518
[ "3218" ]
7212190cb9aed5ab8197f1bec544db68ca7d70f2
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -44,6 +44,7 @@ TooManyFailedLogins, ) from warehouse.accounts.models import Email, User +from warehouse.admin.flags import AdminFlagValue from warehouse.cache.origin import origin_cache from warehouse.email import send_email_verification_email, send_password_reset_email from warehouse.packaging.models import Project, Release @@ -377,7 +378,7 @@ def register(request, _form_class=RegistrationForm): if request.method == "POST" and request.POST.get("confirm_form"): return HTTPSeeOther(request.route_path("index")) - if request.flags.enabled("disallow-new-user-registration"): + if request.flags.enabled(AdminFlagValue.DISALLOW_NEW_USER_REGISTRATION): request.session.flash( ( "New user registration temporarily disabled. " diff --git a/warehouse/admin/flags.py b/warehouse/admin/flags.py --- a/warehouse/admin/flags.py +++ b/warehouse/admin/flags.py @@ -10,11 +10,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum + from sqlalchemy import Boolean, Column, Text, sql from warehouse import db +class AdminFlagValue(enum.Enum): + DISALLOW_DELETION = "disallow-deletion" + DISALLOW_NEW_PROJECT_REGISTRATION = "disallow-new-project-registration" + DISALLOW_NEW_UPLOAD = "disallow-new-upload" + DISALLOW_NEW_USER_REGISTRATION = "disallow-new-user-registration" + READ_ONLY = "read-only" + + class AdminFlag(db.ModelBase): __tablename__ = "admin_flags" @@ -36,8 +46,8 @@ def notifications(self): .all() ) - def enabled(self, flag_name): - flag = self.request.db.query(AdminFlag).get(flag_name) + def enabled(self, flag_member): + flag = self.request.db.query(AdminFlag).get(flag_member.value) return flag.enabled if flag else False diff --git a/warehouse/db.py b/warehouse/db.py --- a/warehouse/db.py +++ b/warehouse/db.py @@ -208,9 +208,9 @@ def cleanup(request): connection.close() # Check if we're in read-only mode - from warehouse.admin.flags import AdminFlag + from warehouse.admin.flags import AdminFlag, AdminFlagValue - flag = session.query(AdminFlag).get("read-only") + flag = session.query(AdminFlag).get(AdminFlagValue.READ_ONLY.value) if flag and flag.enabled and not request.user.is_superuser: request.tm.doom() diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -39,6 +39,7 @@ from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound from warehouse import forms +from warehouse.admin.flags import AdminFlagValue from warehouse.admin.squats import Squat from warehouse.classifiers.models import Classifier from warehouse.metrics import IMetricsService @@ -728,11 +729,20 @@ def validate_no_deprecated_classifiers(form, field): ) def file_upload(request): # If we're in read-only mode, let upload clients know - if request.flags.enabled("read-only"): + if request.flags.enabled(AdminFlagValue.READ_ONLY): raise _exc_with_message( HTTPForbidden, "Read-only mode: Uploads are temporarily disabled" ) + if request.flags.enabled(AdminFlagValue.DISALLOW_NEW_UPLOAD): + raise _exc_with_message( + HTTPForbidden, + "New uploads are temporarily disabled. 
" + "See {projecthelp} for details".format( + projecthelp=request.help_url(_anchor="admin-intervention") + ), + ) + # Log an attempt to upload metrics = request.find_service(IMetricsService, context=None) metrics.increment("warehouse.upload.attempt") @@ -850,7 +860,7 @@ def file_upload(request): # Check for AdminFlag set by a PyPI Administrator disabling new project # registration, reasons for this include Spammers, security # vulnerabilities, or just wanting to be lazy and not worry ;) - if request.flags.enabled("disallow-new-project-registration"): + if request.flags.enabled(AdminFlagValue.DISALLOW_NEW_PROJECT_REGISTRATION): raise _exc_with_message( HTTPForbidden, ( diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -30,6 +30,7 @@ from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService from warehouse.accounts.models import Email, User from warehouse.accounts.views import logout +from warehouse.admin.flags import AdminFlagValue from warehouse.email import ( send_account_deletion_email, send_added_as_collaborator_email, @@ -783,6 +784,18 @@ def manage_project_settings(project, request): permission="manage:project", ) def delete_project(project, request): + if request.flags.enabled(AdminFlagValue.DISALLOW_DELETION): + request.session.flash( + ( + "Project deletion temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + confirm_project(project, request, fail_route="manage.project.settings") remove_project(project, request) @@ -872,6 +885,22 @@ def manage_project_release(self): @view_config(request_method="POST", request_param=["confirm_version"]) def delete_project_release(self): + if self.request.flags.enabled(AdminFlagValue.DISALLOW_DELETION): + self.request.session.flash( + ( + "Project deletion temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + return HTTPSeeOther( + self.request.route_path( + "manage.project.release", + project_name=self.release.project.name, + version=self.release.version, + ) + ) + version = self.request.POST.get("confirm_version") if not version: self.request.session.flash("Confirm the request", queue="error") @@ -942,6 +971,13 @@ def _error(message): ) ) + if self.request.flags.enabled(AdminFlagValue.DISALLOW_DELETION): + message = ( + "Project deletion temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ) + return _error(message) + project_name = self.request.POST.get("confirm_project_name") if not project_name: diff --git a/warehouse/migrations/versions/8650482fb903_add_disallow_deletion_adminflag.py b/warehouse/migrations/versions/8650482fb903_add_disallow_deletion_adminflag.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/8650482fb903_add_disallow_deletion_adminflag.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add disallow-deletion AdminFlag + +Revision ID: 8650482fb903 +Revises: 34b18e18775c +Create Date: 2019-08-23 13:29:17.110252 +""" + +from alembic import op + +revision = "8650482fb903" +down_revision = "34b18e18775c" + + +def upgrade(): + op.execute( + """ + INSERT INTO admin_flags(id, description, enabled, notify) + VALUES ( + 'disallow-deletion', + 'Disallow ALL project and release deletions', + FALSE, + FALSE + ) + """ + ) + + +def downgrade(): + op.execute("DELETE FROM admin_flags WHERE id = 'disallow-deletion'") diff --git a/warehouse/migrations/versions/ee4c59b2ef3a_add_disallow_new_upload_adminflag.py b/warehouse/migrations/versions/ee4c59b2ef3a_add_disallow_new_upload_adminflag.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/ee4c59b2ef3a_add_disallow_new_upload_adminflag.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add disallow-new-upload AdminFlag + +Revision ID: ee4c59b2ef3a +Revises: 8650482fb903 +Create Date: 2019-08-23 22:34:29.180163 +""" + +from alembic import op + +revision = "ee4c59b2ef3a" +down_revision = "8650482fb903" + + +def upgrade(): + op.execute( + """ + INSERT INTO admin_flags(id, description, enabled, notify) + VALUES ( + 'disallow-new-upload', + 'Disallow ALL new uploads', + FALSE, + FALSE + ) + """ + ) + + +def downgrade(): + op.execute("DELETE FROM admin_flags WHERE id = 'disallow-new-upload'")
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -31,7 +31,7 @@ TokenMissing, TooManyFailedLogins, ) -from warehouse.admin.flags import AdminFlag +from warehouse.admin.flags import AdminFlag, AdminFlagValue from ...common.db.accounts import EmailFactory, UserFactory @@ -903,7 +903,9 @@ def test_register_redirect(self, db_request, monkeypatch): def test_register_fails_with_admin_flag_set(self, db_request): # This flag was already set via migration, just need to enable it - flag = db_request.db.query(AdminFlag).get("disallow-new-user-registration") + flag = db_request.db.query(AdminFlag).get( + AdminFlagValue.DISALLOW_NEW_USER_REGISTRATION.value + ) flag.enabled = True db_request.method = "POST" diff --git a/tests/unit/admin/test_flags.py b/tests/unit/admin/test_flags.py --- a/tests/unit/admin/test_flags.py +++ b/tests/unit/admin/test_flags.py @@ -10,14 +10,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum + from ...common.db.admin import AdminFlagFactory +class TestAdminFlagValues(enum.Enum): + NOT_A_REAL_FLAG = "not-a-real-flag" + THIS_FLAG_IS_ENABLED = "this-flag-is-enabled" + + class TestAdminFlag: def test_default(self, db_request): - assert not db_request.flags.enabled("not-a-real-flag") + assert not db_request.flags.enabled(TestAdminFlagValues.NOT_A_REAL_FLAG) def test_enabled(self, db_request): AdminFlagFactory(id="this-flag-is-enabled") - assert db_request.flags.enabled("this-flag-is-enabled") + assert db_request.flags.enabled(TestAdminFlagValues.THIS_FLAG_IS_ENABLED) diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -31,7 +31,7 @@ from wtforms.form import Form from wtforms.validators import ValidationError -from warehouse.admin.flags import AdminFlag +from warehouse.admin.flags import AdminFlag, AdminFlagValue from warehouse.admin.squats import Squat from warehouse.classifiers.models import Classifier from warehouse.forklift import legacy @@ -753,6 +753,25 @@ def test_is_duplicate_false(self, pyramid_config, db_request): class TestFileUpload: + def test_fails_disallow_new_upload(self, pyramid_config, pyramid_request): + pyramid_config.testing_securitypolicy(userid=1) + pyramid_request.flags = pretend.stub( + enabled=lambda value: value == AdminFlagValue.DISALLOW_NEW_UPLOAD + ) + pyramid_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/") + pyramid_request.user = pretend.stub(primary_email=pretend.stub(verified=True)) + + with pytest.raises(HTTPForbidden) as excinfo: + legacy.file_upload(pyramid_request) + + resp = excinfo.value + + assert resp.status_code == 403 + assert resp.status == ( + "403 New uploads are temporarily disabled. 
" + "See /the/help/url/ for details" + ) + @pytest.mark.parametrize("version", ["2", "3", "-1", "0", "dog", "cat"]) def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): pyramid_config.testing_securitypolicy(userid=1) @@ -1118,7 +1137,9 @@ def test_fails_with_stdlib_names(self, pyramid_config, db_request, name): def test_fails_with_admin_flag_set(self, pyramid_config, db_request): admin_flag = ( db_request.db.query(AdminFlag) - .filter(AdminFlag.id == "disallow-new-project-registration") + .filter( + AdminFlag.id == AdminFlagValue.DISALLOW_NEW_PROJECT_REGISTRATION.value + ) .first() ) admin_flag.enabled = True diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -27,6 +27,7 @@ import warehouse.utils.otp as otp from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService +from warehouse.admin.flags import AdminFlagValue from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage import views from warehouse.packaging.models import ( @@ -2014,6 +2015,7 @@ def test_delete_project_no_confirm(self): project = pretend.stub(normalized_name="foo") request = pretend.stub( POST={}, + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), route_path=lambda *a, **kw: "/foo/bar/", ) @@ -2023,6 +2025,9 @@ def test_delete_project_no_confirm(self): assert exc.value.status_code == 303 assert exc.value.headers["Location"] == "/foo/bar/" + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] assert request.session.flash.calls == [ pretend.call("Confirm the request", queue="error") ] @@ -2031,6 +2036,7 @@ def test_delete_project_wrong_confirm(self): project = pretend.stub(normalized_name="foo") request = pretend.stub( POST={"confirm_project_name": "bar"}, + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), route_path=lambda *a, **kw: "/foo/bar/", ) @@ -2040,6 +2046,9 @@ def test_delete_project_wrong_confirm(self): assert exc.value.status_code == 303 assert exc.value.headers["Location"] == "/foo/bar/" + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] assert request.session.flash.calls == [ pretend.call( "Could not delete project - 'bar' is not the same as 'foo'", @@ -2047,6 +2056,36 @@ def test_delete_project_wrong_confirm(self): ) ] + def test_delete_project_disallow_deletion(self): + project = pretend.stub(name="foo", normalized_name="foo") + request = pretend.stub( + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + ) + + result = views.delete_project(project, request) + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] + + assert request.session.flash.calls == [ + pretend.call( + ( + "Project deletion temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." 
+ ), + queue="error", + ) + ] + + assert request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + def test_delete_project(self, db_request): project = ProjectFactory.create(name="foo") @@ -2159,6 +2198,7 @@ def test_manage_project_releases(self, db_request): filename=f"foobar-{release.version}.tar.gz", packagetype="sdist", ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) assert views.manage_project_releases(project, db_request) == { "project": project, @@ -2182,6 +2222,48 @@ def test_manage_project_release(self): "files": files, } + def test_delete_project_release_disallow_deletion(self, monkeypatch): + release = pretend.stub( + version="1.2.3", + canonical_version="1.2.3", + project=pretend.stub( + name="foobar", record_event=pretend.call_recorder(lambda *a, **kw: None) + ), + ) + request = pretend.stub( + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + method="POST", + route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + ) + view = views.ManageProjectRelease(release, request) + + result = view.delete_project_release() + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] + + assert request.session.flash.calls == [ + pretend.call( + ( + "Project deletion temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + ] + + assert request.route_path.calls == [ + pretend.call( + "manage.project.release", + project_name=release.project.name, + version=release.version, + ) + ] + def test_delete_project_release(self, monkeypatch): release = pretend.stub( version="1.2.3", @@ -2197,6 +2279,7 @@ def test_delete_project_release(self, monkeypatch): delete=pretend.call_recorder(lambda a: None), add=pretend.call_recorder(lambda a: None), ), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), user=pretend.stub(username=pretend.stub()), @@ -2215,6 +2298,9 @@ def test_delete_project_release(self, monkeypatch): assert request.db.delete.calls == [pretend.call(release)] assert request.db.add.calls == [pretend.call(journal_obj)] + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] assert journal_cls.calls == [ pretend.call( name=release.project.name, @@ -2247,6 +2333,7 @@ def test_delete_project_release_no_confirm(self): POST={"confirm_version": ""}, method="POST", db=pretend.stub(delete=pretend.call_recorder(lambda a: None)), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) @@ -2261,6 +2348,9 @@ def test_delete_project_release_no_confirm(self): assert request.session.flash.calls == [ pretend.call("Confirm the request", queue="error") ] + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] assert request.route_path.calls == [ pretend.call( "manage.project.release", @@ -2275,6 +2365,7 @@ def test_delete_project_release_bad_confirm(self): POST={"confirm_version": "invalid"}, method="POST", 
db=pretend.stub(delete=pretend.call_recorder(lambda a: None)), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) @@ -2301,6 +2392,42 @@ def test_delete_project_release_bad_confirm(self): ) ] + def test_delete_project_release_file_disallow_deletion(self): + release = pretend.stub(version="1.2.3", project=pretend.stub(name="foobar")) + request = pretend.stub( + method="POST", + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + ) + view = views.ManageProjectRelease(release, request) + + result = view.delete_project_release_file() + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] + + assert request.session.flash.calls == [ + pretend.call( + ( + "Project deletion temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + ] + assert request.route_path.calls == [ + pretend.call( + "manage.project.release", + project_name=release.project.name, + version=release.version, + ) + ] + def test_delete_project_release_file(self, db_request): user = UserFactory.create() @@ -2359,6 +2486,7 @@ def test_delete_project_release_file_no_confirm(self): POST={"confirm_project_name": ""}, method="POST", db=pretend.stub(delete=pretend.call_recorder(lambda a: None)), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) @@ -2370,6 +2498,9 @@ def test_delete_project_release_file_no_confirm(self): assert result.headers["Location"] == "/the-redirect" assert request.db.delete.calls == [] + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] assert request.session.flash.calls == [ pretend.call("Confirm the request", queue="error") ] @@ -2396,6 +2527,7 @@ def no_result_found(): filter=lambda *a: pretend.stub(one=no_result_found) ), ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") db_request.session = pretend.stub( flash=pretend.call_recorder(lambda *a, **kw: None) @@ -2409,6 +2541,9 @@ def no_result_found(): assert result.headers["Location"] == "/the-redirect" assert db_request.db.delete.calls == [] + assert db_request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_DELETION) + ] assert db_request.session.flash.calls == [ pretend.call("Could not find file", queue="error") ] diff --git a/tests/unit/test_db.py b/tests/unit/test_db.py --- a/tests/unit/test_db.py +++ b/tests/unit/test_db.py @@ -24,6 +24,7 @@ from sqlalchemy.exc import OperationalError from warehouse import db +from warehouse.admin.flags import AdminFlagValue from warehouse.db import ( DEFAULT_ISOLATION, DatabaseNotAvailable, @@ -273,7 +274,7 @@ def test_create_session_read_only_mode( ) assert _create_session(request) is session_obj - assert get.calls == [pretend.call("read-only")] + assert get.calls == [pretend.call(AdminFlagValue.READ_ONLY.value)] assert request.tm.doom.calls == doom_calls
Implement an AdminFlag for halting project/release modifications Similar to #2967, we should implement an `AdminFlag` that allows us to: * halt all new uploads * prevent release/project deletion
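The patch above implements these as rows in the `admin_flags` table, looked up through an `AdminFlagValue` enum. Below is a toy, dict-backed sketch of the lookup semantics in the diff; the real `Flags` helper queries `AdminFlag` rows via SQLAlchemy, and this stand-in only illustrates the behaviour:

```python
import enum


class AdminFlagValue(enum.Enum):
    # The enum values mirror the `id` column of the admin_flags table.
    DISALLOW_DELETION = "disallow-deletion"
    DISALLOW_NEW_UPLOAD = "disallow-new-upload"


class Flags:
    """Illustrative stand-in for the request.flags helper in the diff."""

    def __init__(self, rows):
        self._rows = rows  # {flag id: enabled?}

    def enabled(self, flag_member):
        # Flags that don't exist are treated as disabled, as in the diff.
        return self._rows.get(flag_member.value, False)


flags = Flags({"disallow-new-upload": True})
assert flags.enabled(AdminFlagValue.DISALLOW_NEW_UPLOAD)
assert not flags.enabled(AdminFlagValue.DISALLOW_DELETION)
```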
As with the other spam-related features, I'm leaving this out of the milestone bucketing since it isn't pertinent specifically to [the MOSS grant](https://blog.mozilla.org/blog/2018/01/23/moss-q4-supporting-python-ecosystem/). 2 new admin flags: - Disallow uploads - Disallow release AND project deletion @di It looks like disallowing uploads is already implemented in #3393. Can you confirm the requirements?
2019-08-23T15:18:13Z
[]
[]
pypi/warehouse
6,520
pypi__warehouse-6520
[ "3200" ]
d4cdf9f1f6ea740ed713fc6de2ac8b57559157e0
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -22,7 +22,7 @@ from pyramid.response import Response from pyramid.view import view_config, view_defaults from sqlalchemy import func -from sqlalchemy.orm import joinedload +from sqlalchemy.orm import Load, joinedload from sqlalchemy.orm.exc import NoResultFound import warehouse.utils.otp as otp @@ -815,7 +815,37 @@ def destroy_project_docs(project, request): permission="manage:project", ) def manage_project_releases(project, request): - return {"project": project} + # Get the counts for all the files for this project, grouped by the + # release version and the package types + filecounts = ( + request.db.query(Release.version, File.packagetype, func.count(File.id)) + .options(Load(Release).load_only("version")) + .outerjoin(File) + .group_by(Release.id) + .group_by(File.packagetype) + .filter(Release.project == project) + .all() + ) + + # Turn rows like: + # [('0.1', 'bdist_wheel', 2), ('0.1', 'sdist', 1)] + # into: + # { + # '0.1: { + # 'bdist_wheel': 2, + # 'sdist': 1, + # 'total': 3, + # } + # } + + version_to_file_counts = {} + for version, packagetype, count in filecounts: + packagetype_to_count = version_to_file_counts.setdefault(version, {}) + packagetype_to_count.setdefault("total", 0) + packagetype_to_count[packagetype] = count + packagetype_to_count["total"] += count + + return {"project": project, "version_to_file_counts": version_to_file_counts} @view_defaults(
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2151,11 +2151,21 @@ def test_destroy_project_docs(self, db_request): class TestManageProjectReleases: - def test_manage_project_releases(self): - request = pretend.stub() - project = pretend.stub() + def test_manage_project_releases(self, db_request): + project = ProjectFactory.create(name="foobar") + release = ReleaseFactory.create(project=project, version="1.0.0") + release_file = FileFactory.create( + release=release, + filename=f"foobar-{release.version}.tar.gz", + packagetype="sdist", + ) - assert views.manage_project_releases(project, request) == {"project": project} + assert views.manage_project_releases(project, db_request) == { + "project": project, + "version_to_file_counts": { + release.version: {"total": 1, release_file.packagetype: 1} + }, + } class TestManageProjectRelease:
Redesign release table Right now we list the 'summary' of a release in the table: ![screenshot from 2018-03-09 07-04-08](https://user-images.githubusercontent.com/3323703/37194848-178b7464-2368-11e8-881c-29454067315f.png) Feedback from user testing is that this value *so rarely* changes that it's not worth exposing here. Instead we should: 1. Add information about what files are in the release 2. Try to expose the buttons directly (move them out of the dropdown if there is enough space)
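Point 1 is what the patch above implements: a query grouped by release version and package type, whose rows are then folded into a nested dict with per-version totals. The folding step from the diff, extracted as a runnable snippet (the `rows` value is the example from the patch's own comment):

```python
# Rows as produced by the grouped query in the diff:
# (release version, package type, file count)
rows = [("0.1", "bdist_wheel", 2), ("0.1", "sdist", 1)]

version_to_file_counts = {}
for version, packagetype, count in rows:
    packagetype_to_count = version_to_file_counts.setdefault(version, {})
    packagetype_to_count.setdefault("total", 0)
    packagetype_to_count[packagetype] = count
    packagetype_to_count["total"] += count

assert version_to_file_counts == {"0.1": {"bdist_wheel": 2, "sdist": 1, "total": 3}}
```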
2019-08-23T17:23:47Z
[]
[]
pypi/warehouse
6,527
pypi__warehouse-6527
[ "5825" ]
6be5b3a6662ec24c87b42ef6ab9754e3f6702e9e
diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -87,15 +87,20 @@ def __init__(self, *args, user_service, **kwargs): self.user_service = user_service -class DeleteTOTPForm(UsernameMixin, forms.Form): +class ConfirmPasswordForm(UsernameMixin, PasswordMixin, forms.Form): - __params__ = ["confirm_username"] + __params__ = ["confirm_password"] def __init__(self, *args, user_service, **kwargs): super().__init__(*args, **kwargs) self.user_service = user_service +class DeleteTOTPForm(ConfirmPasswordForm): + # TODO: delete? + pass + + class ProvisionTOTPForm(TOTPValueMixin, forms.Form): __params__ = ["totp_value"] @@ -246,15 +251,16 @@ def validate_token_scope(self, field): self.validated_scope = {"projects": [scope_value]} -class DeleteMacaroonForm(forms.Form): - __params__ = ["macaroon_id"] +class DeleteMacaroonForm(UsernameMixin, PasswordMixin, forms.Form): + __params__ = ["confirm_password", "macaroon_id"] macaroon_id = wtforms.StringField( validators=[wtforms.validators.DataRequired(message="Identifier required")] ) - def __init__(self, *args, macaroon_service, **kwargs): + def __init__(self, *args, macaroon_service, user_service, **kwargs): super().__init__(*args, **kwargs) + self.user_service = user_service self.macaroon_service = macaroon_service def validate_macaroon_id(self, field): diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -45,6 +45,7 @@ AddEmailForm, ChangePasswordForm, ChangeRoleForm, + ConfirmPasswordForm, CreateMacaroonForm, CreateRoleForm, DeleteMacaroonForm, @@ -309,18 +310,22 @@ def change_password(self): return {**self.default_response, "change_password_form": form} - @view_config(request_method="POST", request_param=["confirm_username"]) + @view_config(request_method="POST", request_param=DeleteTOTPForm.__params__) def delete_account(self): - username = self.request.params.get("confirm_username") - - if not username: + confirm_password = self.request.params.get("confirm_password") + if not confirm_password: self.request.session.flash("Confirm the request", queue="error") return self.default_response - if username != self.request.user.username: + form = ConfirmPasswordForm( + password=confirm_password, + username=self.request.user.username, + user_service=self.user_service, + ) + + if not form.validate(): self.request.session.flash( - f"Could not delete account - {username!r} is not the same as " - f"{self.request.user.username!r}", + f"Could not delete account - Invalid credentials. Please try again.", queue="error", ) return self.default_response @@ -475,7 +480,7 @@ def delete_totp(self): return HTTPSeeOther(self.request.route_path("manage.account")) form = DeleteTOTPForm( - **self.request.POST, + password=self.request.POST["confirm_password"], username=self.request.user.username, user_service=self.user_service, ) @@ -494,7 +499,7 @@ def delete_totp(self): queue="success", ) else: - self.request.session.flash("Invalid credentials", queue="error") + self.request.session.flash("Invalid credentials. 
Try again", queue="error") return HTTPSeeOther(self.request.route_path("manage.account")) @@ -636,7 +641,9 @@ def default_response(self): project_names=self.project_names, ), "delete_macaroon_form": DeleteMacaroonForm( - macaroon_service=self.macaroon_service + username=self.request.user.username, + user_service=self.user_service, + macaroon_service=self.macaroon_service, ), } @@ -704,7 +711,11 @@ def create_macaroon(self): @view_config(request_method="POST", request_param=DeleteMacaroonForm.__params__) def delete_macaroon(self): form = DeleteMacaroonForm( - **self.request.POST, macaroon_service=self.macaroon_service + password=self.request.POST["confirm_password"], + macaroon_id=self.request.POST["macaroon_id"], + macaroon_service=self.macaroon_service, + username=self.request.user.username, + user_service=self.user_service, ) if form.validate(): @@ -735,6 +746,8 @@ def delete_macaroon(self): self.request.session.flash( f"Deleted API token '{macaroon.description}'.", queue="success" ) + else: + self.request.session.flash("Invalid credentials. Try again", queue="error") redirect_to = self.request.referer if not is_safe_url(redirect_to, host=self.request.host):
diff --git a/tests/frontend/confirm_controller_test.js b/tests/frontend/confirm_controller_test.js --- a/tests/frontend/confirm_controller_test.js +++ b/tests/frontend/confirm_controller_test.js @@ -22,23 +22,18 @@ describe("Confirm controller", () => { document.body.innerHTML = ` <div class="modal" data-controller="confirm"> <div class="modal__content" role="dialog"> - <a id="cancel" href="#modal-close" data-action="click->confirm#cancel" title="Close" class="modal__close"> - <i class="fa fa-times" aria-hidden="true"></i> - <span class="sr-only">close</span> - </a> - <div class="modal__body"> - <h3 class="modal__title">Delete package?</h3> + <div class="modal__body"> + <h3 class="modal__title">Delete package?</h3> <p>Confirm to continue.</p> <label for="package">Delete</label> <input id="input-target" name="package" data-action="input->confirm#check" data-target="confirm.input" type="text" autocomplete="off" autocorrect="off" autocapitalize="off"> </div> <div class="modal__footer"> - <button type="reset" data-action="click->confirm#cancel">Cancel</button> - <button id="button-target" data-target="confirm.button" data-expected="package" type="submit"> - Confirm - </button> - </div> - </form> + <button id="button-target" data-target="confirm.button" data-expected="package" type="submit"> + Confirm + </button> + </div> + </form> </div> </div> `; @@ -57,18 +52,6 @@ describe("Confirm controller", () => { }); describe("functionality", function() { - describe("clicking cancel", function() { - it("sets the window location, resets the input target and disables the button", function() { - document.getElementById("cancel").click(); - - expect(window.location.href).toContain("#modal-close"); - const inputTarget = document.getElementById("input-target"); - expect(inputTarget.value).toEqual(""); - const buttonTarget = document.getElementById("button-target"); - expect(buttonTarget).toHaveAttribute("disabled"); - }); - }); - describe("entering expected text", function() { it("enables the button", function() { fireEvent.input(document.getElementById("input-target"), { target: { value: "package" } }); diff --git a/tests/frontend/modal_close_controller_test.js b/tests/frontend/modal_close_controller_test.js new file mode 100644 --- /dev/null +++ b/tests/frontend/modal_close_controller_test.js @@ -0,0 +1,56 @@ +/* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* global expect, beforeEach, describe, it */ + +import { Application } from "stimulus"; +import ModalCloseController from "../../warehouse/static/js/warehouse/controllers/modal_close_controller"; + +describe("Modal close controller", () => { + beforeEach(() => { + document.body.innerHTML = ` + <div class="modal" data-controller="modal-close"> + <div class="modal__content" role="dialog"> + <a id="cancel" href="#modal-close" data-action="click->modal-close#cancel" title="Close" class="modal__close"> + <i class="fa fa-times" aria-hidden="true"></i> + <span class="sr-only">close</span> + </a> + <div class="modal__body"> + <h3 class="modal__title">Modal Title</h3> + <input id="input-target" name="package" data-target="modal-close.input" type="text" autocomplete="off" autocorrect="off" autocapitalize="off"> + <div class="modal__footer"> + <button id="button-target" data-target="modal-close.button" type="submit"> + Confirm + </button> + </div> + </div> + </div> + </div> + `; + + const application = Application.start(); + application.register("modal-close", ModalCloseController); + }); + + describe("clicking cancel", function() { + it("sets the window location, resets the input target and disables the button", function() { + document.getElementById("cancel").click(); + + expect(window.location.href).toContain("#modal-close"); + const inputTarget = document.getElementById("input-target"); + expect(inputTarget.value).toEqual(""); + const buttonTarget = document.getElementById("button-target"); + expect(buttonTarget).toHaveAttribute("disabled"); + }); + }); +}); diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -167,6 +167,19 @@ def test_creation(self): assert form.user_service is user_service + def test_validate_confirm_password(self): + user_service = pretend.stub( + find_userid=pretend.call_recorder(lambda userid: 1), + check_password=pretend.call_recorder( + lambda userid, password, tags=None: True + ), + ) + form = forms.DeleteTOTPForm( + username="username", user_service=user_service, password="password" + ) + + assert form.validate() + class TestProvisionWebAuthnForm: def test_creation(self): @@ -433,16 +446,26 @@ def test_validate_token_scope_valid_project(self): class TestDeleteMacaroonForm: def test_creation(self): macaroon_service = pretend.stub() - form = forms.DeleteMacaroonForm(macaroon_service=macaroon_service) + user_service = pretend.stub() + form = forms.DeleteMacaroonForm( + macaroon_service=macaroon_service, user_service=user_service + ) assert form.macaroon_service is macaroon_service + assert form.user_service is user_service def test_validate_macaroon_id_invalid(self): macaroon_service = pretend.stub( find_macaroon=pretend.call_recorder(lambda id: None) ) + user_service = pretend.stub( + find_userid=lambda *a, **kw: 1, check_password=lambda *a, **kw: True + ) form = forms.DeleteMacaroonForm( - data={"macaroon_id": pretend.stub()}, macaroon_service=macaroon_service + data={"macaroon_id": pretend.stub(), "password": "password"}, + macaroon_service=macaroon_service, + user_service=user_service, + username="username", ) assert not form.validate() @@ -452,8 +475,14 @@ def test_validate_macaroon_id(self): macaroon_service = pretend.stub( find_macaroon=pretend.call_recorder(lambda id: pretend.stub()) ) + user_service = pretend.stub( + find_userid=lambda *a, **kw: 1, check_password=lambda *a, **kw: True + ) form = forms.DeleteMacaroonForm( - data={"macaroon_id": 
pretend.stub()}, macaroon_service=macaroon_service + data={"macaroon_id": pretend.stub(), "password": "password"}, + macaroon_service=macaroon_service, + username="username", + user_service=user_service, ) assert form.validate() diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -667,9 +667,15 @@ def test_delete_account(self, monkeypatch, db_request): jid = JournalEntryFactory.create(submitted_by=user).id db_request.user = user - db_request.params = {"confirm_username": user.username} + db_request.params = {"confirm_password": user.password} db_request.find_service = lambda *a, **kw: pretend.stub() + confirm_password_obj = pretend.stub(validate=lambda: True) + confirm_password_cls = pretend.call_recorder( + lambda *a, **kw: confirm_password_obj + ) + monkeypatch.setattr(views, "ConfirmPasswordForm", confirm_password_cls) + monkeypatch.setattr( views.ManageAccountViews, "default_response", pretend.stub() ) @@ -698,7 +704,7 @@ def test_delete_account(self, monkeypatch, db_request): def test_delete_account_no_confirm(self, monkeypatch): request = pretend.stub( - params={"confirm_username": ""}, + params={"confirm_password": ""}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: pretend.stub(), ) @@ -716,12 +722,18 @@ def test_delete_account_no_confirm(self, monkeypatch): def test_delete_account_wrong_confirm(self, monkeypatch): request = pretend.stub( - params={"confirm_username": "invalid"}, + params={"confirm_password": "invalid"}, user=pretend.stub(username="username"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: pretend.stub(), ) + confirm_password_obj = pretend.stub(validate=lambda: False) + confirm_password_cls = pretend.call_recorder( + lambda *a, **kw: confirm_password_obj + ) + monkeypatch.setattr(views, "ConfirmPasswordForm", confirm_password_cls) + monkeypatch.setattr( views.ManageAccountViews, "default_response", pretend.stub() ) @@ -731,19 +743,25 @@ def test_delete_account_wrong_confirm(self, monkeypatch): assert view.delete_account() == view.default_response assert request.session.flash.calls == [ pretend.call( - "Could not delete account - 'invalid' is not the same as " "'username'", + "Could not delete account - Invalid credentials. 
Please try again.", queue="error", ) ] def test_delete_account_has_active_projects(self, monkeypatch): request = pretend.stub( - params={"confirm_username": "username"}, + params={"confirm_password": "password"}, user=pretend.stub(username="username"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: pretend.stub(), ) + confirm_password_obj = pretend.stub(validate=lambda: True) + confirm_password_cls = pretend.call_recorder( + lambda *a, **kw: confirm_password_obj + ) + monkeypatch.setattr(views, "ConfirmPasswordForm", confirm_password_cls) + monkeypatch.setattr( views.ManageAccountViews, "default_response", pretend.stub() ) @@ -1081,7 +1099,7 @@ def test_delete_totp(self, monkeypatch, db_request): record_event=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( - POST={"confirm_username": pretend.stub()}, + POST={"confirm_password": pretend.stub()}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -1124,13 +1142,13 @@ def test_delete_totp(self, monkeypatch, db_request): ) ] - def test_delete_totp_bad_username(self, monkeypatch, db_request): + def test_delete_totp_bad_password(self, monkeypatch, db_request): user_service = pretend.stub( get_totp_secret=lambda id: b"secret", update_user=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( - POST={"confirm_username": pretend.stub()}, + POST={"confirm_password": pretend.stub()}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -1152,7 +1170,7 @@ def test_delete_totp_bad_username(self, monkeypatch, db_request): assert user_service.update_user.calls == [] assert request.session.flash.calls == [ - pretend.call("Invalid credentials", queue="error") + pretend.call("Invalid credentials. 
Try again", queue="error") ] assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/foo/bar/" @@ -1163,7 +1181,7 @@ def test_delete_totp_not_provisioned(self, monkeypatch, db_request): update_user=pretend.call_recorder(lambda *a, **kw: None), ) request = pretend.stub( - POST={"confirm_username": pretend.stub()}, + POST={"confirm_password": pretend.stub()}, session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, user=pretend.stub( @@ -1470,7 +1488,7 @@ def test_default_response(self, monkeypatch): ) request = pretend.stub( - user=pretend.stub(id=pretend.stub()), + user=pretend.stub(id=pretend.stub(), username=pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: pretend.stub(), IUserService: pretend.stub(), @@ -1768,7 +1786,7 @@ def test_delete_macaroon_invalid_form(self, monkeypatch): delete_macaroon=pretend.call_recorder(lambda id: pretend.stub()) ) request = pretend.stub( - POST={}, + POST={"confirm_password": "password", "macaroon_id": "macaroon_id"}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, @@ -1776,6 +1794,8 @@ def test_delete_macaroon_invalid_form(self, monkeypatch): }[interface], referer="/fake/safe/route", host=None, + user=pretend.stub(username=pretend.stub()), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) delete_macaroon_obj = pretend.stub(validate=lambda: False) @@ -1791,13 +1811,16 @@ def test_delete_macaroon_invalid_form(self, monkeypatch): assert isinstance(result, HTTPSeeOther) assert result.location == "/fake/safe/route" assert macaroon_service.delete_macaroon.calls == [] + assert request.session.flash.calls == [ + pretend.call("Invalid credentials. 
Try again", queue="error") + ] def test_delete_macaroon_dangerous_redirect(self, monkeypatch): macaroon_service = pretend.stub( delete_macaroon=pretend.call_recorder(lambda id: pretend.stub()) ) request = pretend.stub( - POST={}, + POST={"confirm_password": "password", "macaroon_id": "macaroon_id"}, route_path=pretend.call_recorder(lambda x: "/safe/route"), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, @@ -1805,6 +1828,8 @@ def test_delete_macaroon_dangerous_redirect(self, monkeypatch): }[interface], referer="http://google.com/", host=None, + user=pretend.stub(username=pretend.stub()), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), ) delete_macaroon_obj = pretend.stub(validate=lambda: False) @@ -1834,7 +1859,7 @@ def test_delete_macaroon(self, monkeypatch): ) user_service = pretend.stub(record_event=record_event) request = pretend.stub( - POST={}, + POST={"confirm_password": "password", "macaroon_id": "macaroon_id"}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, @@ -1843,7 +1868,7 @@ def test_delete_macaroon(self, monkeypatch): session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), referer="/fake/safe/route", host=None, - user=pretend.stub(id=pretend.stub()), + user=pretend.stub(id=pretend.stub(), username=pretend.stub()), remote_addr="0.0.0.0", ) @@ -1893,7 +1918,7 @@ def test_delete_macaroon_records_events_for_each_project(self, monkeypatch): ) user_service = pretend.stub(record_event=record_event) request = pretend.stub( - POST={}, + POST={"confirm_password": pretend.stub(), "macaroon_id": pretend.stub()}, route_path=pretend.call_recorder(lambda x: pretend.stub()), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service,
2FA: Ask for password, not username, when disabling 2FA **Describe the bug** When disabling 2FA for an account, it would make sense to ask for the user's password and not the username for confirmation, as that would make it much harder to do drive-by disabling on unattended computers. **Expected behavior** Ask for the password when disabling 2FA. **To Reproduce** Try disabling 2FA on https://pypi.org/manage/account/.
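A toy sketch of the confirmation flow the patch above introduces: the destructive action is gated on the *password* checking out against the user service, rather than on retyping the username. `FakeUserService` is an illustrative stand-in for warehouse's `IUserService`; the real code routes this through the `ConfirmPasswordForm` WTForms class in the diff:

```python
class FakeUserService:
    """Illustrative stand-in for warehouse's IUserService."""

    def __init__(self, passwords):
        self._passwords = passwords  # {username: password}

    def find_userid(self, username):
        return username if username in self._passwords else None

    def check_password(self, userid, password, tags=None):
        return self._passwords.get(userid) == password


def may_disable_totp(user_service, username, confirm_password):
    # Mirrors the intent of ConfirmPasswordForm: only a correct password
    # authorizes disabling 2FA; knowing the username is not enough.
    userid = user_service.find_userid(username)
    return userid is not None and user_service.check_password(userid, confirm_password)


svc = FakeUserService({"alice": "hunter2"})
assert may_disable_totp(svc, "alice", "hunter2")
assert not may_disable_totp(svc, "alice", "alice")  # username alone no longer works
```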
Yeah, I thought this was a bit odd. Separate ticket, but possibly related: should Warehouse require a password to enable 2FA? To prevent similar drive-by enabling of 2FA on unattended computers. This was a subject of some discussion in #5567 (although I'm currently having trouble finding the exact comment). As far as I know, requiring a password (or just a second factor) when disabling 2FA is not a consistent practice across major services. Both Google and GitHub will prompt you for some kind of authentication if you haven't accessed their security pages in a while but both retain a cookie that allows future visits without re-authentication, eliminating the drive-by protection. Google requires the user's password, while GitHub allows the user to use one of their second factor methods. Twitter requires the user's password twice: once to access the security page, and again to actually disable 2FA. The theory behind the current behavior is that (1) the user is already in an authenticated context, one where they have (almost) absolute control over the state of their account, (2) anything that strictly decreases the security of their account should require confirmation, but **not** require them to jump through hoops. Therefore, we make them confirm their intent by typing their username out, but don't encourage security fatigue by re-prompting for their password. On a more general note, a drive-by attacker can currently do far worse than disable the user's 2FA method: they can transfer all packages to a temporary account, delete the current account (only the username is required), and then create a new, identically named account that they control completely. As far as I know (I might be wrong!), none of that would be visible from an package consumer's perspective. In summary: I think we should treat drive-by attacks as black swan events and mitigate them in other ways (audit logging, email alerts when an account's security posture changes, &c). That isn't to say that changing the behavior to require a password would be _bad_; I just think it's unnecessary and attempts to cover an attack that I think is unlikely (why reduce the account's security to a single factor when you can own it entirely?) We could take an approach like Google or GitHub, but I see relatively little payoff: the two are still vulnerable to drive-bys once the user is authenticated. > This was a subject of some discussion in #5567 (although I'm currently having trouble finding the exact comment). > > As far as I know, requiring a password (or just a second factor) when disabling 2FA is not a consistent practice across major services. Both Google and GitHub will prompt you for some kind of authentication if you haven't accessed their security pages in a while but both retain a cookie that allows future visits without re-authentication, eliminating the drive-by protection. Google requires the user's password, while GitHub allows the user to use one of their second factor methods. Twitter requires the user's password twice: once to access the security page, and again to actually disable 2FA. > > The theory behind the current behavior is that (1) the user is already in an authenticated context, one where they have (almost) absolute control over the state of their account, (2) anything that strictly decreases the security of their account should require confirmation, but **not** require them to jump through hoops. 
Therefore, we make them confirm their intent by typing their username out, but don't encourage security fatigue by re-prompting for their password. > I understand (1) for sure, but don't follow you regarding (2) to be honest. It seems to me that confirmation via a password is not an unreasonable amount of effort, while using the username effectively is no effort at all. IOW entering a username to authorize a possibly dangerous change is not an expected UX pattern (to me!), in contrast to entering a password. > On a more general note, a drive-by attacker can currently do far worse than disable the user's 2FA method: they can transfer all packages to a temporary account, delete the current account (only the username is required), and then create a new, identically named account that they control completely. As far as I know (I might be wrong!), none of that would be visible from a package consumer's perspective. > Sure, although I didn't raise this issue for any complex drive-by scenario, but about one that is relatively easy to achieve with very little exposure to a victim's computer, which would reduce the risk for an attacker. Additional means to inform the user *when* 2FA is disabled would of course be useful. (Another idea for a more complex attack: a compromised browser extension can disable 2FA without user interaction since it can parse the username from the account settings page) > In summary: I think we should treat drive-by attacks as black swan events and mitigate them in other ways (audit logging, email alerts when an account's security posture changes, &c). That isn't to say that changing the behavior to require a password would be _bad_; I just think it's unnecessary and attempts to cover an attack that I think is unlikely (why reduce the account's security to a single factor when you can own it entirely?) > FWIW I gave feedback to a feature that I'm beta-testing, so this seems like a case of an unfinished product feature. What struck me as odd about the use of a username for disabling 2FA was basically me not having seen the pattern before (at all) and the chance to simply use the established pattern of using the user's password for confirming a possibly dangerous action. If in doubt I think it makes sense to not use the username but the password, basically. > We could take an approach like Google or GitHub, but I see relatively little payoff: the two are still vulnerable to drive-bys once the user is authenticated. *shrug* I haven't done any user research to prove my experience, so I'll let you decide what to do with my feedback. I too think asking for the username is odd, and asking for the password is more appropriate for an action like removing 2FA. Asking for a username is functionally equivalent to a confirmation dialog but requires slightly more typing/effort for nothing much here. I don't think asking for the username for a destructive action is a pattern I've seen elsewhere, and switching to either a confirmation dialog or to asking for the password would be a win in this case IMHO. @nlhkabu I have a suggestion here, which is: perhaps you could skim https://simplysecure.org/knowledge-base/ to check whether there's research or advice there on what to require of a user in this scenario? Thanks for elaborating on your thinking here, @jezdez -- I appreciate the detail! @brainwane thank you for this link - it looks like a great resource; however, unfortunately, I couldn't find anything specific to this scenario. 
My preference here is to change this to a password validation pattern, based on this feedback. I don't think it _matters_ if it is no more secure (although obviously it would matter if it were _less_ secure) -> if it is more familiar and/or makes people feel more comfortable, that's enough to justify the change from a UX perspective. @woodruffw I've added this to the OTF security work milestone. Let's make a PR for this once the webauthn PR is done? The new modal should look like this: ![Screenshot from 2019-05-23 06-35-57](https://user-images.githubusercontent.com/3323703/58228026-29334a80-7d25-11e9-89d8-90f5d7e940a4.png) Additional features: - The user should be able to toggle the visibility of the password, as on our other password fields - The button should be disabled until a password is entered (as per the current behaviour) > Let's make a PR for this once the webauthn PR is done? Sounds good to me! @brainwane 💯 agreed that, as with other websites (say GitHub or Google), and assuming the user is logged in, the password should be required to edit any sensitive security setting, not just 2FA. Maybe a decorator would help? As discussed today with @woodruffw, we plan to update this UI to: - Ask for password on TOTP modal - Ask for password and key label on webauthn modal > The button should be disabled until the correct password is entered (as per the current behaviour) I'll flag that this seems a little odd and tricky to achieve to me, since the password checking should always happen on the backend. >> The button should be disabled until the correct password is entered (as per the current behaviour) > > I'll flag that this seems a little odd and tricky to achieve to me, since the password checking should always happen on the backend. Agreed. I wonder if this should actually be: > The button should be disabled until **a non-empty** password is entered --- Two notes: * **Elsewhere in the UI, the button is disabled until you enter *any* password, regardless of correctness.** I made a todo note a while back to poke at this a bit and see what’s going on. But I went to look just now and I couldn’t find anywhere in the settings panels where the user-side code is checking the *correctness* of the password. Closest I could find was the “Change Password” section: ![Screenshot 2019-06-19 at 22 21 12](https://user-images.githubusercontent.com/301220/59802292-a16b3c80-92e0-11e9-894d-c12d53e13acc.png) Here, the button is disabled until you enter *something* in the “Old password” field (my password is not three characters 😉 ) and both the passwords entered in the “New password” field match (which is a reasonable check to perform client-side). If I go ahead and click “Update password”, I get an error: ![Screenshot 2019-06-19 at 22 23 50](https://user-images.githubusercontent.com/301220/59802435-f9a23e80-92e0-11e9-8c45-c1bd7fbb8756.png) * **The current “disable 2FA” screen checks for correctness of username.** This is a PyPI username (I think), but it's not my username: ![Screenshot 2019-06-19 at 22 26 09](https://user-images.githubusercontent.com/301220/59802626-48e86f00-92e1-11e9-894d-00b2b84e1ce2.png) The button doesn’t get enabled until I enter my username – which isn’t an unreasonable check to perform client-side, because my username isn’t secret information. Is this what @nlhkabu meant by “as per the current behaviour”? Yes, you're right @alexwlchan - I'll update my comment above :) This requires some JavaScript work. @yeraydiazdiaz or @di do you have time to help with this? 
I can have a go at it.

That would be great, thank you, @yeraydiazdiaz! This is an issue we want to resolve before we can declare the WebAuthn beta finished, so I particularly appreciate your help with this.
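For reference, a minimal sketch of the password-confirmation pattern discussed above. The form name here is an illustrative assumption rather than the code that eventually landed, but `user_service.check_password(user_id, password)` mirrors the account-service API visible in the warehouse diffs elsewhere in this document; correctness is only ever judged server-side, while client-side JavaScript merely keeps the submit button disabled until the field is non-empty:

```python
# Illustrative sketch only -- not the code that landed in warehouse.
import wtforms


class ConfirmPasswordForm(wtforms.Form):
    password = wtforms.PasswordField(
        validators=[wtforms.validators.DataRequired()]
    )

    def __init__(self, *args, user_id, user_service, **kwargs):
        super().__init__(*args, **kwargs)
        self.user_id = user_id
        self.user_service = user_service

    def validate_password(self, field):
        # wtforms calls validate_<fieldname> automatically; the password is
        # checked against the stored hash on the server, never in the browser.
        if not self.user_service.check_password(self.user_id, field.data):
            raise wtforms.validators.ValidationError(
                "The password is invalid. Try again."
            )
```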
2019-08-25T14:23:55Z
[]
[]
pypi/warehouse
6,535
pypi__warehouse-6535
[ "6455" ]
29573266a67c6fb2b6e8e0ffafb1f0734cdc2dcf
diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py --- a/warehouse/accounts/forms.py +++ b/warehouse/accounts/forms.py @@ -23,6 +23,7 @@ from warehouse.accounts.interfaces import TooManyFailedLogins from warehouse.accounts.models import DisableReason from warehouse.email import send_password_compromised_email_hibp +from warehouse.i18n import localize as _ from warehouse.utils.otp import TOTP_LENGTH @@ -34,7 +35,9 @@ def validate_username(self, field): userid = self.user_service.find_userid(field.data) if userid is None: - raise wtforms.validators.ValidationError("No user found with that username") + raise wtforms.validators.ValidationError( + _("No user found with that username") + ) class TOTPValueMixin: @@ -44,7 +47,10 @@ class TOTPValueMixin: wtforms.validators.DataRequired(), wtforms.validators.Regexp( rf"^[0-9]{{{TOTP_LENGTH}}}$", - message=f"TOTP code must be {TOTP_LENGTH} digits.", + message=_( + "TOTP code must be ${totp_length} digits.", + mapping={"totp_length": TOTP_LENGTH}, + ), ), ] ) @@ -61,13 +67,13 @@ class NewUsernameMixin: validators=[ wtforms.validators.DataRequired(), wtforms.validators.Length( - max=50, message=("Choose a username with 50 characters or less.") + max=50, message=_("Choose a username with 50 characters or less.") ), # the regexp below must match the CheckConstraint # for the username field in accounts.models.User wtforms.validators.Regexp( r"^[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]$", - message=( + message=_( "The username is invalid. Usernames " "must be composed of letters, numbers, " "dots, hyphens and underscores. And must " @@ -81,8 +87,10 @@ class NewUsernameMixin: def validate_username(self, field): if self.user_service.find_userid(field.data) is not None: raise wtforms.validators.ValidationError( - "This username is already being used by another " - "account. Choose a different username." + _( + "This username is already being used by another " + "account. Choose a different username." + ) ) @@ -102,12 +110,14 @@ def validate_password(self, field): userid, field.data, tags=self._check_password_metrics_tags ): raise wtforms.validators.ValidationError( - "The password is invalid. Try again." + _("The password is invalid. Try again.") ) except TooManyFailedLogins: raise wtforms.validators.ValidationError( - "There have been too many unsuccessful login attempts, " - "try again later." + _( + "There have been too many unsuccessful login attempts, " + "try again later." + ) ) from None @@ -126,7 +136,7 @@ class NewPasswordMixin: validators=[ wtforms.validators.DataRequired(), wtforms.validators.EqualTo( - "new_password", "Your passwords don't match. Try again." + "new_password", message=_("Your passwords don't match. Try again.") ), ] ) @@ -157,7 +167,7 @@ class NewEmailMixin: validators=[ wtforms.validators.DataRequired(), wtforms.validators.Regexp( - r".+@.+\..+", message=("The email address isn't valid. Try again.") + r".+@.+\..+", message=_("The email address isn't valid. Try again.") ), ] ) @@ -167,20 +177,26 @@ def validate_email(self, field): if userid and userid == self.user_id: raise wtforms.validators.ValidationError( - f"This email address is already being used by this account. " - f"Use a different email." + _( + "This email address is already being used by this account. " + "Use a different email." + ) ) if userid: raise wtforms.validators.ValidationError( - f"This email address is already being used by another account. " - f"Use a different email." + _( + "This email address is already being used " + "by another account. 
Use a different email." + ) ) domain = field.data.split("@")[-1] if domain in disposable_email_domains.blacklist: raise wtforms.validators.ValidationError( - "You can't use an email address from this domain. Use a " - "different email." + _( + "You can't use an email address from this domain. Use a " + "different email." + ) ) @@ -199,7 +215,7 @@ class RegistrationForm( validators=[ wtforms.validators.Length( max=100, - message=( + message=_( "The name is too long. " "Choose a name with 100 characters or less." ), @@ -263,7 +279,7 @@ def validate_totp_value(self, field): totp_value = field.data.encode("utf8") if not self.user_service.check_totp_value(self.user_id, totp_value): - raise wtforms.validators.ValidationError("Invalid TOTP code.") + raise wtforms.validators.ValidationError(_("Invalid TOTP code.")) class WebAuthnAuthenticationForm(WebAuthnCredentialMixin, _TwoFactorAuthenticationForm): @@ -280,7 +296,7 @@ def validate_credential(self, field): assertion_dict = json.loads(field.data.encode("utf8")) except json.JSONDecodeError: raise wtforms.validators.ValidationError( - f"Invalid WebAuthn assertion: Bad payload" + _("Invalid WebAuthn assertion: Bad payload") ) try: @@ -313,7 +329,7 @@ def validate_username_or_email(self, field): username_or_email = self.user_service.get_user_by_email(field.data) if username_or_email is None: raise wtforms.validators.ValidationError( - "No user found with that username or email" + _("No user found with that username or email") ) diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -47,6 +47,7 @@ from warehouse.admin.flags import AdminFlagValue from warehouse.cache.origin import origin_cache from warehouse.email import send_email_verification_email, send_password_reset_email +from warehouse.i18n import localize as _ from warehouse.packaging.models import Project, Release from warehouse.utils.http import is_safe_url @@ -56,7 +57,7 @@ @view_config(context=TooManyFailedLogins) def failed_logins(exc, request): resp = HTTPTooManyRequests( - "There have been too many unsuccessful login attempts. " "Try again later.", + _("There have been too many unsuccessful login attempts. 
" "Try again later."), retry_after=exc.resets_in.total_seconds(), ) @@ -193,7 +194,7 @@ def two_factor_and_totp_validate(request, _form_class=TOTPAuthenticationForm): try: two_factor_data = _get_two_factor_data(request) except TokenException: - request.session.flash("Invalid or expired two factor login.", queue="error") + request.session.flash(_("Invalid or expired two factor login."), queue="error") return HTTPSeeOther(request.route_path("accounts.login")) userid = two_factor_data.get("userid") @@ -241,13 +242,13 @@ def two_factor_and_totp_validate(request, _form_class=TOTPAuthenticationForm): ) def webauthn_authentication_options(request): if request.authenticated_userid is not None: - return {"fail": {"errors": ["Already authenticated"]}} + return {"fail": {"errors": [_("Already authenticated")]}} try: two_factor_data = _get_two_factor_data(request) except TokenException: - request.session.flash("Invalid or expired two factor login.", queue="error") - return {"fail": {"errors": ["Invalid two factor token"]}} + request.session.flash(_("Invalid or expired two factor login."), queue="error") + return {"fail": {"errors": [_("Invalid or expired two factor login.")]}} userid = two_factor_data.get("userid") user_service = request.find_service(IUserService, context=None) @@ -272,8 +273,8 @@ def webauthn_authentication_validate(request): try: two_factor_data = _get_two_factor_data(request) except TokenException: - request.session.flash("Invalid or expired two factor login.", queue="error") - return {"fail": {"errors": ["Invalid two factor token"]}} + request.session.flash(_("Invalid or expired two factor login."), queue="error") + return {"fail": {"errors": [_("Invalid or expired two factor login.")]}} redirect_to = two_factor_data.get("redirect_to") userid = two_factor_data.get("userid") @@ -303,7 +304,10 @@ def webauthn_authentication_validate(request): .hexdigest() .lower(), ) - return {"success": "Successful WebAuthn assertion", "redirect_to": redirect_to} + return { + "success": _("Successful WebAuthn assertion"), + "redirect_to": redirect_to, + } errors = [str(error) for error in form.credential.errors] return {"fail": {"errors": errors}} @@ -380,7 +384,7 @@ def register(request, _form_class=RegistrationForm): if request.flags.enabled(AdminFlagValue.DISALLOW_NEW_USER_REGISTRATION): request.session.flash( - ( + _( "New user registration temporarily disabled. " "See https://pypi.org/help#admin-intervention for details." 
), @@ -475,35 +479,37 @@ def _error(message): token = request.params.get("token") data = token_service.loads(token) except TokenExpired: - return _error("Expired token: request a new password reset link") + return _error(_("Expired token: request a new password reset link")) except TokenInvalid: - return _error("Invalid token: request a new password reset link") + return _error(_("Invalid token: request a new password reset link")) except TokenMissing: - return _error("Invalid token: no token supplied") + return _error(_("Invalid token: no token supplied")) # Check whether this token is being used correctly if data.get("action") != "password-reset": - return _error("Invalid token: not a password reset token") + return _error(_("Invalid token: not a password reset token")) # Check whether a user with the given user ID exists user = user_service.get_user(uuid.UUID(data.get("user.id"))) if user is None: - return _error("Invalid token: user not found") + return _error(_("Invalid token: user not found")) # Check whether the user has logged in since the token was created last_login = data.get("user.last_login") if str(user.last_login) > last_login: # TODO: track and audit this, seems alertable return _error( - "Invalid token: user has logged in since this token was requested" + _("Invalid token: user has logged in since " "this token was requested") ) # Check whether the password has been changed since the token was created password_date = data.get("user.password_date") if str(user.password_date) > password_date: return _error( - "Invalid token: password has already been changed since this " - "token was requested" + _( + "Invalid token: password has already been changed since this " + "token was requested" + ) ) form = _form_class( @@ -523,7 +529,7 @@ def _error(message): ) # Flash a success message - request.session.flash("You have reset your password", queue="success") + request.session.flash(_("You have reset your password"), queue="success") # Redirect to account login. 
return HTTPSeeOther(request.route_path("accounts.login")) @@ -545,15 +551,15 @@ def _error(message): token = request.params.get("token") data = token_service.loads(token) except TokenExpired: - return _error("Expired token: request a new verification link") + return _error(_("Expired token: request a new email verification link")) except TokenInvalid: - return _error("Invalid token: request a new verification link") + return _error(_("Invalid token: request a new email verification link")) except TokenMissing: - return _error("Invalid token: no token supplied") + return _error(_("Invalid token: no token supplied")) # Check whether this token is being used correctly if data.get("action") != "email-verify": - return _error("Invalid token: not an email verification token") + return _error(_("Invalid token: not an email verification token")) try: email = ( @@ -562,10 +568,10 @@ def _error(message): .one() ) except NoResultFound: - return _error("Email not found") + return _error(_("Email not found")) if email.verified: - return _error("Email already verified") + return _error(_("Email already verified")) email.verified = True email.unverify_reason = None @@ -577,14 +583,18 @@ def _error(message): ) if not email.primary: - confirm_message = "You can now set this email as your primary address" + confirm_message = _("You can now set this email as your primary address") else: - confirm_message = "This is your primary address" + confirm_message = _("This is your primary address") request.user.is_active = True request.session.flash( - f"Email address {email.email} verified. {confirm_message}.", queue="success" + _( + "Email address ${email_address} verified. ${confirm_message}.", + mapping={"email_address": email.email, "confirm_message": confirm_message}, + ), + queue="success", ) return HTTPSeeOther(request.route_path("manage.account")) diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -265,6 +265,9 @@ def configure(settings=None): # We want to use newstyle gettext config.add_settings({"jinja2.newstyle": True}) + # Our translation strings are all in the "messages" domain + config.add_settings({"jinja2.i18n.domain": "messages"}) + # We also want to use Jinja2 for .html templates as well, because we just # assume that all templates will be using Jinja. config.add_jinja2_renderer(".html") @@ -443,9 +446,6 @@ def configure(settings=None): "warehouse:static/dist/manifest.json", prefix="/static/" ) - # Enable Warehouse to serve our locale files - config.add_static_view("locales", "warehouse:locales/") - # Enable support of passing certain values like remote host, client # address, and protocol support in from an outer proxy to the application. config.add_wsgi_middleware( diff --git a/warehouse/forms.py b/warehouse/forms.py --- a/warehouse/forms.py +++ b/warehouse/forms.py @@ -10,10 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from wtforms import Form as BaseForm -from wtforms.validators import StopValidation, ValidationError +from wtforms import Form as BaseForm, StringField +from wtforms.validators import DataRequired, StopValidation, ValidationError from zxcvbn import zxcvbn +from warehouse.i18n import KNOWN_LOCALES from warehouse.utils.http import is_valid_uri @@ -118,3 +119,13 @@ class DBForm(Form): def __init__(self, *args, db, **kwargs): super().__init__(*args, **kwargs) self.db = db + + +class SetLocaleForm(Form): + __params__ = ["locale_id"] + + locale_id = StringField(validators=[DataRequired(message="Missing locale ID")]) + + def validate_locale_id(self, field): + if field.data not in KNOWN_LOCALES.keys(): + raise ValidationError(f"Unknown locale ID: {field.data}") diff --git a/warehouse/i18n/__init__.py b/warehouse/i18n/__init__.py --- a/warehouse/i18n/__init__.py +++ b/warehouse/i18n/__init__.py @@ -11,19 +11,77 @@ # limitations under the License. from babel.core import Locale +from pyramid.i18n import TranslationStringFactory, default_locale_negotiator +from pyramid.threadlocal import get_current_request + +KNOWN_LOCALES = {"en": "English"} + +LOCALE_ATTR = "_LOCALE_" + +_translation_factory = TranslationStringFactory("messages") + + +class LazyString: + def __init__(self, fn, *args, **kwargs): + self.fn = fn + self.args = args + self.mapping = kwargs.get("mapping", {}) + self.kwargs = kwargs + + def __mod__(self, new_mapping): + mapping = self.mapping.copy() + mapping.update(new_mapping) + return LazyString(self.fn, *self.args, mapping=new_mapping, **self.kwargs) + + def __str__(self): + return self.fn(*self.args, **self.kwargs) def _locale(request): """ Computes a babel.core:Locale() object for this request. """ - return Locale.parse(request.locale_name) + return Locale.parse(request.locale_name, sep="_") + + +def _negotiate_locale(request): + locale_name = getattr(request, LOCALE_ATTR, None) + if locale_name is not None: + return locale_name + + locale_name = request.params.get(LOCALE_ATTR) + if locale_name is not None: + return locale_name + + locale_name = request.cookies.get(LOCALE_ATTR) + if locale_name is not None: + return locale_name + + if not request.accept_language: + return default_locale_negotiator(request) + + return request.accept_language.best_match( + tuple(KNOWN_LOCALES.keys()), default_match=default_locale_negotiator(request) + ) + + +def localize(message, **kwargs): + def _localize(message, **kwargs): + request = get_current_request() + return request.localizer.translate(_translation_factory(message, **kwargs)) + + return LazyString(_localize, message, **kwargs) def includeme(config): # Add the request attributes config.add_request_method(_locale, name="locale", reify=True) + # Register our translation directory. 
+ config.add_translation_dirs("warehouse:locale/") + + config.set_locale_negotiator(_negotiate_locale) + # Register our i18n/l10n filters for Jinja2 filters = config.get_settings().setdefault("jinja2.filters", {}) filters.setdefault("format_date", "warehouse.i18n.filters:format_date") @@ -32,3 +90,6 @@ def includeme(config): "format_rfc822_datetime", "warehouse.i18n.filters:format_rfc822_datetime" ) filters.setdefault("format_number", "warehouse.i18n.filters:format_number") + + jglobals = config.get_settings().setdefault("jinja2.globals", {}) + jglobals.setdefault("KNOWN_LOCALES", "warehouse.i18n:KNOWN_LOCALES") diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -39,6 +39,7 @@ send_password_change_email, send_primary_email_change_email, ) +from warehouse.i18n import localize as _ from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage.forms import ( AddEmailForm, @@ -164,8 +165,11 @@ def add_email(self): send_email_verification_email(self.request, (self.request.user, email)) self.request.session.flash( - f"Email {email.email} added - check your email for " - + "a verification link", + _( + "Email ${email_address} added - check your email for " + "a verification link", + mapping={"email_address": email.email}, + ), queue="success", ) return self.default_response diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -27,6 +27,7 @@ def includeme(config): # Basic global routes config.add_route("index", "/", domain=warehouse) + config.add_route("locale", "/locale", domain=warehouse) config.add_route("robots.txt", "/robots.txt", domain=warehouse) config.add_route("opensearch.xml", "/opensearch.xml", domain=warehouse) config.add_route("index.sitemap.xml", "/sitemap.xml", domain=warehouse) diff --git a/warehouse/sessions.py b/warehouse/sessions.py --- a/warehouse/sessions.py +++ b/warehouse/sessions.py @@ -26,6 +26,7 @@ from warehouse.cache.http import add_vary from warehouse.utils import crypto +from warehouse.utils.msgpack import object_encode def _invalid_method(method): @@ -274,7 +275,12 @@ def _process_response(self, request, response): self.redis.setex( self._redis_key(request.session.sid), self.max_age, - msgpack.packb(request.session, encoding="utf8", use_bin_type=True), + msgpack.packb( + request.session, + encoding="utf8", + default=object_encode, + use_bin_type=True, + ), ) # Send our session cookie to the client diff --git a/warehouse/utils/msgpack.py b/warehouse/utils/msgpack.py new file mode 100644 --- /dev/null +++ b/warehouse/utils/msgpack.py @@ -0,0 +1,23 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.i18n import LazyString + + +def object_encode(obj): + if isinstance(obj, LazyString): + # NOTE: This expands the lazy string based on the locale in the current request. 
+ # This is almost certainly what we want, but could conceivably cause + # issues for users who issue a request, change their locale, and then + # receive an email at some later date in their original locale. + return str(obj) + return obj diff --git a/warehouse/views.py b/warehouse/views.py --- a/warehouse/views.py +++ b/warehouse/views.py @@ -44,9 +44,12 @@ from warehouse.cache.origin import origin_cache from warehouse.classifiers.models import Classifier from warehouse.db import DatabaseNotAvailable +from warehouse.forms import SetLocaleForm +from warehouse.i18n import LOCALE_ATTR from warehouse.metrics import IMetricsService from warehouse.packaging.models import File, Project, Release, release_classifiers from warehouse.search.queries import SEARCH_BOOSTS, SEARCH_FIELDS, SEARCH_FILTER_ORDER +from warehouse.utils.http import is_safe_url from warehouse.utils.paginate import ElasticsearchPage, paginate_url_factory from warehouse.utils.row_counter import RowCount @@ -226,6 +229,27 @@ def index(request): } +@view_config( + route_name="locale", + request_method="GET", + request_param=SetLocaleForm.__params__, + uses_session=True, +) +def locale(request): + form = SetLocaleForm(**request.GET) + + redirect_to = request.referer + if not is_safe_url(redirect_to, host=request.host): + redirect_to = request.route_path("index") + resp = HTTPSeeOther(redirect_to) + + if form.validate(): + request.session.flash("Locale updated", queue="success") + resp.set_cookie(LOCALE_ATTR, form.locale_id.data) + + return resp + + @view_config(route_name="classifiers", renderer="pages/classifiers.html") def classifiers(request): classifiers = (
diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -24,6 +24,7 @@ import pytest import webtest as _webtest +from pyramid.i18n import TranslationString from pyramid.static import ManifestCacheBuster from pytest_postgresql.factories import ( drop_postgresql_database, @@ -33,12 +34,15 @@ from sqlalchemy import event from warehouse import admin, config, static -from warehouse.accounts import services as account_services +from warehouse.accounts import services as account_services, views as account_views from warehouse.macaroons import services as macaroon_services +from warehouse.manage import views as manage_views from warehouse.metrics import IMetricsService from .common.db import Session +L10N_TAGGED_MODULES = [account_views, manage_views] + def pytest_collection_modifyitems(items): for item in items: @@ -313,3 +317,25 @@ def pytest_runtest_makereport(item, call): ) if data: rep.sections.append(("Captured {} log".format(log_type), data)) + + [email protected](scope="session") +def monkeypatch_session(): + # NOTE: This is a minor hack to avoid duplicate monkeypatching + # on every function scope for dummy_localize. + # https://github.com/pytest-dev/pytest/issues/1872#issuecomment-375108891 + from _pytest.monkeypatch import MonkeyPatch + + m = MonkeyPatch() + yield m + m.undo() + + [email protected](scope="session", autouse=True) +def dummy_localize(monkeypatch_session): + def localize(message, **kwargs): + ts = TranslationString(message, **kwargs) + return ts.interpolate() + + for mod in L10N_TAGGED_MODULES: + monkeypatch_session.setattr(mod, "_", localize) diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py --- a/tests/unit/accounts/test_forms.py +++ b/tests/unit/accounts/test_forms.py @@ -254,7 +254,7 @@ def test_password_confirm_required_error(self): assert not form.validate() assert form.password_confirm.errors.pop() == "This field is required." - def test_passwords_mismatch_error(self): + def test_passwords_mismatch_error(self, pyramid_config): user_service = pretend.stub( find_userid_by_email=pretend.call_recorder(lambda _: pretend.stub()) ) @@ -266,7 +266,7 @@ def test_passwords_mismatch_error(self): assert not form.validate() assert ( - form.password_confirm.errors.pop() + str(form.password_confirm.errors.pop()) == "Your passwords don't match. Try again." ) @@ -299,7 +299,7 @@ def test_email_required_error(self): assert not form.validate() assert form.email.errors.pop() == "This field is required." - def test_invalid_email_error(self): + def test_invalid_email_error(self, pyramid_config): form = forms.RegistrationForm( data={"email": "bad"}, user_service=pretend.stub( @@ -309,7 +309,9 @@ def test_invalid_email_error(self): ) assert not form.validate() - assert form.email.errors.pop() == "The email address isn't valid. Try again." + assert ( + str(form.email.errors.pop()) == "The email address isn't valid. Try again." + ) def test_exotic_email_success(self): form = forms.RegistrationForm( @@ -323,7 +325,7 @@ def test_exotic_email_success(self): form.validate() assert len(form.email.errors) == 0 - def test_email_exists_error(self): + def test_email_exists_error(self, pyramid_config): form = forms.RegistrationForm( data={"email": "[email protected]"}, user_service=pretend.stub( @@ -334,12 +336,12 @@ def test_email_exists_error(self): assert not form.validate() assert ( - form.email.errors.pop() + str(form.email.errors.pop()) == "This email address is already being used by another account. 
" "Use a different email." ) - def test_blacklisted_email_error(self): + def test_blacklisted_email_error(self, pyramid_config): form = forms.RegistrationForm( data={"email": "[email protected]"}, user_service=pretend.stub( @@ -350,12 +352,12 @@ def test_blacklisted_email_error(self): assert not form.validate() assert ( - form.email.errors.pop() + str(form.email.errors.pop()) == "You can't use an email address from this domain. Use a " "different email." ) - def test_username_exists(self): + def test_username_exists(self, pyramid_config): form = forms.RegistrationForm( data={"username": "foo"}, user_service=pretend.stub( @@ -365,13 +367,13 @@ def test_username_exists(self): ) assert not form.validate() assert ( - form.username.errors.pop() + str(form.username.errors.pop()) == "This username is already being used by another account. " "Choose a different username." ) @pytest.mark.parametrize("username", ["_foo", "bar_", "foo^bar"]) - def test_username_is_valid(self, username): + def test_username_is_valid(self, username, pyramid_config): form = forms.RegistrationForm( data={"username": username}, user_service=pretend.stub( @@ -381,7 +383,7 @@ def test_username_is_valid(self, username): ) assert not form.validate() assert ( - form.username.errors.pop() == "The username is invalid. Usernames " + str(form.username.errors.pop()) == "The username is invalid. Usernames " "must be composed of letters, numbers, " "dots, hyphens and underscores. And must " "also start and finish with a letter or number. " @@ -423,7 +425,7 @@ def test_password_breached(self): "compromised and cannot be used." ) - def test_name_too_long(self): + def test_name_too_long(self, pyramid_config): form = forms.RegistrationForm( data={"full_name": "hello " * 50}, user_service=pretend.stub( @@ -433,7 +435,7 @@ def test_name_too_long(self): ) assert not form.validate() assert ( - form.full_name.errors.pop() + str(form.full_name.errors.pop()) == "The name is too long. Choose a name with 100 characters or less." ) @@ -493,7 +495,7 @@ def test_password_confirm_required_error(self): assert not form.validate() assert form.password_confirm.errors.pop() == "This field is required." - def test_passwords_mismatch_error(self): + def test_passwords_mismatch_error(self, pyramid_config): form = forms.ResetPasswordForm( data={ "new_password": "password", @@ -507,7 +509,7 @@ def test_passwords_mismatch_error(self): assert not form.validate() assert ( - form.password_confirm.errors.pop() + str(form.password_confirm.errors.pop()) == "Your passwords don't match. Try again." ) @@ -578,7 +580,7 @@ def test_creation(self): assert form.user_service is user_service - def test_totp_secret_exists(self): + def test_totp_secret_exists(self, pyramid_config): form = forms.TOTPAuthenticationForm( data={"totp_value": ""}, user_id=pretend.stub(), user_service=pretend.stub() ) @@ -591,7 +593,7 @@ def test_totp_secret_exists(self): user_service=pretend.stub(check_totp_value=lambda *a: True), ) assert not form.validate() - assert form.totp_value.errors.pop() == "TOTP code must be 6 digits." + assert str(form.totp_value.errors.pop()) == "TOTP code must be 6 digits." form = forms.TOTPAuthenticationForm( data={"totp_value": "123456"}, @@ -599,7 +601,7 @@ def test_totp_secret_exists(self): user_service=pretend.stub(check_totp_value=lambda *a: False), ) assert not form.validate() - assert form.totp_value.errors.pop() == "Invalid TOTP code." + assert str(form.totp_value.errors.pop()) == "Invalid TOTP code." 
form = forms.TOTPAuthenticationForm( data={"totp_value": "123456"}, @@ -627,7 +629,7 @@ def test_creation(self): assert form.challenge is challenge - def test_credential_bad_payload(self): + def test_credential_bad_payload(self, pyramid_config): form = forms.WebAuthnAuthenticationForm( credential="not valid json", user_id=pretend.stub(), @@ -637,7 +639,10 @@ def test_credential_bad_payload(self): rp_id=pretend.stub(), ) assert not form.validate() - assert form.credential.errors.pop() == "Invalid WebAuthn assertion: Bad payload" + assert ( + str(form.credential.errors.pop()) + == "Invalid WebAuthn assertion: Bad payload" + ) def test_credential_invalid(self): form = forms.WebAuthnAuthenticationForm( diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -37,16 +37,17 @@ class TestFailedLoginView: - exc = TooManyFailedLogins(resets_in=datetime.timedelta(seconds=600)) - request = pretend.stub() + def test_too_many_failed_logins(self): + exc = TooManyFailedLogins(resets_in=datetime.timedelta(seconds=600)) + request = pretend.stub(localizer=pretend.stub(translate=lambda tsf: tsf())) - resp = views.failed_logins(exc, request) + resp = views.failed_logins(exc, request) - assert resp.status == "429 Too Many Failed Login Attempts" - assert resp.detail == ( - "There have been too many unsuccessful login attempts. " "Try again later." - ) - assert dict(resp.headers).get("Retry-After") == "600" + assert resp.status == "429 Too Many Failed Login Attempts" + assert resp.detail == ( + "There have been too many unsuccessful login attempts. Try again later." + ) + assert dict(resp.headers).get("Retry-After") == "600" class TestUserProfile: @@ -595,7 +596,7 @@ def test_webauthn_get_options_invalid_token(self, monkeypatch): assert request.session.flash.calls == [ pretend.call("Invalid or expired two factor login.", queue="error") ] - assert result == {"fail": {"errors": ["Invalid two factor token"]}} + assert result == {"fail": {"errors": ["Invalid or expired two factor login."]}} def test_webauthn_get_options(self, monkeypatch): _get_two_factor_data = pretend.call_recorder( @@ -642,7 +643,7 @@ def test_webauthn_validate_invalid_token(self, monkeypatch): assert request.session.flash.calls == [ pretend.call("Invalid or expired two factor login.", queue="error") ] - assert result == {"fail": {"errors": ["Invalid two factor token"]}} + assert result == {"fail": {"errors": ["Invalid or expired two factor login."]}} def test_webauthn_validate_invalid_form(self, monkeypatch): _get_two_factor_data = pretend.call_recorder( @@ -929,10 +930,8 @@ def test_register_fails_with_admin_flag_set(self, db_request): assert isinstance(result, HTTPSeeOther) assert db_request.session.flash.calls == [ pretend.call( - ( - "New user registration temporarily disabled. " - "See https://pypi.org/help#admin-intervention for details." - ), + "New user registration temporarily disabled. 
" + "See https://pypi.org/help#admin-intervention for details.", queue="error", ) ] @@ -1450,8 +1449,8 @@ def test_verify_email( @pytest.mark.parametrize( ("exception", "message"), [ - (TokenInvalid, "Invalid token: request a new verification link"), - (TokenExpired, "Expired token: request a new verification link"), + (TokenInvalid, "Invalid token: request a new email verification link"), + (TokenExpired, "Expired token: request a new email verification link"), (TokenMissing, "Invalid token: no token supplied"), ], ) diff --git a/tests/unit/i18n/test_init.py b/tests/unit/i18n/test_init.py --- a/tests/unit/i18n/test_init.py +++ b/tests/unit/i18n/test_init.py @@ -17,23 +17,64 @@ def test_sets_locale(monkeypatch): locale_obj = pretend.stub() - locale_cls = pretend.stub(parse=pretend.call_recorder(lambda l: locale_obj)) + locale_cls = pretend.stub(parse=pretend.call_recorder(lambda l, **kw: locale_obj)) monkeypatch.setattr(i18n, "Locale", locale_cls) request = pretend.stub(locale_name=pretend.stub()) assert i18n._locale(request) is locale_obj - assert locale_cls.parse.calls == [pretend.call(request.locale_name)] + assert locale_cls.parse.calls == [pretend.call(request.locale_name, sep="_")] + + +def test_negotiate_locale(monkeypatch): + request = pretend.stub(_LOCALE_="fake-locale-attr") + assert i18n._negotiate_locale(request) == "fake-locale-attr" + + request = pretend.stub(params={"_LOCALE_": "fake-locale-param"}) + assert i18n._negotiate_locale(request) == "fake-locale-param" + + request = pretend.stub(params={}, cookies={"_LOCALE_": "fake-locale-cookie"}) + assert i18n._negotiate_locale(request) == "fake-locale-cookie" + + request = pretend.stub(params={}, cookies={}, accept_language=None) + default_locale_negotiator = pretend.call_recorder(lambda r: "fake-locale-default") + monkeypatch.setattr(i18n, "default_locale_negotiator", default_locale_negotiator) + assert i18n._negotiate_locale(request) == "fake-locale-default" + + request = pretend.stub( + params={}, + cookies={}, + accept_language=pretend.stub( + best_match=pretend.call_recorder(lambda *a, **kw: "fake-locale-best-match") + ), + ) + assert i18n._negotiate_locale(request) == "fake-locale-best-match" + + +def test_localize(monkeypatch): + request = pretend.stub( + localizer=pretend.stub( + translate=pretend.call_recorder(lambda ts: "fake translated string") + ) + ) + get_current_request = pretend.call_recorder(lambda: request) + monkeypatch.setattr(i18n, "get_current_request", get_current_request) + + assert str(i18n.localize("foo")) == "fake translated string" def test_includeme(): config_settings = {} config = pretend.stub( + add_translation_dirs=pretend.call_recorder(lambda s: None), + set_locale_negotiator=pretend.call_recorder(lambda f: None), add_request_method=pretend.call_recorder(lambda f, name, reify: None), get_settings=lambda: config_settings, ) i18n.includeme(config) + assert config.add_translation_dirs.calls == [pretend.call("warehouse:locale/")] + assert config.set_locale_negotiator.calls == [pretend.call(i18n._negotiate_locale)] assert config.add_request_method.calls == [ pretend.call(i18n._locale, name="locale", reify=True) ] @@ -43,5 +84,6 @@ def test_includeme(): "format_datetime": "warehouse.i18n.filters:format_datetime", "format_rfc822_datetime": "warehouse.i18n.filters:format_rfc822_datetime", "format_number": "warehouse.i18n.filters:format_number", - } + }, + "jinja2.globals": {"KNOWN_LOCALES": "warehouse.i18n:KNOWN_LOCALES"}, } diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py 
--- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -76,7 +76,7 @@ def test_creation(self): assert form.user_service is user_service - def test_email_exists_error(self): + def test_email_exists_error(self, pyramid_config): user_id = pretend.stub() form = forms.AddEmailForm( data={"email": "[email protected]"}, @@ -86,12 +86,12 @@ def test_email_exists_error(self): assert not form.validate() assert ( - form.email.errors.pop() + str(form.email.errors.pop()) == "This email address is already being used by this account. " "Use a different email." ) - def test_email_exists_other_account_error(self): + def test_email_exists_other_account_error(self, pyramid_config): form = forms.AddEmailForm( data={"email": "[email protected]"}, user_id=pretend.stub(), @@ -100,12 +100,12 @@ def test_email_exists_other_account_error(self): assert not form.validate() assert ( - form.email.errors.pop() + str(form.email.errors.pop()) == "This email address is already being used by another account. " "Use a different email." ) - def test_blacklisted_email_error(self): + def test_blacklisted_email_error(self, pyramid_config): form = forms.AddEmailForm( data={"email": "[email protected]"}, user_service=pretend.stub(find_userid_by_email=lambda _: None), @@ -114,7 +114,7 @@ def test_blacklisted_email_error(self): assert not form.validate() assert ( - form.email.errors.pop() + str(form.email.errors.pop()) == "You can't use an email address from this domain. " "Use a different email." ) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -343,6 +343,7 @@ def __init__(self): ] assert configurator_obj.add_settings.calls == [ pretend.call({"jinja2.newstyle": True}), + pretend.call({"jinja2.i18n.domain": "messages"}), pretend.call({"retry.attempts": 3}), pretend.call( { @@ -354,7 +355,7 @@ def __init__(self): ), pretend.call({"http": {"verify": "/etc/ssl/certs/"}}), ] - add_settings_dict = configurator_obj.add_settings.calls[2].args[0] + add_settings_dict = configurator_obj.add_settings.calls[3].args[0] assert add_settings_dict["tm.manager_hook"](pretend.stub()) is transaction_manager assert configurator_obj.add_tween.calls == [ pretend.call("warehouse.config.require_https_tween_factory"), @@ -369,8 +370,7 @@ def __init__(self): ), ] assert configurator_obj.add_static_view.calls == [ - pretend.call("static", "warehouse:static/dist/", cache_max_age=315360000), - pretend.call("locales", "warehouse:locales/"), + pretend.call("static", "warehouse:static/dist/", cache_max_age=315360000) ] assert configurator_obj.add_cache_buster.calls == [ pretend.call("warehouse:static/dist/", cachebuster_obj) diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -76,6 +76,7 @@ def add_policy(name, filename): pretend.call("health", "/_health/"), pretend.call("force-status", r"/_force-status/{status:[45]\d\d}/"), pretend.call("index", "/", domain=warehouse), + pretend.call("locale", "/locale", domain=warehouse), pretend.call("robots.txt", "/robots.txt", domain=warehouse), pretend.call("opensearch.xml", "/opensearch.xml", domain=warehouse), pretend.call("index.sitemap.xml", "/sitemap.xml", domain=warehouse), diff --git a/tests/unit/test_sessions.py b/tests/unit/test_sessions.py --- a/tests/unit/test_sessions.py +++ b/tests/unit/test_sessions.py @@ -31,6 +31,7 @@ session_view, ) from warehouse.utils import crypto +from warehouse.utils.msgpack import object_encode 
class TestInvalidSession: @@ -497,9 +498,7 @@ def test_invalidated_deletes_no_save(self, pyramid_request): assert response.delete_cookie.calls == [pretend.call("session_id")] def test_invalidated_deletes_save_non_secure(self, monkeypatch, pyramid_request): - msgpack_packb = pretend.call_recorder( - lambda data, encoding, use_bin_type: b"msgpack data" - ) + msgpack_packb = pretend.call_recorder(lambda *a, **kw: b"msgpack data") monkeypatch.setattr(msgpack, "packb", msgpack_packb) session_factory = SessionFactory("mysecret", "redis://redis://localhost:6379/0") @@ -524,7 +523,12 @@ def test_invalidated_deletes_save_non_secure(self, monkeypatch, pyramid_request) pretend.call("warehouse/session/data/2"), ] assert msgpack_packb.calls == [ - pretend.call(pyramid_request.session, encoding="utf8", use_bin_type=True) + pretend.call( + pyramid_request.session, + encoding="utf8", + default=object_encode, + use_bin_type=True, + ) ] assert session_factory.redis.setex.calls == [ pretend.call("warehouse/session/data/123456", 12 * 60 * 60, b"msgpack data") diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py --- a/tests/unit/test_views.py +++ b/tests/unit/test_views.py @@ -16,7 +16,12 @@ import pretend import pytest -from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPServiceUnavailable +from pyramid.httpexceptions import ( + HTTPBadRequest, + HTTPNotFound, + HTTPSeeOther, + HTTPServiceUnavailable, +) from webob.multidict import MultiDict from warehouse import views @@ -30,6 +35,7 @@ health, httpexception_view, index, + locale, opensearchxml, robotstxt, search, @@ -201,6 +207,36 @@ def test_index(self, db_request): } +class TestLocale: + @pytest.mark.parametrize( + ("referer", "redirect", "get", "valid"), + [ + (None, "/fake-route", {"locale_id": "en"}, True), + ("http://example.com", "/fake-route", {"nonsense": "arguments"}, False), + ("/robots.txt", "/robots.txt", {"locale_id": "non-existent-locale"}, False), + ], + ) + def test_locale(self, referer, redirect, get, valid): + request = pretend.stub( + GET=get, + referer=referer, + route_path=pretend.call_recorder(lambda r: "/fake-route"), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + host=None, + ) + + result = locale(request) + + assert isinstance(result, HTTPSeeOther) + assert result.location == redirect + + if valid: + assert "Set-Cookie" in result.headers + assert f"_LOCALE_={get['locale_id']};" in result.headers["Set-Cookie"] + else: + assert "Set-Cookie" not in result.headers + + def test_esi_current_user_indicator(): assert current_user_indicator(pretend.stub()) == {} diff --git a/tests/unit/utils/test_msgpack.py b/tests/unit/utils/test_msgpack.py new file mode 100644 --- /dev/null +++ b/tests/unit/utils/test_msgpack.py @@ -0,0 +1,27 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from warehouse.i18n import LazyString +from warehouse.utils.msgpack import object_encode + + +def test_object_encode_passes_through(): + assert object_encode("foo") == "foo" + + +def test_object_encode_converts_lazystring(): + def stringify(*args, **kwargs): + return "foo" + + ls = LazyString(stringify, "foobar") + assert object_encode(ls) == "foo"
Add language switcher to PyPI UI

For the upcoming localisation work -> we will need to offer a way for users to switch their language. Probably the easiest way to support this is to add a small section in the footer with text links to the different supported languages.

- We should *not* use flags (a country is not a language)
- The text links themselves should be translated

References:

- https://usersnap.com/blog/design-language-switch/
As discussed with @woodruffw I will make a dummy PR for this (with HTML and CSS only) that he can then hook up as appropriate.

#6535 will contain the backend work for this: my thought is to add a `/locale` route or similar that the language switcher `POST`s appropriate language IDs to. We can then use that to return a `_LOCALE_` cookie for the user's browser to set, and the locale negotiator will pick that up correctly.
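The backend half of this plan is the patch above (it ultimately landed as a `GET` view rather than a `POST`). Condensed from that diff, with `SetLocaleForm` and `is_safe_url` being the helpers it introduces or imports, the view looks roughly like this:

```python
# Condensed from the patch above: validate the requested locale, redirect
# back to a same-host page, and persist the choice in a _LOCALE_ cookie
# that the locale negotiator consults on later requests.
from pyramid.httpexceptions import HTTPSeeOther
from pyramid.view import view_config

from warehouse.forms import SetLocaleForm
from warehouse.i18n import LOCALE_ATTR
from warehouse.utils.http import is_safe_url


@view_config(route_name="locale", request_method="GET", uses_session=True)
def locale(request):
    form = SetLocaleForm(**request.GET)  # rejects unknown locale IDs

    redirect_to = request.referer
    if not is_safe_url(redirect_to, host=request.host):
        redirect_to = request.route_path("index")
    resp = HTTPSeeOther(redirect_to)

    if form.validate():
        request.session.flash("Locale updated", queue="success")
        resp.set_cookie(LOCALE_ATTR, form.locale_id.data)

    return resp
```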
2019-08-27T15:22:00Z
[]
[]
pypi/warehouse
6,564
pypi__warehouse-6564
[ "6250" ]
106226c2603f93a91c12b3bac3b8310ca151b077
diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -33,6 +33,7 @@ def includeme(config): config.add_route("bucket.sitemap.xml", "/{bucket}.sitemap.xml", domain=warehouse) # Some static, template driven pages + config.add_template_view("sitemap", "/sitemap/", "pages/sitemap.html") config.add_template_view("help", "/help/", "pages/help.html") config.add_template_view("security", "/security/", "pages/security.html") config.add_template_view(
diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -327,6 +327,7 @@ def add_policy(name, filename): ] assert config.add_template_view.calls == [ + pretend.call("sitemap", "/sitemap/", "pages/sitemap.html"), pretend.call("help", "/help/", "pages/help.html"), pretend.call("security", "/security/", "pages/security.html"), pretend.call(
Add sitemap, link to in footer

WCAG 2.4.5:

> Multiple ways are available to find other web pages on the site - at least two of: a list of related pages, table of contents, site map, site search, or list of all available web pages.

The quickest way for us to meet this criterion is to add a basic sitemap to PyPI and link to it from the site footer.
@woodruffw - I've assigned this one to you as some backend work will be required here.

Sitemap is here: https://pypi.org/sitemap.xml

Thanks @di - but I don't know that this would actually be useful/usable for people wanting a secondary method of navigation. The recommendations say:

> The simplest and most common kind of site map is an outline that shows links to each section or sub-site. Such outline views do not show more complex relationships within the site, such as links between pages in different sections of the site. The site maps for some large sites use headings that expand to show additional detail about each section.

Further guidance on how to create a site map: https://www.w3.org/TR/2016/NOTE-WCAG20-TECHS-20161007/G63

Example site map: https://www.w3.org/WAI/sitemap/

Sorry, should have added more context! Wanted to point y'all to the existing machine-readable sitemap, as the view could be reused for a human-readable sitemap (i.e., we don't need to start from scratch).
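The fix that eventually landed (the one-line patch above) takes the template-driven route: rather than reusing the XML view, it registers a human-readable page next to the other static pages in `warehouse/routes.py`:

```python
# From the patch above: the human-readable sitemap is just another static,
# template-driven page, registered alongside /help/ and /security/.
def includeme(config):
    config.add_template_view("sitemap", "/sitemap/", "pages/sitemap.html")
    config.add_template_view("help", "/help/", "pages/help.html")
    config.add_template_view("security", "/security/", "pages/security.html")
```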
2019-09-02T16:52:12Z
[]
[]
pypi/warehouse
6,699
pypi__warehouse-6699
[ "6637" ]
eb162e7e7e454605f09afb2ca6691228a3bd3b1a
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -46,7 +46,11 @@ from warehouse.accounts.models import Email, User from warehouse.admin.flags import AdminFlagValue from warehouse.cache.origin import origin_cache -from warehouse.email import send_email_verification_email, send_password_reset_email +from warehouse.email import ( + send_email_verification_email, + send_password_change_email, + send_password_reset_email, +) from warehouse.i18n import localize as _ from warehouse.packaging.models import Project, Release from warehouse.utils.http import is_safe_url @@ -528,6 +532,9 @@ def _error(message): user.id, tag="account:password:reset", ip_address=request.remote_addr ) + # Send password change email + send_password_change_email(request, user) + # Flash a success message request.session.flash(_("You have reset your password"), queue="success")
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -1186,7 +1186,7 @@ def test_get(self, db_request, user_service, token_service): pretend.call(ITokenService, name="password"), ] - def test_reset_password(self, db_request, user_service, token_service): + def test_reset_password(self, monkeypatch, db_request, user_service, token_service): user = UserFactory.create() db_request.method = "POST" db_request.POST.update({"token": "RANDOM_KEY"}) @@ -1199,6 +1199,9 @@ def test_reset_password(self, db_request, user_service, token_service): breach_service = pretend.stub(check_password=lambda pw: False) + send_email = pretend.call_recorder(lambda *a: None) + monkeypatch.setattr(views, "send_password_change_email", send_email) + db_request.route_path = pretend.call_recorder(lambda name: "/account/login") db_request.remote_addr = "0.0.0.0" token_service.loads = pretend.call_recorder( @@ -1242,6 +1245,7 @@ def test_reset_password(self, db_request, user_service, token_service): assert user_service.update_user.calls == [ pretend.call(user.id, password=form_obj.new_password.data) ] + assert send_email.calls == [pretend.call(db_request, user)] assert db_request.session.flash.calls == [ pretend.call("You have reset your password", queue="success") ]
Send 'password change' email when a user changes their password via the reset workflow

## Current behaviour

An email (`warehouse/templates/email/password-change`) is sent to a user when they reset their password via their account settings page (assuming their email is verified).

## Requested behaviour

The email should be sent to the user:

- When they reset their password via their account settings page **and**
- When they reset their password via the password reset workflow

---

**Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).
I've added the "good first issue" label to this issue. Looks like we need to add something like: https://github.com/pypa/warehouse/blob/30f2389b27f0d57462e03c23c3126ff707daf8dd/warehouse/manage/views.py#L303 here: https://github.com/pypa/warehouse/blob/30f2389b27f0d57462e03c23c3126ff707daf8dd/warehouse/accounts/views.py#L518-L529 Plus updated tests.
2019-09-23T22:38:08Z
[]
[]
pypi/warehouse
6,805
pypi__warehouse-6805
[ "4751", "4752" ]
25e911477ec108953a2b31b9794c9f494412b148
diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -144,6 +144,7 @@ def json_release(release, request): # here to consider it no longer in use. "downloads": -1, "upload_time": f.upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": f.upload_time.isoformat() + "Z", "url": request.route_url("packaging.file", path=f.path), "requires_python": r.requires_python if r.requires_python else None, } diff --git a/warehouse/legacy/api/xmlrpc/views.py b/warehouse/legacy/api/xmlrpc/views.py --- a/warehouse/legacy/api/xmlrpc/views.py +++ b/warehouse/legacy/api/xmlrpc/views.py @@ -450,6 +450,7 @@ def release_urls(request, package_name: str, version: str): "digests": {"md5": f.md5_digest, "sha256": f.sha256_digest}, "has_sig": f.has_signature, "upload_time": f.upload_time.isoformat() + "Z", + "upload_time_iso_8601": f.upload_time.isoformat() + "Z", "comment_text": f.comment_text, # TODO: Remove this once we've had a long enough time with it # here to consider it no longer in use.
diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -281,6 +281,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "upload_time": files[0].upload_time.strftime( "%Y-%m-%dT%H:%M:%S" ), + "upload_time_iso_8601": files[0].upload_time.isoformat() + "Z", "url": "/the/fake/url/", "requires_python": None, } @@ -302,6 +303,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "upload_time": files[1].upload_time.strftime( "%Y-%m-%dT%H:%M:%S" ), + "upload_time_iso_8601": files[1].upload_time.isoformat() + "Z", "url": "/the/fake/url/", "requires_python": None, } @@ -323,6 +325,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "upload_time": files[2].upload_time.strftime( "%Y-%m-%dT%H:%M:%S" ), + "upload_time_iso_8601": files[2].upload_time.isoformat() + "Z", "url": "/the/fake/url/", "requires_python": None, } @@ -343,6 +346,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "python_version": "source", "size": 200, "upload_time": files[2].upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": files[2].upload_time.isoformat() + "Z", "url": "/the/fake/url/", "requires_python": None, } @@ -424,6 +428,7 @@ def test_minimal_renders(self, pyramid_config, db_request): "python_version": "source", "size": 200, "upload_time": file.upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": file.upload_time.isoformat() + "Z", "url": "/the/fake/url/", "requires_python": None, } @@ -441,6 +446,7 @@ def test_minimal_renders(self, pyramid_config, db_request): "python_version": "source", "size": 200, "upload_time": file.upload_time.strftime("%Y-%m-%dT%H:%M:%S"), + "upload_time_iso_8601": file.upload_time.isoformat() + "Z", "url": "/the/fake/url/", "requires_python": None, } diff --git a/tests/unit/legacy/api/xmlrpc/test_xmlrpc.py b/tests/unit/legacy/api/xmlrpc/test_xmlrpc.py --- a/tests/unit/legacy/api/xmlrpc/test_xmlrpc.py +++ b/tests/unit/legacy/api/xmlrpc/test_xmlrpc.py @@ -654,6 +654,7 @@ def test_release_urls(db_request): "digests": {"md5": file_.md5_digest, "sha256": file_.sha256_digest}, "has_sig": file_.has_signature, "upload_time": file_.upload_time.isoformat() + "Z", + "upload_time_iso_8601": file_.upload_time.isoformat() + "Z", "comment_text": file_.comment_text, "downloads": -1, "path": file_.path,
JSON API omits timezone and microseconds from upload_time

When a request is made to `https://pypi.org/pypi/$PROJECT/json` or `https://pypi.org/pypi/$PROJECT/$VERSION/json`, the `"upload_time"` fields in the response contain no timezone information and have no sub-second precision (e.g., `"2018-09-22T19:16:05"`). Compare this to the XML-RPC API, in which the `"upload_time"` fields in the data returned by the `release_urls` method all end with `Z` and have microsecond precision (e.g., `"2018-09-22T19:16:05.067954Z"`).

Align JSON API and XML-RPC timestamps

Fixes #4751 (I think).
@di Adding microseconds might break existing API users. If so, how do we handle that? How bad is adding a new field in the JSON with `upload_time_with_tz`?

@gsb-eng Yes, see #4752. I think we'll need to add a new field, perhaps we can come up with a better name than `upload_time_with_tz`.

The change looks generally OK to me, though I am concerned that this is going to break people who had manually written parse strings for the JSON date.
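The field name that landed (see the patch above) is `upload_time_iso_8601`, added alongside the untouched legacy field. A quick illustration of how the two serializations of the same timestamp differ:

```python
# Formatting used by the legacy field vs. the new upload_time_iso_8601
# field, as in the patch above.
from datetime import datetime

upload_time = datetime(2018, 9, 22, 19, 16, 5, 67954)

legacy = upload_time.strftime("%Y-%m-%dT%H:%M:%S")
iso_8601 = upload_time.isoformat() + "Z"

print(legacy)    # 2018-09-22T19:16:05  -- no timezone, no sub-second precision
print(iso_8601)  # 2018-09-22T19:16:05.067954Z
```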
2019-10-10T16:31:01Z
[]
[]
pypi/warehouse
6,833
pypi__warehouse-6833
[ "5944" ]
a3a1161159768afd8271983622611f1d16a24a79
diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py --- a/warehouse/accounts/interfaces.py +++ b/warehouse/accounts/interfaces.py @@ -78,7 +78,7 @@ def create_user(username, name, password): A UserAlreadyExists Exception is raised if the user already exists. """ - def add_email(user_id, email_address, primary=False, verified=False): + def add_email(user_id, email_address, primary=False, verified=False, public=False): """ Adds an email for the provided user_id """ diff --git a/warehouse/accounts/models.py b/warehouse/accounts/models.py --- a/warehouse/accounts/models.py +++ b/warehouse/accounts/models.py @@ -120,6 +120,12 @@ def primary_email(self): if primaries: return primaries[0] + @property + def public_email(self): + publics = [x for x in self.emails if x.public] + if publics: + return publics[0] + @hybrid_property def email(self): primary_email = self.primary_email @@ -224,6 +230,7 @@ class Email(db.ModelBase): email = Column(String(length=254), nullable=False) primary = Column(Boolean, nullable=False) verified = Column(Boolean, nullable=False) + public = Column(Boolean, nullable=False, server_default=sql.false()) # Deliverability information unverify_reason = Column( diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -184,7 +184,9 @@ def create_user(self, username, name, password): return user - def add_email(self, user_id, email_address, primary=None, verified=False): + def add_email( + self, user_id, email_address, primary=None, verified=False, public=False + ): user = self.get_user(user_id) # If primary is None, then we're going to auto detect whether this should be the @@ -195,7 +197,11 @@ def add_email(self, user_id, email_address, primary=None, verified=False): primary = True if user.primary_email is None else False email = Email( - email=email_address, user=user, primary=primary, verified=verified + email=email_address, + user=user, + primary=primary, + verified=verified, + public=public, ) self.db.add(email) self.db.flush() # flush the db now so email.id is available diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -760,3 +760,14 @@ def profile_callout(user, request): ) def edit_profile_button(user, request): return {"user": user} + + +@view_config( + route_name="includes.profile-public-email", + context=User, + renderer="includes/accounts/profile-public-email.html", + uses_session=True, + has_translations=True, +) +def profile_public_email(user, request): + return {"user": user} diff --git a/warehouse/admin/views/users.py b/warehouse/admin/views/users.py --- a/warehouse/admin/views/users.py +++ b/warehouse/admin/views/users.py @@ -77,6 +77,7 @@ class EmailForm(forms.Form): ) primary = wtforms.fields.BooleanField() verified = wtforms.fields.BooleanField() + public = wtforms.fields.BooleanField() class UserForm(forms.Form): @@ -152,6 +153,7 @@ def user_add_email(request): user=user, primary=form.primary.data, verified=form.verified.data, + public=form.public.data, ) request.db.add(email) request.session.flash( diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -63,9 +63,28 @@ class ChangeRoleForm(RoleNameMixin, forms.Form): class SaveAccountForm(forms.Form): - __params__ = ["name"] + __params__ = ["name", "public_email"] name = wtforms.StringField() + public_email = 
wtforms.SelectField(choices=[("", "Not displayed")]) + + def __init__(self, *args, user_service, user_id, **kwargs): + super().__init__(*args, **kwargs) + self.user_service = user_service + self.user_id = user_id + user = user_service.get_user(user_id) + self.public_email.choices.extend( + [(e.email, e.email) for e in user.emails if e.verified] + ) + + def validate_public_email(self, field): + if field.data: + user = self.user_service.get_user(self.user_id) + verified_emails = [e.email for e in user.emails if e.verified] + if field.data not in verified_emails: + raise wtforms.validators.ValidationError( + "%s is not a verified email for %s" % (field.data, user.username) + ) class AddEmailForm(NewEmailMixin, forms.Form): diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -127,7 +127,12 @@ def active_projects(self): @property def default_response(self): return { - "save_account_form": SaveAccountForm(name=self.request.user.name), + "save_account_form": SaveAccountForm( + name=self.request.user.name, + public_email=getattr(self.request.user.public_email, "email", ""), + user_service=self.user_service, + user_id=self.request.user.id, + ), "add_email_form": AddEmailForm( user_service=self.user_service, user_id=self.request.user.id ), @@ -141,12 +146,20 @@ def default_response(self): def manage_account(self): return self.default_response - @view_config(request_method="POST", request_param=SaveAccountForm.__params__) + @view_config(request_method="POST", request_param=["name"]) def save_account(self): - form = SaveAccountForm(self.request.POST) + form = SaveAccountForm( + self.request.POST, + user_service=self.user_service, + user_id=self.request.user.id, + ) if form.validate(): - self.user_service.update_user(self.request.user.id, **form.data) + data = form.data + public_email = data.pop("public_email", "") + self.user_service.update_user(self.request.user.id, **data) + for email in self.request.user.emails: + email.public = email.email == public_email self.request.session.flash("Account details updated", queue="success") return {**self.default_response, "save_account_form": form} diff --git a/warehouse/migrations/versions/5c029d9ef925_add_email_public_column.py b/warehouse/migrations/versions/5c029d9ef925_add_email_public_column.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/5c029d9ef925_add_email_public_column.py @@ -0,0 +1,38 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +add email.public column + +Revision ID: 5c029d9ef925 +Revises: e133fc5aa3c1 +Create Date: 2020-01-19 22:25:53.901148 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "5c029d9ef925" +down_revision = "e133fc5aa3c1" + + +def upgrade(): + op.add_column( + "user_emails", + sa.Column( + "public", sa.Boolean(), server_default=sa.text("false"), nullable=False + ), + ) + + +def downgrade(): + op.drop_column("user_emails", "public") diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -93,6 +93,13 @@ def includeme(config): traverse="/{username}", domain=warehouse, ) + config.add_route( + "includes.profile-public-email", + "/_includes/profile-public-email/{username}", + factory="warehouse.accounts.models:UserFactory", + traverse="/{username}", + domain=warehouse, + ) # Classifier Routes config.add_route("classifiers", "/classifiers/", domain=warehouse) diff --git a/warehouse/utils/html.py b/warehouse/utils/html.py --- a/warehouse/utils/html.py +++ b/warehouse/utils/html.py @@ -47,7 +47,14 @@ def parse(self, parser): # Now we parse a single expression that is used as the URL we're going # to include - url = parser.parse_expression() + args = [parser.parse_expression()] + + # if there is a comma, the user provided a tag type. If not use + # 'div' as second parameter. + if parser.stream.skip_if("comma"): + args.append(parser.parse_expression()) + else: + args.append(nodes.Const("div")) # Now we parse the body of the csi block up to `endcsi` and drop the # needle (which would always be `endcsi` in that case). @@ -55,9 +62,9 @@ def parse(self, parser): # Now return a `CallBlock` node that calls our _csi helper method on # this extension. - n = nodes.CallBlock(self.call_method("_csi", [url]), [], [], body) + n = nodes.CallBlock(self.call_method("_csi", args), [], [], body) n = n.set_lineno(lineno) return n - def _csi(self, url, caller): - return f'<div data-html-include="{url}">{caller()}</div>' + def _csi(self, url, tag, caller): + return f'<{tag} data-html-include="{url}">{caller()}</{tag}>'
diff --git a/tests/common/db/accounts.py b/tests/common/db/accounts.py --- a/tests/common/db/accounts.py +++ b/tests/common/db/accounts.py @@ -51,5 +51,6 @@ class Meta: email = FuzzyEmail() verified = True primary = True + public = False unverify_reason = None transient_bounces = 0 diff --git a/tests/functional/manage/__init__.py b/tests/functional/manage/__init__.py new file mode 100644 --- /dev/null +++ b/tests/functional/manage/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/functional/manage/test_views.py b/tests/functional/manage/test_views.py new file mode 100644 --- /dev/null +++ b/tests/functional/manage/test_views.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from webob.multidict import MultiDict + +from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService +from warehouse.manage import views + +from ...common.db.accounts import EmailFactory, UserFactory + + +class TestManageAccount: + def test_save_account(self, pyramid_services, user_service, db_request): + breach_service = pretend.stub() + pyramid_services.register_service(IUserService, None, user_service) + pyramid_services.register_service( + IPasswordBreachedService, None, breach_service + ) + user = UserFactory.create(name="old name") + EmailFactory.create(primary=True, verified=True, public=True, user=user) + db_request.user = user + db_request.method = "POST" + db_request.path = "/manage/accounts/" + db_request.POST = MultiDict({"name": "new name", "public_email": ""}) + views.ManageAccountViews(db_request).save_account() + + user = user_service.get_user(user.id) + assert user.name == "new name" + assert user.public_email is None diff --git a/tests/unit/accounts/test_models.py b/tests/unit/accounts/test_models.py --- a/tests/unit/accounts/test_models.py +++ b/tests/unit/accounts/test_models.py @@ -71,6 +71,19 @@ def test_get_primary_email(self, db_session): assert user.email == email.email + def test_get_public_email(self, db_session): + user = DBUserFactory.create() + email = DBEmailFactory.create(user=user, verified=True, public=True) + DBEmailFactory.create(user=user, verified=True, public=False) + + assert user.public_email == email + + def test_no_public_email(self, db_session): + user = DBUserFactory.create() + DBEmailFactory.create(user=user, primary=True, verified=True) + + assert user.public_email is None + def test_query_by_email_when_primary(self, db_session): user = DBUserFactory.create() email = 
DBEmailFactory.create(user=user, primary=True) diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -1781,3 +1781,11 @@ def test_edit_profile_button(self): request = pretend.stub() assert views.edit_profile_button(user, request) == {"user": user} + + +class TestProfilePublicEmail: + def test_profile_public_email_returns_user(self): + user = pretend.stub() + request = pretend.stub() + + assert views.profile_public_email(user, request) == {"user": user} diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -486,3 +486,28 @@ def test_validate_macaroon_id(self): ) assert form.validate() + + +class TestSaveAccountForm: + def test_public_email_verified(self): + email = pretend.stub(verified=True, public=False, email="[email protected]") + user = pretend.stub(id=1, username=pretend.stub(), emails=[email]) + form = forms.SaveAccountForm( + name=pretend.stub(), + public_email=email.email, + user_service=pretend.stub(get_user=lambda _: user), + user_id=user.id, + ) + assert form.validate(), str(form.errors) + + def test_public_email_unverified(self): + email = pretend.stub(verified=False, public=False, email=pretend.stub()) + user = pretend.stub(id=1, username=pretend.stub(), emails=[email]) + form = forms.SaveAccountForm( + name=pretend.stub(), + public_email=email.email, + user_service=pretend.stub(get_user=lambda _: user), + user_id=user.id, + ) + assert not form.validate() + assert "is not a verified email for" in form.public_email.errors.pop() diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -54,7 +54,11 @@ class TestManageAccount: - def test_default_response(self, monkeypatch): + @pytest.mark.parametrize( + "public_email, expected_public_email", + [(None, ""), (pretend.stub(email="[email protected]"), "[email protected]")], + ) + def test_default_response(self, monkeypatch, public_email, expected_public_email): breach_service = pretend.stub() user_service = pretend.stub() name = pretend.stub() @@ -64,7 +68,7 @@ def test_default_response(self, monkeypatch): IPasswordBreachedService: breach_service, IUserService: user_service, }[iface], - user=pretend.stub(name=name, id=user_id), + user=pretend.stub(name=name, id=user_id, public_email=public_email), ) save_account_obj = pretend.stub() save_account_cls = pretend.call_recorder(lambda **kw: save_account_obj) @@ -90,7 +94,14 @@ def test_default_response(self, monkeypatch): } assert view.request == request assert view.user_service == user_service - assert save_account_cls.calls == [pretend.call(name=name)] + assert save_account_cls.calls == [ + pretend.call( + name=name, + public_email=expected_public_email, + user_service=user_service, + user_id=user_id, + ) + ] assert add_email_cls.calls == [ pretend.call(user_id=user_id, user_service=user_service) ] @@ -147,8 +158,16 @@ def test_save_account(self, monkeypatch): update_user = pretend.call_recorder(lambda *a, **kw: None) user_service = pretend.stub(update_user=update_user) request = pretend.stub( - POST={"name": "new name"}, - user=pretend.stub(id=pretend.stub(), name=pretend.stub()), + POST={"name": "new name", "public_email": ""}, + user=pretend.stub( + id=pretend.stub(), + name=pretend.stub(), + emails=[ + pretend.stub( + primary=True, verified=True, public=True, 
email=pretend.stub() + ) + ], + ), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, ) @@ -172,7 +191,7 @@ def test_save_account_validation_fails(self, monkeypatch): update_user = pretend.call_recorder(lambda *a, **kw: None) user_service = pretend.stub(update_user=update_user) request = pretend.stub( - POST={"name": "new name"}, + POST={"name": "new name", "public_email": ""}, user=pretend.stub(id=pretend.stub(), name=pretend.stub()), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), find_service=lambda *a, **kw: user_service, diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -115,6 +115,13 @@ def add_policy(name, filename): traverse="/{username}", domain=warehouse, ), + pretend.call( + "includes.profile-public-email", + "/_includes/profile-public-email/{username}", + factory="warehouse.accounts.models:UserFactory", + traverse="/{username}", + domain=warehouse, + ), pretend.call("classifiers", "/classifiers/", domain=warehouse), pretend.call("search", "/search/", domain=warehouse), pretend.call("stats", "/stats/", accept="text/html", domain=warehouse),
Add option to display primary email on public profile **What's the problem this feature will solve?** Currently there's no way to publicly display an email address as a point of contact for a PyPI account. Instead, maintainer/author emails for a user's packages must be used. **Describe the solution you'd like** Similar to GitHub, on the ["Account Settings"](https://pypi.org/manage/account/) page, we should have an option such as: > ☑️**Keep my email address private** > We won't show your primary email on your public PyPI profile page. This should default to "True" for all new and existing users. --- **Good First Issue**: This issue is good for first-time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).
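The rule at the heart of the patch above is that only an already-verified address may be made public. A minimal sketch of that validation, using hypothetical stand-in classes (`Email`, `User`, `validate_public_email`) rather than the real warehouse models:

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class Email:
    email: str
    verified: bool
    public: bool = False


@dataclass
class User:
    username: str
    emails: List[Email] = field(default_factory=list)


def validate_public_email(user: User, candidate: str) -> None:
    """Raise ValueError unless `candidate` is one of the user's verified emails."""
    verified = [e.email for e in user.emails if e.verified]
    if candidate and candidate not in verified:
        raise ValueError(f"{candidate} is not a verified email for {user.username}")


alice = User("alice", [Email("alice@example.com", verified=True)])
validate_public_email(alice, "alice@example.com")    # passes silently
# validate_public_email(alice, "other@example.com")  # would raise ValueError
```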
This would be a great addition! :D @pgadige is this something you'd like to try working on? Thank you, @brainwane, for suggesting this one. I'm working on it. @pgadige How is it going? Do you have a branch somewhere that we can look at? @brainwane, I'm figuring out a few JavaScript things (I'm a newbie to JS) for displaying the email address properly on the page. I managed to break the CSS of the page a couple of times. I think I'll try to submit a PR by the EOD so the reviewers can show me what exactly I'm doing wrong. @pgadige Looking forward to that PR! I just emailed with @pgadige and she'll be working on sharing a work-in-progress pull request with us in the next few days. @pgadige: If you are working on this issue and have questions or get stuck, please feel free to speak up here, or in [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or on the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).
2019-10-14T19:13:15Z
[]
[]
pypi/warehouse
7,013
pypi__warehouse-7013
[ "1683" ]
a7d2321ff045619a97e92907d2182fd592639c61
diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -302,6 +302,14 @@ def includeme(config): # RSS config.add_route("rss.updates", "/rss/updates.xml", domain=warehouse) config.add_route("rss.packages", "/rss/packages.xml", domain=warehouse) + config.add_route( + "rss.project.releases", + "/rss/project/{name}/releases.xml", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{name}/", + read_only=True, + domain=warehouse, + ) # Legacy URLs config.add_route("legacy.api.simple.index", "/simple/", domain=warehouse) diff --git a/warehouse/rss/views.py b/warehouse/rss/views.py --- a/warehouse/rss/views.py +++ b/warehouse/rss/views.py @@ -104,3 +104,33 @@ def rss_packages(request): ] return {"newest_projects": tuple(zip(newest_projects, project_authors))} + + +@view_config( + route_name="rss.project.releases", + context=Project, + renderer="rss/project_releases.xml", + decorator=[ + origin_cache( + 1 * 24 * 60 * 60, stale_if_error=5 * 24 * 60 * 60 # 1 day, 5 days stale + ) + ], +) +def rss_project_releases(project, request): + request.response.content_type = "text/xml" + + request.find_service(name="csp").merge(XML_CSP) + + latest_releases = ( + request.db.query(Release) + .filter(Release.project == project) + .order_by(Release.created.desc()) + .limit(40) + .all() + ) + release_authors = [_format_author(release) for release in latest_releases] + + return { + "project": project, + "latest_releases": tuple(zip(latest_releases, release_authors)), + }
diff --git a/tests/unit/rss/test_views.py b/tests/unit/rss/test_views.py --- a/tests/unit/rss/test_views.py +++ b/tests/unit/rss/test_views.py @@ -73,6 +73,35 @@ def test_rss_packages(db_request): assert db_request.response.content_type == "text/xml" +def test_rss_project_releases(db_request): + db_request.find_service = pretend.call_recorder( + lambda *args, **kwargs: pretend.stub( + enabled=False, csp_policy=pretend.stub(), merge=lambda _: None + ) + ) + + db_request.session = pretend.stub() + + project = ProjectFactory.create() + + release_v1 = ReleaseFactory.create(project=project, version="1.0.0") + release_v1.created = datetime.date(2018, 1, 1) + release_v3 = ReleaseFactory.create(project=project, version="3.0.0") + release_v3.created = datetime.date(2019, 1, 1) + release_v2 = ReleaseFactory.create(project=project, version="2.0.0") + release_v2.created = datetime.date(2020, 1, 1) + + release_v3.author_email = "[email protected]" + + assert rss.rss_project_releases(project, db_request) == { + "project": project, + "latest_releases": tuple( + zip((release_v2, release_v3, release_v1), (None, "[email protected]", None)) + ), + } + assert db_request.response.content_type == "text/xml" + + def test_format_author(db_request): db_request.find_service = pretend.call_recorder( lambda *args, **kwargs: pretend.stub( diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -303,6 +303,14 @@ def add_policy(name, filename): pretend.call("ses.hook", "/_/ses-hook/", domain=warehouse), pretend.call("rss.updates", "/rss/updates.xml", domain=warehouse), pretend.call("rss.packages", "/rss/packages.xml", domain=warehouse), + pretend.call( + "rss.project.releases", + "/rss/project/{name}/releases.xml", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{name}/", + read_only=True, + domain=warehouse, + ), pretend.call("legacy.api.simple.index", "/simple/", domain=warehouse), pretend.call( "legacy.api.simple.detail",
Package update feeds Was just having a thought about a neat feature that would be really helpful for me and probably others. Thinking of downstream/secondary package maintainers, for instance, though others may find it valuable too. My primary personal use case is helping keep [conda-forge]( http://conda-forge.github.io/ ) up to date, though maybe Linux packagers and others would be interested. It would be really neat to have some kind of feed that would be published to when a particular package is updated. Mainly interested in version bumps, but potentially other events could be valuable if people are interested. It would be good if this feed were designed so that a person or bot could subscribe, ideally without an account. Also it might be nice to have some way to filter out dev/prereleases from stable releases.
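A hedged sketch of how a bot might poll the per-project feed this request led to (the `/rss/project/{name}/releases.xml` route in the patch above), skipping dev/pre-releases. It assumes each feed item's `<title>` is a bare version string (the feed template itself is not shown in the diff) and uses only the standard library plus the `packaging` package:

```python
import urllib.request
import xml.etree.ElementTree as ET

from packaging.version import InvalidVersion, Version


def stable_releases(project: str):
    """Yield non-prerelease versions from a project's releases feed."""
    url = f"https://pypi.org/rss/project/{project}/releases.xml"
    with urllib.request.urlopen(url) as response:
        tree = ET.parse(response)
    for item in tree.iterfind("./channel/item"):
        title = item.findtext("title") or ""
        try:
            version = Version(title)
        except InvalidVersion:
            continue  # skip items whose title is not a bare version string
        if not version.is_prerelease:
            yield version


# Example usage: print(list(stable_releases("requests")))
```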
This is definitely one of those features I would expect from a package index. Right now I keep track of updates by subscribing to the `releases` feeds of the repos of the projects I'm interested in. That's certainly another option, though not everyone is great about keeping their releases (or tags) in sync with PyPI releases. That said, I expect we will use a variety of different sources. Another use-case for this: being able to track when new files are uploaded to an existing package version (e.g. additional wheel variants being uploaded) - so tools like pyup.io can know to update the requirements files with the additional hashes. I figure this would be based on the same mechanism that could send out notification emails, as requested in #98. Thanks for your note and sorry for the slow response! The folks working on Warehouse have gotten [funding to concentrate on improving and deploying Warehouse](https://pyfound.blogspot.com/2017/11/the-psf-awarded-moss-grant-pypi.html), and have kicked off work towards [our development roadmap](https://wiki.python.org/psf/WarehouseRoadmap) -- the most urgent task is to improve Warehouse to the point where we can redirect pypi.python.org to pypi.org so the site is more sustainable and reliable. Since this feature isn't something that the legacy site has, I've moved it to a future milestone. Thanks and sorry again for the wait. Folks who need this might want to check whether the Libraries.io API for https://libraries.io/pypi might suit their needs in the short term. Here's a request for RSS feeds specifically: https://github.com/pypa/warehouse/issues/5612
2019-11-17T20:28:49Z
[]
[]
pypi/warehouse
7,124
pypi__warehouse-7124
[ "6051" ]
fb550767791e40f0435dcc83b6885516b970ea5f
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -162,6 +162,7 @@ def configure(settings=None): maybe_set( settings, "warehouse.release_files_table", "WAREHOUSE_RELEASE_FILES_TABLE" ) + maybe_set(settings, "github.token", "GITHUB_TOKEN") maybe_set(settings, "warehouse.trending_table", "WAREHOUSE_TRENDING_TABLE") maybe_set(settings, "celery.broker_url", "BROKER_URL") maybe_set(settings, "celery.result_url", "REDIS_URL") @@ -363,8 +364,8 @@ def configure(settings=None): # Register support for our legacy action URLs config.include(".legacy.action_routing") - # Register support for our domain predicates - config.include(".domain") + # Register support for our custom predicates + config.include(".predicates") # Register support for template views. config.add_directive("add_template_view", template_view, action_wrap=False) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -192,6 +192,11 @@ def send_password_compromised_email_hibp(request, user): return {} +@_email("token-compromised-leak", allow_unverified=True) +def send_token_compromised_email_leak(request, user, *, public_url, origin): + return {"username": user.username, "public_url": public_url, "origin": origin} + + @_email("account-deleted") def send_account_deletion_email(request, user): return {"username": user.username} diff --git a/warehouse/integrations/__init__.py b/warehouse/integrations/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/warehouse/integrations/github/__init__.py b/warehouse/integrations/github/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/warehouse/integrations/github/tasks.py b/warehouse/integrations/github/tasks.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/tasks.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from warehouse import tasks +from warehouse.integrations.github import utils + + [email protected](ignore_result=True, acks_late=True) +def analyze_disclosure_task(task, request, disclosure_record, origin): + utils.analyze_disclosure( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) diff --git a/warehouse/integrations/github/utils.py b/warehouse/integrations/github/utils.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/utils.py @@ -0,0 +1,370 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import re +import time + +from typing import Optional + +import requests + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.hashes import SHA256 + +from warehouse.email import send_token_compromised_email_leak +from warehouse.macaroons.caveats import InvalidMacaroon +from warehouse.macaroons.interfaces import IMacaroonService +from warehouse.metrics import IMetricsService + + +class ExtractionFailed(Exception): + pass + + +class TokenLeakMatcher: + """ + A TokenLeakMatcher is linked to a specific regex pattern. When provided + a string that matches this pattern, the matcher can extract a token-like string + from it. 
+ """ + + name: str + pattern: re.Pattern + + def extract(self, text): + raise NotImplementedError + + +class PlainTextTokenLeakMatcher(TokenLeakMatcher): + name = "token" + # Macaroons are urlsafe_b64 encoded so non-alphanumeric chars are - and _ + # https://github.com/ecordell/pymacaroons/blob/06b55110eda2fb192c130dee0bcedf8b124d1056/pymacaroons/serializers/binary_serializer.py#L32 + pattern = re.compile(r"pypi-[A-Za-z0-9-_=]+") + + def extract(self, text): + """ + From a string containing everything that was matched, extract the token + to check + """ + return text + + +class Base64BasicAuthTokenLeakMatcher(TokenLeakMatcher): + name = "base64-basic-auth" + # This is what we would expect to find if a basic auth value was leaked + # The following string was obtained by: + # base64.b64encode(b"__token__:pypi-").decode("utf-8") + # Basic auth is standard base64, so non-alphanumeric chars are + and / + pattern = re.compile(r"X190b2tlbl9fOnB5cGkt[A-Za-z0-9+/=]+") + + def extract(self, text): + try: + _, token = ( + base64.b64decode(text.encode("utf-8")).decode("utf-8").split(":", 1) + ) + return token + except Exception as exc: + raise ExtractionFailed from exc + + +TOKEN_LEAK_MATCHERS = { + matcher.name: matcher + for matcher in [PlainTextTokenLeakMatcher(), Base64BasicAuthTokenLeakMatcher()] +} + + +class InvalidTokenLeakRequest(Exception): + def __init__(self, message, reason): + self.reason = reason + super().__init__(message) + + +class TokenLeakDisclosureRequest: + def __init__(self, token: str, public_url: str): + self.token = token + self.public_url = public_url + + @classmethod + def from_api_record(cls, record, *, matchers=TOKEN_LEAK_MATCHERS): + + if not isinstance(record, dict): + raise InvalidTokenLeakRequest( + f"Record is not a dict but: {str(record)[:100]}", reason="format" + ) + + missing_keys = sorted({"token", "type", "url"} - set(record)) + if missing_keys: + raise InvalidTokenLeakRequest( + f"Record is missing attribute(s): {', '.join(missing_keys)}", + reason="format", + ) + + matcher_code = record["type"] + + matcher = matchers.get(matcher_code) + if not matcher: + raise InvalidTokenLeakRequest( + f"Matcher with code {matcher_code} not found. " + f"Available codes are: {', '.join(matchers)}", + reason="invalid_matcher", + ) + + try: + extracted_token = matcher.extract(record["token"]) + except ExtractionFailed: + raise InvalidTokenLeakRequest( + "Cannot extract token from received match", reason="extraction" + ) + + return cls(token=extracted_token, public_url=record["url"]) + + +class GitHubPublicKeyMetaAPIError(InvalidTokenLeakRequest): + pass + + +class CacheMiss(Exception): + pass + + +PUBLIC_KEYS_CACHE_TIME = 60 * 30 # 30 minutes + + +class GitHubTokenScanningPayloadVerifier: + """ + Checks payload signature using: + - `requests` for HTTP calls + - `cryptography` for signature verification + """ + + def __init__(self, *, session, metrics, api_token: Optional[str] = None): + self._metrics = metrics + self._session = session + self._api_token = api_token + + self.public_keys_cached_at = 0 + self.public_keys_cache = None + + def verify(self, *, payload, key_id, signature): + + public_key = None + try: + public_keys = self._get_cached_public_keys() + public_key = self._check_public_key( + github_public_keys=public_keys, key_id=key_id + ) + except (CacheMiss, InvalidTokenLeakRequest): + # No cache or outdated cache, it's ok, we'll do a real call.
+ # Just record a metric so that we can know if all calls lead to + # cache misses + self._metrics.increment("warehouse.token_leak.github.auth.cache.miss") + else: + self._metrics.increment("warehouse.token_leak.github.auth.cache.hit") + + try: + if not public_key: + pubkey_api_data = self._retrieve_public_key_payload() + public_keys = self._extract_public_keys(pubkey_api_data) + public_key = self._check_public_key( + github_public_keys=public_keys, key_id=key_id + ) + + self._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + except InvalidTokenLeakRequest as exc: + self._metrics.increment( + f"warehouse.token_leak.github.auth.error.{exc.reason}" + ) + return False + + self._metrics.increment("warehouse.token_leak.github.auth.success") + return True + + def _get_cached_public_keys(self): + if not self.public_keys_cache: + raise CacheMiss + + if self.public_keys_cached_at + PUBLIC_KEYS_CACHE_TIME < time.time(): + raise CacheMiss + + return self.public_keys_cache + + def _headers_auth(self): + if not self._api_token: + return {} + return {"Authorization": f"token {self._api_token}"} + + def _retrieve_public_key_payload(self): + + token_scanning_pubkey_api_url = ( + "https://api.github.com/meta/public_keys/token_scanning" + ) + + try: + response = self._session.get( + token_scanning_pubkey_api_url, headers=self._headers_auth() + ) + response.raise_for_status() + return response.json() + except requests.HTTPError as exc: + raise GitHubPublicKeyMetaAPIError( + f"Invalid response code {response.status_code}: {response.text[:100]}", + f"public_key_api.status.{response.status_code}", + ) from exc + except json.JSONDecodeError as exc: + raise GitHubPublicKeyMetaAPIError( + f"Non-JSON response received: {response.text[:100]}", + "public_key_api.invalid_json", + ) from exc + except requests.RequestException as exc: + raise GitHubPublicKeyMetaAPIError( + "Could not connect to GitHub", "public_key_api.network_error" + ) from exc + + def _extract_public_keys(self, pubkey_api_data): + if not isinstance(pubkey_api_data, dict): + raise GitHubPublicKeyMetaAPIError( + f"Payload is not a dict but: {str(pubkey_api_data)[:100]}", + "public_key_api.format_error", + ) + try: + public_keys = pubkey_api_data["public_keys"] + except KeyError: + raise GitHubPublicKeyMetaAPIError( + "Payload misses 'public_keys' attribute", "public_key_api.format_error" + ) + + if not isinstance(public_keys, list): + raise GitHubPublicKeyMetaAPIError( + "Payload 'public_keys' attribute is not a list", + "public_key_api.format_error", + ) + + expected_attributes = {"key", "key_identifier"} + for public_key in public_keys: + + if not isinstance(public_key, dict): + raise GitHubPublicKeyMetaAPIError( + f"Key is not a dict but: {public_key}", + "public_key_api.format_error", + ) + + attributes = set(public_key) + if not expected_attributes <= attributes: + raise GitHubPublicKeyMetaAPIError( + "Missing attribute in key: " + f"{sorted(expected_attributes - attributes)}", + "public_key_api.format_error", + ) + + yield {"key": public_key["key"], "key_id": public_key["key_identifier"]} + + self.public_keys_cache = public_keys + + def _check_public_key(self, github_public_keys, key_id): + for record in github_public_keys: + if record["key_id"] == key_id: + return record["key"] + + raise InvalidTokenLeakRequest( + f"Key {key_id} not found in github public keys", reason="wrong_key_id" + ) + + def _check_signature(self, payload, public_key, signature): + try: + loaded_public_key = 
serialization.load_pem_public_key( + data=public_key.encode("utf-8"), backend=default_backend() + ) + loaded_public_key.verify( + signature=base64.b64decode(signature), + data=payload.encode("utf-8"), + # This validates the ECDSA and SHA256 part + signature_algorithm=ECDSA(algorithm=SHA256()), + ) + except InvalidSignature as exc: + raise InvalidTokenLeakRequest( + "Invalid signature", "invalid_signature" + ) from exc + except Exception as exc: + # Maybe the key is not a valid ECDSA key, maybe the data is not properly + # padded, etc. So many things can go wrong... + raise InvalidTokenLeakRequest( + "Invalid cryptographic values", "invalid_crypto" + ) from exc + + +def _analyze_disclosure(request, disclosure_record, origin): + + metrics = request.find_service(IMetricsService, context=None) + + metrics.increment(f"warehouse.token_leak.{origin}.received") + + try: + disclosure = TokenLeakDisclosureRequest.from_api_record( + record=disclosure_record + ) + except InvalidTokenLeakRequest as exc: + metrics.increment(f"warehouse.token_leak.{origin}.error.{exc.reason}") + return + + macaroon_service = request.find_service(IMacaroonService, context=None) + try: + database_macaroon = macaroon_service.check_if_macaroon_exists( + raw_macaroon=disclosure.token + ) + except InvalidMacaroon: + metrics.increment(f"warehouse.token_leak.{origin}.error.invalid") + return + + metrics.increment(f"warehouse.token_leak.{origin}.valid") + + macaroon_service.delete_macaroon(macaroon_id=str(database_macaroon.id)) + + send_token_compromised_email_leak( + request, + database_macaroon.user, + public_url=disclosure.public_url, + origin=origin, + ) + metrics.increment(f"warehouse.token_leak.{origin}.processed") + + +def analyze_disclosure(request, disclosure_record, origin): + try: + _analyze_disclosure( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) + except Exception: + metrics = request.find_service(IMetricsService, context=None) + metrics.increment(f"warehouse.token_leak.{origin}.error.unknown") + raise + + +def analyze_disclosures(disclosure_records, origin, metrics): + from warehouse.integrations.github import tasks + + if not isinstance(disclosure_records, list): + metrics.increment(f"warehouse.token_leak.{origin}.error.format") + raise InvalidTokenLeakRequest("Invalid format: payload is not a list", "format") + + for disclosure_record in disclosure_records: + tasks.analyze_disclosure_task.delay( + disclosure_record=disclosure_record, origin=origin + ) diff --git a/warehouse/integrations/github/views.py b/warehouse/integrations/github/views.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/views.py @@ -0,0 +1,70 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +import json + +from pyramid.response import Response +from pyramid.view import view_config + +from warehouse.integrations.github import utils +from warehouse.metrics import IMetricsService + + +@view_config( + require_methods=["POST"], + require_csrf=False, + renderer="json", + route_name="integrations.github.disclose-token", + # If those headers are missing, response will be a 404 + require_headers=["GITHUB-PUBLIC-KEY-IDENTIFIER", "GITHUB-PUBLIC-KEY-SIGNATURE"], + has_translations=False, +) +def github_disclose_token(request): + # GitHub calls this API view when they have identified a string matching + # the regular expressions we provided them. + # Our job is to validate we're talking to GitHub, check if the string contains + # valid credentials and, if it does, invalidate them and warn the owner + + # The documentation for this process is at + # https://developer.github.com/partnerships/token-scanning/ + + body = request.body + + # Thanks to the predicates, we know the headers we need are defined. + key_id = request.headers.get("GITHUB-PUBLIC-KEY-IDENTIFIER") + signature = request.headers.get("GITHUB-PUBLIC-KEY-SIGNATURE") + metrics = request.find_service(IMetricsService, context=None) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=request.http, + metrics=metrics, + api_token=request.registry.settings.get("github.token"), + ) + + if not verifier.verify(payload=body, key_id=key_id, signature=signature): + return Response(status=400) + + try: + disclosures = request.json_body + except json.decoder.JSONDecodeError: + metrics.increment("warehouse.token_leak.github.error.payload.json_error") + return Response(status=400) + + try: + utils.analyze_disclosures( + disclosure_records=disclosures, origin="github", metrics=metrics + ) + except utils.InvalidTokenLeakRequest: + return Response(status=400) + + # 204 No Content: we acknowledge but we won't comment on the outcome. + return Response(status=204) diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py --- a/warehouse/macaroons/caveats.py +++ b/warehouse/macaroons/caveats.py @@ -83,7 +83,9 @@ def __init__(self, macaroon, context, principals, permission): def verify(self, key): self.verifier.satisfy_general(V1Caveat(self)) + self.verify_signature(key=key) + def verify_signature(self, key): try: return self.verifier.verify(self.macaroon, key) except ( diff --git a/warehouse/macaroons/interfaces.py b/warehouse/macaroons/interfaces.py --- a/warehouse/macaroons/interfaces.py +++ b/warehouse/macaroons/interfaces.py @@ -43,6 +43,14 @@ def verify(raw_macaroon, context, principals, permission): Raises InvalidMacaroon if the macaroon is not valid. """ + def check_if_macaroon_exists(raw_macaroon): + """ + Returns the database macaroon if the given raw (serialized) macaroon is + an existing valid macaroon, whatever its permissions. + + Raises InvalidMacaroon otherwise. + """ + def create_macaroon(location, user_id, description, caveats): """ Returns a new raw (serialized) macaroon. The description provided diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -117,6 +117,32 @@ def verify(self, raw_macaroon, context, principals, permission): raise InvalidMacaroon("invalid macaroon") + def check_if_macaroon_exists(self, raw_macaroon): + """ + Returns the database macaroon if the given raw (serialized) macaroon is + an existing valid macaroon, whatever its permissions. + + Raises InvalidMacaroon otherwise.
+ """ + raw_macaroon = self._extract_raw_macaroon(raw_macaroon) + if raw_macaroon is None: + raise InvalidMacaroon("malformed or nonexistent macaroon") + + try: + m = pymacaroons.Macaroon.deserialize(raw_macaroon) + except MacaroonDeserializationException: + raise InvalidMacaroon("malformed macaroon") + + dm = self.find_macaroon(m.identifier.decode()) + + if dm is None: + raise InvalidMacaroon("deleted or nonexistent macaroon") + + verifier = Verifier(m, context=None, principals=None, permission=None) + verifier.verify_signature(dm.key) + + return dm + def create_macaroon(self, location, user_id, description, caveats): """ Returns a tuple of a new raw (serialized) macaroon and its DB model. diff --git a/warehouse/domain.py b/warehouse/predicates.py similarity index 61% rename from warehouse/domain.py rename to warehouse/predicates.py --- a/warehouse/domain.py +++ b/warehouse/predicates.py @@ -10,6 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import List + +from pyramid import predicates +from pyramid.exceptions import ConfigurationError from pyramid.util import is_same_domain @@ -31,5 +35,26 @@ def __call__(self, info, request): return is_same_domain(request.domain, self.val) +class HeadersPredicate: + def __init__(self, val: List[str], config): + if not val: + raise ConfigurationError( + "Expected at least one value in headers predicate" + ) + + self.sub_predicates = [ + predicates.HeaderPredicate(subval, config) for subval in val + ] + + def text(self): + return ", ".join(sub.text() for sub in self.sub_predicates) + + phash = text + + def __call__(self, context, request): + return all(sub(context, request) for sub in self.sub_predicates) + + def includeme(config): config.add_route_predicate("domain", DomainPredicate) + config.add_view_predicate("require_headers", HeadersPredicate) diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -163,7 +163,6 @@ def includeme(config): "/account/verify-project-role/", domain=warehouse, ) - # Management (views for logged-in users) config.add_route("manage.account", "/manage/account/", domain=warehouse) config.add_route( @@ -325,6 +324,13 @@ def includeme(config): read_only=True, domain=warehouse, ) + # Integration URLs + + config.add_route( + "integrations.github.disclose-token", + "/_/github/disclose-token", + domain=warehouse, + ) # Legacy URLs config.add_route("legacy.api.simple.index", "/simple/", domain=warehouse)
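The verification step at the heart of the patch above can be exercised on its own. A stripped-down sketch mirroring the calls the patch makes with the `cryptography` package (the function name and inputs here are illustrative; per the patch, GitHub publishes the PEM key at its `/meta/public_keys/token_scanning` endpoint and sends the base64-encoded ECDSA/SHA-256 signature in the GITHUB-PUBLIC-KEY-SIGNATURE header):

```python
import base64

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ec import ECDSA
from cryptography.hazmat.primitives.hashes import SHA256


def is_valid_signature(payload: str, public_key_pem: str, signature_b64: str) -> bool:
    """Check a GitHub token-scanning payload against one published public key."""
    public_key = serialization.load_pem_public_key(
        data=public_key_pem.encode("utf-8"), backend=default_backend()
    )
    try:
        # GitHub signs the exact raw request body; any re-serialization
        # of the JSON before verifying would break the signature.
        public_key.verify(
            signature=base64.b64decode(signature_b64),
            data=payload.encode("utf-8"),
            signature_algorithm=ECDSA(algorithm=SHA256()),
        )
    except InvalidSignature:
        return False
    return True
```

As in the view above, a failed check should produce a plain 400 response with no detail, so a forged request learns nothing about whether any token matched.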
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py --- a/tests/unit/accounts/test_core.py +++ b/tests/unit/accounts/test_core.py @@ -325,6 +325,7 @@ def test_includeme(monkeypatch): set_authentication_policy=pretend.call_recorder(lambda p: None), set_authorization_policy=pretend.call_recorder(lambda p: None), maybe_dotted=pretend.call_recorder(lambda path: path), + add_route_predicate=pretend.call_recorder(lambda name, cls: None), ) accounts.includeme(config) diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -766,6 +766,82 @@ def test_password_compromised_email_hibp( ] +class TestTokenCompromisedLeakEmail: + @pytest.mark.parametrize("verified", [True, False]) + def test_password_compromised_email( + self, pyramid_request, pyramid_config, monkeypatch, verified + ): + stub_user = pretend.stub( + id=3, + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=verified), + ) + pyramid_request.user = None + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub(one=lambda: stub_user) + ), + ) + + subject_renderer = pyramid_config.testing_add_renderer( + "email/token-compromised-leak/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/token-compromised-leak/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/token-compromised-leak/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_token_compromised_email_leak( + pyramid_request, stub_user, public_url="http://example.com", origin="github" + ) + + assert result == { + "username": "username", + "public_url": "http://example.com", + "origin": "github", + } + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{stub_user.username} <{stub_user.email}>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": 3, + "ip_address": "1.2.3.4", + "additional": { + "from_": None, + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] + + class TestPasswordCompromisedEmail: @pytest.mark.parametrize("verified", [True, False]) def test_password_compromised_email( diff --git a/tests/unit/integration/__init__.py b/tests/unit/integration/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/integration/github/__init__.py b/tests/unit/integration/github/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/integration/github/test_tasks.py b/tests/unit/integration/github/test_tasks.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/test_tasks.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.integrations.github import tasks, utils + + +def test_analyze_disclosure_task(monkeypatch): + analyze_disclosure = pretend.call_recorder(lambda *a, **k: None) + monkeypatch.setattr(utils, "analyze_disclosure", analyze_disclosure) + + task = pretend.stub() + request = pretend.stub() + disclosure_record = pretend.stub() + origin = pretend.stub() + + tasks.analyze_disclosure_task( + task=task, + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) + + assert analyze_disclosure.calls == [ + pretend.call( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) + ] diff --git a/tests/unit/integration/github/test_utils.py b/tests/unit/integration/github/test_utils.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/test_utils.py @@ -0,0 +1,653 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import base64 +import collections +import json +import time + +import pretend +import pytest +import requests + +from warehouse.integrations.github import tasks, utils + +basic_auth_pypi_1234 = base64.b64encode(b"__token__:pypi-1234").decode("utf-8") + + +def test_token_leak_matcher_extract(): + with pytest.raises(NotImplementedError): + utils.TokenLeakMatcher().extract("a") + + +def test_plain_text_token_leak_matcher_extract(): + assert utils.PlainTextTokenLeakMatcher().extract("a") == "a" + + +def test_base64_basic_auth_token_leak_extract(): + assert ( + utils.Base64BasicAuthTokenLeakMatcher().extract(basic_auth_pypi_1234) + == "pypi-1234" + ) + + [email protected]( + "input", [base64.b64encode(b"pypi-1234").decode("utf-8"), "foo bar"] +) +def test_base64_basic_auth_token_leak_extract_error(input): + with pytest.raises(utils.ExtractionFailed): + utils.Base64BasicAuthTokenLeakMatcher().extract(input) + + +def test_invalid_token_leak_request(): + exc = utils.InvalidTokenLeakRequest("a", "b") + + assert str(exc) == "a" + assert exc.reason == "b" + + [email protected]( + "record, error, reason", + [ + (None, "Record is not a dict but: None", "format"), + ({}, "Record is missing attribute(s): token, type, url", "format"), + ( + {"type": "not_found", "token": "a", "url": "b"}, + "Matcher with code not_found not found. " + "Available codes are: token, base64-basic-auth", + "invalid_matcher", + ), + ( + {"type": "base64-basic-auth", "token": "foo bar", "url": "a"}, + "Cannot extract token from received match", + "extraction", + ), + ], +) +def test_token_leak_disclosure_request_from_api_record_error(record, error, reason): + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + utils.TokenLeakDisclosureRequest.from_api_record(record) + + assert str(exc.value) == error + assert exc.value.reason == reason + + [email protected]( + "type, token", + [("token", "pypi-1234"), ("base64-basic-auth", basic_auth_pypi_1234)], +) +def test_token_leak_disclosure_request_from_api_record(type, token): + request = utils.TokenLeakDisclosureRequest.from_api_record( + {"type": type, "token": token, "url": "http://example.com"} + ) + + assert request.token == "pypi-1234" + assert request.public_url == "http://example.com" + + +class TestGitHubTokenScanningPayloadVerifier: + def test_init(self): + metrics = pretend.stub() + session = pretend.stub() + token = "api_token" + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, api_token=token + ) + + assert verifier._session is session + assert verifier._metrics is metrics + assert verifier._api_token == token + + def test_verify_cache_miss(self): + # Example taken from + # https://gist.github.com/ewjoachim/7dde11c31d9686ed6b4431c3ca166da2 + meta_payload = { + "public_keys": [ + { + "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" "-----END PUBLIC KEY-----", + "is_current": True, + } + ] + } + response = pretend.stub( + json=lambda: meta_payload, raise_for_status=lambda: None + ) + session = pretend.stub(get=lambda *a, **k: response) + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, api_token="api-token" + ) + key_id = "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a"
signature = ( + "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + '[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + assert ( + verifier.verify(payload=payload, key_id=key_id, signature=signature) is True + ) + + assert metrics.increment.calls == [ + pretend.call("warehouse.token_leak.github.auth.cache.miss"), + pretend.call("warehouse.token_leak.github.auth.success"), + ] + + def test_verify_cache_hit(self): + session = pretend.stub() + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, api_token="api-token" + ) + verifier.public_keys_cached_at = time.time() + verifier.public_keys_cache = [ + { + "key_id": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + } + ] + + key_id = "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a" + signature = ( + "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + '[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + assert ( + verifier.verify(payload=payload, key_id=key_id, signature=signature) is True + ) + + assert metrics.increment.calls == [ + pretend.call("warehouse.token_leak.github.auth.cache.hit"), + pretend.call("warehouse.token_leak.github.auth.success"), + ] + + def test_verify_error(self): + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=metrics, api_token="api-token" + ) + verifier._retrieve_public_key_payload = pretend.raiser( + utils.InvalidTokenLeakRequest("Bla", "bla") + ) + + assert verifier.verify(payload={}, key_id="a", signature="a") is False + + assert metrics.increment.calls == [ + pretend.call("warehouse.token_leak.github.auth.cache.miss"), + pretend.call("warehouse.token_leak.github.auth.error.bla"), + ] + + def test_headers_auth_no_token(self): + headers = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub(), api_token=None + )._headers_auth() + assert headers == {} + + def test_headers_auth_token(self): + headers = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub(), api_token="api-token" + )._headers_auth() + assert headers == {"Authorization": "token api-token"} + + def test_retrieve_public_key_payload(self): + meta_payload = { + "public_keys": [ + { + "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + "is_current": True, + } + ] + } + response = pretend.stub( + json=lambda: meta_payload, raise_for_status=lambda: None + ) + session = pretend.stub(get=pretend.call_recorder(lambda *a, 
**k: response)) + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, api_token="api-token" + ) + assert verifier._retrieve_public_key_payload() == meta_payload + assert session.get.calls == [ + pretend.call( + "https://api.github.com/meta/public_keys/token_scanning", + headers={"Authorization": "token api-token"}, + ) + ] + + def test_get_cached_public_key_cache_hit(self): + metrics = pretend.stub() + session = pretend.stub() + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics + ) + verifier.public_keys_cached_at = time.time() + cache = verifier.public_keys_cache = pretend.stub() + + assert verifier._get_cached_public_keys() is cache + + def test_get_cached_public_key_cache_miss_no_cache(self): + metrics = pretend.stub() + session = pretend.stub() + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics + ) + + with pytest.raises(utils.CacheMiss): + verifier._get_cached_public_keys() + + def test_get_cached_public_key_cache_miss_too_old(self): + metrics = pretend.stub() + session = pretend.stub() + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics + ) + verifier.public_keys_cache = pretend.stub() + + with pytest.raises(utils.CacheMiss): + verifier._get_cached_public_keys() + + def test_retrieve_public_key_payload_http_error(self): + response = pretend.stub( + status_code=418, + text="I'm a teapot", + raise_for_status=pretend.raiser(requests.HTTPError), + ) + session = pretend.stub( + get=lambda *a, **k: response, + ) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=pretend.stub() + ) + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + verifier._retrieve_public_key_payload() + + assert str(exc.value) == "Invalid response code 418: I'm a teapot" + assert exc.value.reason == "public_key_api.status.418" + + def test_retrieve_public_key_payload_json_error(self): + response = pretend.stub( + text="Still a non-json teapot", + json=pretend.raiser(json.JSONDecodeError("", "", 3)), + raise_for_status=lambda: None, + ) + session = pretend.stub(get=lambda *a, **k: response) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=pretend.stub() + ) + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + verifier._retrieve_public_key_payload() + + assert str(exc.value) == "Non-JSON response received: Still a non-json teapot" + assert exc.value.reason == "public_key_api.invalid_json" + + def test_retrieve_public_key_payload_connection_error(self): + session = pretend.stub(get=pretend.raiser(requests.ConnectionError)) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=pretend.stub() + ) + + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + verifier._retrieve_public_key_payload() + + assert str(exc.value) == "Could not connect to GitHub" + assert exc.value.reason == "public_key_api.network_error" + + def test_extract_public_keys(self): + meta_payload = { + "public_keys": [ + { + "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + "is_current": True, + } + ] + } + verifier = 
utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + + keys = list(verifier._extract_public_keys(pubkey_api_data=meta_payload)) + + assert keys == [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcD" + "QgAE9MJJHnMfn2+H4xL4YaPDA4RpJqUq\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ" + "8qpVIW4clayyef9gWhFbNHWAA==\n-----END PUBLIC KEY-----", + "key_id": "90a421169f0a406205f1563a953312f0be" + "898d3c7b6c06b681aa86a874555f4a", + } + ] + + @pytest.mark.parametrize( + "payload, expected", + [ + ([], "Payload is not a dict but: []"), + ({}, "Payload misses 'public_keys' attribute"), + ({"public_keys": None}, "Payload 'public_keys' attribute is not a list"), + ({"public_keys": [None]}, "Key is not a dict but: None"), + ( + {"public_keys": [{}]}, + "Missing attribute in key: ['key', 'key_identifier']", + ), + ( + {"public_keys": [{"key": "a"}]}, + "Missing attribute in key: ['key_identifier']", + ), + ( + {"public_keys": [{"key_identifier": "a"}]}, + "Missing attribute in key: ['key']", + ), + ], + ) + def test_extract_public_keys_error(self, payload, expected): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + list(verifier._extract_public_keys(pubkey_api_data=payload)) + + assert exc.value.reason == "public_key_api.format_error" + assert str(exc.value) == expected + + def test_check_public_key(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + + keys = [ + {"key_id": "a", "key": "b"}, + {"key_id": "c", "key": "d"}, + ] + assert verifier._check_public_key(github_public_keys=keys, key_id="c") == "d" + + def test_check_public_key_error(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + verifier._check_public_key(github_public_keys=[], key_id="c") + + assert str(exc.value) == "Key c not found in github public keys" + assert exc.value.reason == "wrong_key_id" + + def test_check_signature(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + public_key = ( + "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----" + ) + signature = ( + "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + '[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + assert ( + verifier._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + is None + ) + + def test_check_signature_invalid_signature(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + public_key = ( + "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----" + ) + # Changed the initial N for an M + signature = ( + "NEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + 
'[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + verifier._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + + assert str(exc.value) == "Invalid signature" + assert exc.value.reason == "invalid_signature" + + def test_check_signature_invalid_crypto(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub() + ) + public_key = "" + signature = "" + + payload = "yeah, nope, that won't pass" + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + verifier._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + + assert str(exc.value) == "Invalid cryptographic values" + assert exc.value.reason == "invalid_crypto" + + +def test_analyze_disclosure(monkeypatch): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + user = pretend.stub() + database_macaroon = pretend.stub(user=user, id=12) + + check = pretend.call_recorder(lambda *a, **kw: database_macaroon) + delete = pretend.call_recorder(lambda *a, **kw: None) + svc = { + utils.IMetricsService: pretend.stub(increment=metrics_increment), + utils.IMacaroonService: pretend.stub( + check_if_macaroon_exists=check, delete_macaroon=delete + ), + } + + request = pretend.stub(find_service=lambda iface, context: svc[iface]) + + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr(utils, "send_token_compromised_email_leak", send_email) + + utils.analyze_disclosure( + request=request, + disclosure_record={ + "type": "token", + "token": "pypi-1234", + "url": "http://example.com", + }, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.recieved": 1, + "warehouse.token_leak.github.processed": 1, + "warehouse.token_leak.github.valid": 1, + } + assert send_email.calls == [ + pretend.call(request, user, public_url="http://example.com", origin="github") + ] + assert check.calls == [pretend.call(raw_macaroon="pypi-1234")] + assert delete.calls == [pretend.call(macaroon_id="12")] + + +def test_analyze_disclosure_wrong_record(): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + svc = { + utils.IMetricsService: pretend.stub(increment=metrics_increment), + utils.IMacaroonService: pretend.stub(), + } + + request = pretend.stub(find_service=lambda iface, context: svc[iface]) + + utils.analyze_disclosure( + request=request, + disclosure_record={}, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.recieved": 1, + "warehouse.token_leak.github.error.format": 1, + } + + +def test_analyze_disclosure_invalid_macaroon(): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + check = pretend.raiser(utils.InvalidMacaroon("Bla", "bla")) + svc = { + utils.IMetricsService: pretend.stub(increment=metrics_increment), + utils.IMacaroonService: pretend.stub(check_if_macaroon_exists=check), + } + + request = pretend.stub(find_service=lambda iface, context: svc[iface]) + + utils.analyze_disclosure( + request=request, + disclosure_record={ + "type": "token", + "token": "pypi-1234", + "url": "http://example.com", + }, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.recieved": 1, + 
"warehouse.token_leak.github.error.invalid": 1, + } + + +def test_analyze_disclosure_unknown_error(monkeypatch): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + request = pretend.stub( + find_service=lambda *a, **k: pretend.stub(increment=metrics_increment) + ) + monkeypatch.setattr(utils, "_analyze_disclosure", pretend.raiser(ValueError())) + + with pytest.raises(ValueError): + utils.analyze_disclosure( + request=request, + disclosure_record={}, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.error.unknown": 1, + } + + +def test_analyze_disclosures_wrong_type(): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + metrics_service = pretend.stub(increment=metrics_increment) + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + utils.analyze_disclosures( + disclosure_records={}, origin="yay", metrics=metrics_service + ) + + assert str(exc.value) == "Invalid format: payload is not a list" + assert exc.value.reason == "format" + + +def test_analyze_disclosures_raise(monkeypatch): + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + metrics_service = pretend.stub(increment=metrics_increment) + + task = pretend.stub(delay=pretend.call_recorder(lambda *a, **k: None)) + + monkeypatch.setattr(tasks, "analyze_disclosure_task", task) + + utils.analyze_disclosures( + disclosure_records=[1, 2, 3], origin="yay", metrics=metrics_service + ) + + assert task.delay.calls == [ + pretend.call(disclosure_record=1, origin="yay"), + pretend.call(disclosure_record=2, origin="yay"), + pretend.call(disclosure_record=3, origin="yay"), + ] diff --git a/tests/unit/integration/github/test_views.py b/tests/unit/integration/github/test_views.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/test_views.py @@ -0,0 +1,171 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import collections +import json + +import pretend + +from warehouse.integrations.github import utils, views + + +class TestGitHubDiscloseToken: + def test_github_disclose_token(self, pyramid_request, monkeypatch): + + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + metrics = pretend.stub() + + pyramid_request.body = "[1, 2, 3]" + pyramid_request.json_body = [1, 2, 3] + pyramid_request.registry.settings = {"github.token": "token"} + pyramid_request.find_service = lambda *a, **k: metrics + + http = pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + analyze_disclosures = pretend.call_recorder(lambda **k: None) + monkeypatch.setattr(utils, "analyze_disclosures", analyze_disclosures) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_code == 204 + assert verifier_cls.calls == [ + pretend.call(session=http, metrics=metrics, api_token="token") + ] + assert verify.calls == [ + pretend.call(payload="[1, 2, 3]", key_id="foo", signature="bar") + ] + assert analyze_disclosures.calls == [ + pretend.call(disclosure_records=[1, 2, 3], origin="github", metrics=metrics) + ] + + def test_github_disclose_token_no_token(self, pyramid_request, monkeypatch): + + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + metrics = pretend.stub() + + pyramid_request.body = "[1, 2, 3]" + pyramid_request.json_body = [1, 2, 3] + pyramid_request.registry.settings = {} + pyramid_request.find_service = lambda *a, **k: metrics + pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + analyze_disclosures = pretend.call_recorder(lambda **k: None) + monkeypatch.setattr(utils, "analyze_disclosures", analyze_disclosures) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_code == 204 + + def test_github_disclose_token_verify_fail(self, monkeypatch, pyramid_request): + + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + metrics = pretend.stub() + + pyramid_request.body = "[1, 2, 3]" + pyramid_request.find_service = lambda *a, **k: metrics + pyramid_request.registry.settings = {"github.token": "token"} + + pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: False) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_int == 400 + + def test_github_disclose_token_verify_invalid_json(self, monkeypatch): + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + # We need to raise on a property access, can't do that with a 
stub. + class Request: + headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + body = "[" + + @property + def json_body(self): + return json.loads(self.body) + + def find_service(self, *a, **k): + return pretend.stub(increment=metrics_increment) + + response = pretend.stub(status_int=200) + http = pretend.stub() + registry = pretend.stub(settings={"github.token": "token"}) + + request = Request() + response = views.github_disclose_token(request) + + assert response.status_int == 400 + assert metrics == {"warehouse.token_leak.github.error.payload.json_error": 1} + + def test_github_disclose_token_wrong_payload(self, pyramid_request, monkeypatch): + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + metrics_service = pretend.stub(increment=metrics_increment) + + pyramid_request.body = "{}" + pyramid_request.json_body = {} + pyramid_request.registry.settings = {"github.token": "token"} + pyramid_request.find_service = lambda *a, **k: metrics_service + + pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_code == 400 + assert metrics == {"warehouse.token_leak.github.error.format": 1} diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -72,13 +72,16 @@ def test_find_macaroon(self, user_service, macaroon_service): def test_find_userid_no_macaroon(self, macaroon_service): assert macaroon_service.find_userid(None) is None - def test_find_userid_invalid_macaroon(self, macaroon_service): - raw_macaroon = pymacaroons.Macaroon( + @pytest.fixture + def raw_macaroon(self): + return pymacaroons.Macaroon( location="fake location", identifier=str(uuid4()), key=b"fake key", version=pymacaroons.MACAROON_V2, ).serialize() + + def test_find_userid_invalid_macaroon(self, macaroon_service, raw_macaroon): raw_macaroon = f"pypi-{raw_macaroon}" assert macaroon_service.find_userid(raw_macaroon) is None @@ -102,26 +105,13 @@ def test_find_userid(self, macaroon_service): assert user.id == user_id - def test_verify_unprefixed_macaroon(self, macaroon_service): - raw_macaroon = pymacaroons.Macaroon( - location="fake location", - identifier=str(uuid4()), - key=b"fake key", - version=pymacaroons.MACAROON_V2, - ).serialize() - + def test_verify_unprefixed_macaroon(self, macaroon_service, raw_macaroon): with pytest.raises(services.InvalidMacaroon): macaroon_service.verify( raw_macaroon, pretend.stub(), pretend.stub(), pretend.stub() ) - def test_verify_no_macaroon(self, macaroon_service): - raw_macaroon = pymacaroons.Macaroon( - location="fake location", - identifier=str(uuid4()), - key=b"fake key", - version=pymacaroons.MACAROON_V2, - ).serialize() + def test_verify_no_macaroon(self, macaroon_service, raw_macaroon): raw_macaroon = f"pypi-{raw_macaroon}" with pytest.raises(services.InvalidMacaroon): @@ -238,3 +228,50 @@ def test_get_macaroon_by_description(self, macaroon_service): macaroon_service.get_macaroon_by_description(user.id, macaroon.description) == dm ) + + def 
test_check_if_macaroon_exists_unprefixed_macaroon( + self, macaroon_service, raw_macaroon + ): + with pytest.raises(services.InvalidMacaroon): + macaroon_service.check_if_macaroon_exists(raw_macaroon) + + def test_check_if_macaroon_exists_no_macaroon(self, macaroon_service, raw_macaroon): + raw_macaroon = f"pypi-{raw_macaroon}" + + with pytest.raises(services.InvalidMacaroon): + macaroon_service.check_if_macaroon_exists(raw_macaroon) + + def test_check_if_macaroon_exists_invalid_macaroon( + self, monkeypatch, user_service, macaroon_service + ): + user = UserFactory.create() + raw_macaroon, _ = macaroon_service.create_macaroon( + "fake location", user.id, "fake description", {"fake": "caveats"} + ) + + verifier_obj = pretend.stub( + verify_signature=pretend.raiser(services.InvalidMacaroon) + ) + verifier_cls = pretend.call_recorder(lambda *a, **k: verifier_obj) + monkeypatch.setattr(services, "Verifier", verifier_cls) + + with pytest.raises(services.InvalidMacaroon): + macaroon_service.check_if_macaroon_exists(raw_macaroon) + + def test_check_if_macaroon_exists_malformed_macaroon(self, macaroon_service): + with pytest.raises(services.InvalidMacaroon): + macaroon_service.check_if_macaroon_exists("pypi-thiswillnotdeserialize") + + def test_check_if_macaroon_exists_valid_macaroon( + self, monkeypatch, macaroon_service + ): + user = UserFactory.create() + raw_macaroon, data_macaroon = macaroon_service.create_macaroon( + "fake location", user.id, "fake description", {"fake": "caveats"} + ) + + verifier_obj = pretend.stub(verify_signature=lambda k: None) + verifier_cls = pretend.call_recorder(lambda *a, **k: verifier_obj) + monkeypatch.setattr(services, "Verifier", verifier_cls) + + assert macaroon_service.check_if_macaroon_exists(raw_macaroon) is data_macaroon diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -231,7 +231,6 @@ def __init__(self): "token.default.max_age": 21600, "warehouse.xmlrpc.client.ratelimit_string": "3600 per hour", } - if environment == config.Environment.development: expected_settings.update( { @@ -304,7 +303,7 @@ def __init__(self): pretend.call(".legacy.api.xmlrpc.cache"), pretend.call("pyramid_rpc.xmlrpc"), pretend.call(".legacy.action_routing"), - pretend.call(".domain"), + pretend.call(".predicates"), pretend.call(".i18n"), pretend.call(".db"), pretend.call(".tasks"), diff --git a/tests/unit/test_domain.py b/tests/unit/test_predicates.py similarity index 53% rename from tests/unit/test_domain.py rename to tests/unit/test_predicates.py --- a/tests/unit/test_domain.py +++ b/tests/unit/test_predicates.py @@ -13,7 +13,9 @@ import pretend import pytest -from warehouse.domain import DomainPredicate, includeme +from pyramid.exceptions import ConfigurationError + +from warehouse.predicates import DomainPredicate, HeadersPredicate, includeme class TestDomainPredicate: @@ -39,10 +41,49 @@ def test_invalid_value(self): assert not predicate(None, pretend.stub(domain="pypi.io")) +class TestHeadersPredicate: + @pytest.mark.parametrize( + ("value", "expected"), + [ + (["Foo", "Bar"], "header Foo, header Bar"), + (["Foo", "Bar:baz"], "header Foo, header Bar=baz"), + ], + ) + def test_text(self, value, expected): + predicate = HeadersPredicate(value, None) + assert predicate.text() == expected + assert predicate.phash() == expected + + def test_when_empty(self): + with pytest.raises(ConfigurationError): + HeadersPredicate([], None) + + @pytest.mark.parametrize( + "value", + [["Foo", "Bar"], ["Foo", 
"Bar:baz"]], + ) + def test_valid_value(self, value): + predicate = HeadersPredicate(value, None) + assert predicate(None, pretend.stub(headers={"Foo": "a", "Bar": "baz"})) + + @pytest.mark.parametrize( + "value", + [["Foo", "Baz"], ["Foo", "Bar:foo"]], + ) + def test_invalid_value(self, value): + predicate = HeadersPredicate(value, None) + assert not predicate(None, pretend.stub(headers={"Foo": "a", "Bar": "baz"})) + + def test_includeme(): config = pretend.stub( - add_route_predicate=pretend.call_recorder(lambda name, pred: None) + add_route_predicate=pretend.call_recorder(lambda name, pred: None), + add_view_predicate=pretend.call_recorder(lambda name, pred: None), ) includeme(config) assert config.add_route_predicate.calls == [pretend.call("domain", DomainPredicate)] + + assert config.add_view_predicate.calls == [ + pretend.call("require_headers", HeadersPredicate) + ] diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -326,6 +326,11 @@ def add_policy(name, filename): read_only=True, domain=warehouse, ), + pretend.call( + "integrations.github.disclose-token", + "/_/github/disclose-token", + domain=warehouse, + ), pretend.call("legacy.api.simple.index", "/simple/", domain=warehouse), pretend.call( "legacy.api.simple.detail",
GitHub Token Scanning Partnership
**What's the problem this feature will solve?**
GitHub has a "Token Scanning" feature, which enables them to detect API keys/tokens and notify the service provider who issued the token. The service providers can then act on this information according to their own policies (revoke, contact the owner, etc.). This helps avoid situations where an active API key is publicly available in a project's repository.

GitHub has a partnership program for this with various service providers (including [npm][1] and [more][2]).

[1]: https://github.blog/changelog/2019-06-18-github-adds-npm-as-a-token-scanning-partner/
[2]: https://help.github.com/en/articles/about-token-scanning

**Describe the solution you'd like**
It would be worthwhile to explore partnering with GitHub for Token Scanning once support for API keys has been completed.

**Additional context**
https://developer.github.com/partnerships/token-scanning/ contains details about how to contact GitHub for this partnership, and details on how the integration works.
Thanks @pradyunsg! (blocked on #994) This reminds me of https://python-security.readthedocs.io/pypi-vuln/index-2017-11-08-pypirc_exposure_on_github.html .
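For background on what the integration involves: GitHub signs each token-scanning payload with an ECDSA key pair and publishes the public keys at https://api.github.com/meta/public_keys/token_scanning, so the partner's first job is signature verification. Below is a minimal sketch of that step, assuming the `cryptography` library; it mirrors the `_check_signature` logic exercised in the tests above but is not the production code, and the helper name is illustrative:

```python
import base64

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import load_pem_public_key


def verify_token_scanning_payload(
    public_key_pem: str, signature_b64: str, payload: str
) -> bool:
    """Return True if `payload` carries a valid GitHub signature.

    `public_key_pem` is the key fetched from the meta API for the key id
    sent in the GITHUB-PUBLIC-KEY-IDENTIFIER request header.
    """
    public_key = load_pem_public_key(public_key_pem.encode())
    try:
        # GitHub signs the raw request body with ECDSA over SHA-256;
        # the signature header is base64-encoded DER.
        public_key.verify(
            base64.b64decode(signature_b64),
            payload.encode(),
            ec.ECDSA(hashes.SHA256()),
        )
        return True
    except InvalidSignature:
        return False
```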
2019-12-19T01:11:45Z
[]
[]
pypi/warehouse
7,177
pypi__warehouse-7177
[ "7154" ]
f102f32855ef751451e2b197c4efc9b81d136e50
diff --git a/warehouse/admin/views/users.py b/warehouse/admin/views/users.py --- a/warehouse/admin/views/users.py +++ b/warehouse/admin/views/users.py @@ -184,6 +184,15 @@ def user_delete(request): .subquery() ) ) + for project in projects: + request.db.add( + JournalEntry( + name=project.name, + action="remove project", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) projects.delete(synchronize_session=False) # Update all journals to point to `deleted-user` instead
diff --git a/tests/unit/admin/views/test_users.py b/tests/unit/admin/views/test_users.py --- a/tests/unit/admin/views/test_users.py +++ b/tests/unit/admin/views/test_users.py @@ -16,12 +16,13 @@ import pytest from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound +from sqlalchemy.orm import joinedload from webob.multidict import MultiDict, NoVars from warehouse.accounts.interfaces import IUserService from warehouse.accounts.models import DisableReason from warehouse.admin.views import users as views -from warehouse.packaging.models import Project +from warehouse.packaging.models import JournalEntry, Project from ....common.db.accounts import EmailFactory, User, UserFactory from ....common.db.packaging import JournalEntryFactory, ProjectFactory, RoleFactory @@ -200,10 +201,13 @@ def test_deletes_user(self, db_request, monkeypatch): user = UserFactory.create() project = ProjectFactory.create() another_project = ProjectFactory.create() - journal = JournalEntryFactory(submitted_by=user) RoleFactory(project=project, user=user, role_name="Owner") deleted_user = UserFactory.create(username="deleted-user") + # Create an extra JournalEntry by this user which should be + # updated with the deleted-user user. + JournalEntryFactory(submitted_by=user, action="some old journal") + db_request.matchdict["user_id"] = str(user.id) db_request.params = {"username": user.username} db_request.route_path = pretend.call_recorder(lambda a: "/foobar") @@ -219,7 +223,27 @@ def test_deletes_user(self, db_request, monkeypatch): assert db_request.route_path.calls == [pretend.call("admin.user.list")] assert result.status_code == 303 assert result.location == "/foobar" - assert journal.submitted_by == deleted_user + + # Check that the correct journals were written/modified + old_journal = ( + db_request.db.query(JournalEntry) + .options(joinedload(JournalEntry.submitted_by)) + .filter(JournalEntry.action == "some old journal") + .one() + ) + assert old_journal.submitted_by == deleted_user + remove_journal = ( + db_request.db.query(JournalEntry) + .filter(JournalEntry.action == "remove project") + .one() + ) + assert remove_journal.name == project.name + nuke_journal = ( + db_request.db.query(JournalEntry) + .filter(JournalEntry.action == "nuke user") + .one() + ) + assert nuke_journal.name == f"user:{user.username}" def test_deletes_user_bad_confirm(self, db_request, monkeypatch): user = UserFactory.create()
PyPI changes feed not reporting a deleted package when a user is deleted
**Describe the bug**
When a user is deleted, all of the packages they own are deleted as well. The problem is that, when this occurs, the action is not fully reported to the RPC changes feed. Instead, the changes feed reports that a user was "nuked", but it does not report that their packages were deleted. This creates a problem for anyone monitoring the changes feed, because they will not receive notifications that a package was removed.

**Expected behavior**
When a user is deleted and their packages are deleted with them, the changes feed should show each project being removed.

**To Reproduce**
Create a user that owns a package, then nuke the user (I assume that can only be done by an admin). Then check the RPC changes feed: there is a "nuke user" event, but no "remove project" event.

From my client I am requesting the changelog as follows:
```
import xmlrpc2
import sys
from pprint import pprint

rpc = xmlrpc2.client.Client(uri="https://pypi.org")
changelog = rpc.changelog_since_serial(int(sys.argv[1]))
pprint(changelog)
```

**My Platform**
CentOS 6, Python 3.6.3

**Additional context**
In correspondence with @ewdurbin, the following was suggested:

The admin view for removing a user (“nuke user”) directly deletes project objects from the DB: https://github.com/pypa/warehouse/blob/192e60955051f8ffb34f6cc1f1e3f226acb1b5fb/warehouse/admin/views/users.py#L187

It should _really_ be using the remove project utility: https://github.com/pypa/warehouse/blob/192e60955051f8ffb34f6cc1f1e3f226acb1b5fb/warehouse/utils/project.py#L46
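For consumers of the feed, the stdlib equivalent of the reporter's snippet looks roughly like this; each changelog entry is a `[name, version, timestamp, action, serial]` list, and the starting serial below is a hypothetical placeholder you would persist between runs:

```python
import xmlrpc.client

client = xmlrpc.client.ServerProxy("https://pypi.org/pypi")
last_serial = 5_000_000  # hypothetical; store the highest serial you have seen

for name, version, timestamp, action, serial in client.changelog_since_serial(last_serial):
    # Before this fix, nuking a user emitted only a "nuke user" entry,
    # so this branch never fired for the projects deleted along with them.
    if action == "remove project":
        print(f"{name} removed (serial {serial})")
    last_serial = max(last_serial, serial)
```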
2020-01-02T21:09:55Z
[]
[]
pypi/warehouse
7,190
pypi__warehouse-7190
[ "7097" ]
0ec8090f36359d3e322fc1ff865af9d7f23db7fc
diff --git a/warehouse/cli/malware.py b/warehouse/cli/malware.py new file mode 100644 --- /dev/null +++ b/warehouse/cli/malware.py @@ -0,0 +1,34 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click + +from warehouse.cli import warehouse +from warehouse.malware.tasks import sync_checks as _sync_checks + + [email protected]() # pragma: no branch +def malware(): + """ + Manage the Warehouse Malware Checks. + """ + + [email protected]() [email protected]_obj +def sync_checks(config): + """ + Sync the Warehouse database with the malware checks in malware/checks. + """ + + request = config.task(_sync_checks).get_request() + config.task(_sync_checks).run(request) diff --git a/warehouse/malware/checks/example.py b/warehouse/malware/checks/example.py --- a/warehouse/malware/checks/example.py +++ b/warehouse/malware/checks/example.py @@ -17,17 +17,15 @@ VerdictConfidence, ) -VERSION = 1 -SHORT_DESCRIPTION = "An example hook-based check" -LONG_DESCRIPTION = """The purpose of this check is to demonstrate the implementation \ -of a hook-based check. This check will generate verdicts if enabled.""" - class ExampleCheck(MalwareCheckBase): - version = VERSION - short_description = SHORT_DESCRIPTION - long_description = LONG_DESCRIPTION + version = 1 + short_description = "An example hook-based check" + long_description = """The purpose of this check is to demonstrate the \ +implementation of a hook-based check. This check will generate verdicts if enabled.""" + check_type = "event_hook" + hooked_object = "File" def __init__(self, db): super().__init__(db) diff --git a/warehouse/malware/tasks.py b/warehouse/malware/tasks.py --- a/warehouse/malware/tasks.py +++ b/warehouse/malware/tasks.py @@ -10,9 +10,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import inspect import warehouse.malware.checks as checks +from warehouse.malware.models import MalwareCheck, MalwareCheckState +from warehouse.malware.utils import get_check_fields from warehouse.tasks import task @@ -24,3 +27,62 @@ def run_check(task, request, check_name, obj_id): except Exception as exc: request.log.error("Error executing check %s: %s", check_name, str(exc)) raise task.retry(exc=exc) + + +@task(bind=True, ignore_result=True, acks_late=True) +def sync_checks(task, request): + code_checks = inspect.getmembers(checks, inspect.isclass) + request.log.info("%d malware checks found in codebase." % len(code_checks)) + + all_checks = request.db.query(MalwareCheck).all() + active_checks = {} + wiped_out_checks = {} + for check in all_checks: + if not check.is_stale: + if check.state == MalwareCheckState.wiped_out: + wiped_out_checks[check.name] = check + else: + active_checks[check.name] = check + + if len(active_checks) > len(code_checks): + code_check_names = set([name for name, cls in code_checks]) + missing = ", ".join(set(active_checks.keys()) - code_check_names) + request.log.error( + "Found %d active checks in the db, but only %d checks in \ +code. 
Please manually move superfluous checks to the wiped_out state \ +in the check admin: %s" + % (len(active_checks), len(code_checks), missing) + ) + raise Exception("Mismatch between number of db checks and code checks.") + + for check_name, check_class in code_checks: + check = getattr(checks, check_name) + + if wiped_out_checks.get(check_name): + request.log.error( + "%s is wiped_out and cannot be synced. Please remove check from \ +codebase." + % check_name + ) + continue + + db_check = active_checks.get(check_name) + if db_check: + if check.version == db_check.version: + request.log.info("%s is unmodified." % check_name) + continue + + request.log.info("Updating existing %s." % check_name) + fields = get_check_fields(check) + + # Migrate the check state to the newest check. + # Then mark the old check state as disabled. + if db_check.state != MalwareCheckState.disabled: + fields["state"] = db_check.state.value + db_check.state = MalwareCheckState.disabled + + request.db.add(MalwareCheck(**fields)) + else: + request.log.info("Adding new %s to the database." % check_name) + fields = get_check_fields(check) + request.db.add(MalwareCheck(**fields)) diff --git a/warehouse/malware/utils.py b/warehouse/malware/utils.py --- a/warehouse/malware/utils.py +++ b/warehouse/malware/utils.py @@ -27,6 +27,18 @@ def valid_check_types(): return set([t.value for t in MalwareCheckObjectType]) +def get_check_fields(check): + result = {"name": check.__name__} + required_fields = ["short_description", "long_description", "version", "check_type"] + for field in required_fields: + result[field] = getattr(check, field) + + if result["check_type"] == "event_hook": + result["hooked_object"] = check.hooked_object + + return result + + def get_enabled_checks(session): checks = ( session.query(MalwareCheck.name, MalwareCheck.hooked_object)
diff --git a/tests/unit/cli/test_malware.py b/tests/unit/cli/test_malware.py new file mode 100644 --- /dev/null +++ b/tests/unit/cli/test_malware.py @@ -0,0 +1,36 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.cli.malware import sync_checks +from warehouse.malware.tasks import sync_checks as _sync_checks + + +class TestCLIMalware: + def test_sync_checks(self, cli): + request = pretend.stub() + task = pretend.stub( + get_request=pretend.call_recorder(lambda *a, **kw: request), + run=pretend.call_recorder(lambda *a, **kw: None), + ) + config = pretend.stub(task=pretend.call_recorder(lambda *a, **kw: task)) + + result = cli.invoke(sync_checks, obj=config) + + assert result.exit_code == 0 + assert config.task.calls == [ + pretend.call(_sync_checks), + pretend.call(_sync_checks), + ] + assert task.get_request.calls == [pretend.call()] + assert task.run.calls == [pretend.call(request)] diff --git a/tests/unit/malware/test_checks.py b/tests/unit/malware/test_checks.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_checks.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import inspect + +import warehouse.malware.checks as checks + +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.utils import get_check_fields + + +def test_checks_subclass_base(): + checks_from_module = inspect.getmembers(checks, inspect.isclass) + + subclasses_of_malware_base = { + cls.__name__: cls for cls in MalwareCheckBase.__subclasses__() + } + + assert len(checks_from_module) == len(subclasses_of_malware_base) + + for check_name, check in checks_from_module: + assert subclasses_of_malware_base[check_name] == check + + +def test_checks_fields(): + checks_from_module = inspect.getmembers(checks, inspect.isclass) + + for check_name, check in checks_from_module: + elems = inspect.getmembers(check, lambda a: not (inspect.isroutine(a))) + inspection_fields = {"name": check_name} + for elem_name, value in elems: + if not elem_name.startswith("__"): + inspection_fields[elem_name] = value + fields = get_check_fields(check) + + assert inspection_fields == fields diff --git a/tests/unit/malware/test_tasks.py b/tests/unit/malware/test_tasks.py --- a/tests/unit/malware/test_tasks.py +++ b/tests/unit/malware/test_tasks.py @@ -18,68 +18,240 @@ import warehouse.malware.checks as checks -from warehouse.malware.models import MalwareVerdict -from warehouse.malware.tasks import run_check +from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareVerdict +from warehouse.malware.tasks import run_check, sync_checks from ...common.db.malware import MalwareCheckFactory from ...common.db.packaging import FileFactory, ProjectFactory, ReleaseFactory -def test_run_check(monkeypatch, db_request): - project = ProjectFactory.create(name="foo") - release = ReleaseFactory.create(project=project) - file0 = FileFactory.create(release=release, filename="foo.bar") - MalwareCheckFactory.create(name="ExampleCheck", state="enabled") +class TestRunCheck: + def test_success(self, monkeypatch, db_request): + project = ProjectFactory.create(name="foo") + release = ReleaseFactory.create(project=project) + file0 = FileFactory.create(release=release, filename="foo.bar") + MalwareCheckFactory.create(name="ExampleCheck", state=MalwareCheckState.enabled) - task = pretend.stub() - run_check(task, db_request, "ExampleCheck", file0.id) - assert db_request.db.query(MalwareVerdict).one() + task = pretend.stub() + run_check(task, db_request, "ExampleCheck", file0.id) + assert db_request.db.query(MalwareVerdict).one() + def test_missing_check_id(self, monkeypatch, db_session): + exc = NoResultFound("No row was found for one()") -def test_run_check_missing_check_id(monkeypatch, db_session): - exc = NoResultFound("No row was found for one()") + class FakeMalwareCheck: + def __init__(self, db): + raise exc - class FakeMalwareCheck: - def __init__(self, db): - raise exc + checks.FakeMalwareCheck = FakeMalwareCheck - class Task: - @staticmethod - @pretend.call_recorder - def retry(exc): - raise celery.exceptions.Retry + class Task: + @staticmethod + @pretend.call_recorder + def retry(exc): + raise celery.exceptions.Retry - task = Task() + task = Task() - checks.FakeMalwareCheck = FakeMalwareCheck + request = pretend.stub( + db=db_session, + log=pretend.stub( + error=pretend.call_recorder(lambda *args, **kwargs: None), + ), + ) + + with pytest.raises(celery.exceptions.Retry): + run_check( + task, + request, + "FakeMalwareCheck", + "d03d75d1-2511-4a8b-9759-62294a6fe3a7", + ) + + assert request.log.error.calls == [ + pretend.call( + "Error executing check %s: %s", + "FakeMalwareCheck", + "No 
row was found for one()", + ) + ] + + assert task.retry.calls == [pretend.call(exc=exc)] + + del checks.FakeMalwareCheck + + def test_missing_check(self, db_request): + task = pretend.stub() + with pytest.raises(AttributeError): + run_check( + task, + db_request, + "DoesNotExistCheck", + "d03d75d1-2511-4a8b-9759-62294a6fe3a7", + ) + + +class TestSyncChecks: + def test_no_updates(self, db_session): + MalwareCheckFactory.create( + name="ExampleCheck", state=MalwareCheckState.disabled + ) + + task = pretend.stub() + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + ) + + sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("1 malware checks found in codebase."), + pretend.call("ExampleCheck is unmodified."), + ] - request = pretend.stub( - db=db_session, - log=pretend.stub(error=pretend.call_recorder(lambda *args, **kwargs: None),), + @pytest.mark.parametrize( + ("final_state"), [MalwareCheckState.enabled, MalwareCheckState.disabled] ) + def test_upgrade_check(self, monkeypatch, db_session, final_state): + MalwareCheckFactory.create(name="ExampleCheck", state=final_state) + + class ExampleCheck: + version = 2 + short_description = "This is a short description." + long_description = "This is a longer description." + check_type = "scheduled" + + monkeypatch.setattr(checks, "ExampleCheck", ExampleCheck) + + task = pretend.stub() + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + ) + + sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("1 malware checks found in codebase."), + pretend.call("Updating existing ExampleCheck."), + ] + db_checks = ( + db_session.query(MalwareCheck) + .filter(MalwareCheck.name == "ExampleCheck") + .all() + ) + + assert len(db_checks) == 2 - with pytest.raises(celery.exceptions.Retry): - run_check( - task, request, "FakeMalwareCheck", "d03d75d1-2511-4a8b-9759-62294a6fe3a7" + if final_state == MalwareCheckState.disabled: + assert ( + db_checks[0].state == db_checks[1].state == MalwareCheckState.disabled + ) + + else: + for c in db_checks: + if c.state == final_state: + assert c.version == 2 + else: + assert c.version == 1 + + def test_one_new_check(self, db_session): + task = pretend.stub() + + class FakeMalwareCheck: + version = 1 + short_description = "This is a short description." + long_description = "This is a longer description." 
+ check_type = "scheduled" + + checks.FakeMalwareCheck = FakeMalwareCheck + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + ) + + MalwareCheckFactory.create( + name="ExampleCheck", state=MalwareCheckState.evaluation + ) + + sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("2 malware checks found in codebase."), + pretend.call("ExampleCheck is unmodified."), + pretend.call("Adding new FakeMalwareCheck to the database."), + ] + assert db_session.query(MalwareCheck).count() == 2 + + new_check = ( + db_session.query(MalwareCheck) + .filter(MalwareCheck.name == "FakeMalwareCheck") + .one() ) - assert request.log.error.calls == [ - pretend.call( - "Error executing check %s: %s", - "FakeMalwareCheck", - "No row was found for one()", + assert new_check.state == MalwareCheckState.disabled + + del checks.FakeMalwareCheck + + def test_too_many_db_checks(self, db_session): + task = pretend.stub() + + MalwareCheckFactory.create(name="ExampleCheck", state=MalwareCheckState.enabled) + MalwareCheckFactory.create( + name="AnotherCheck", state=MalwareCheckState.disabled + ) + MalwareCheckFactory.create( + name="AnotherCheck", state=MalwareCheckState.evaluation, version=2 + ) + + request = pretend.stub( + db=db_session, + log=pretend.stub( + info=pretend.call_recorder(lambda *args, **kwargs: None), + error=pretend.call_recorder(lambda *args, **kwargs: None), + ), ) - ] - assert task.retry.calls == [pretend.call(exc=exc)] + with pytest.raises(Exception): + sync_checks(task, request) + assert request.log.info.calls == [ + pretend.call("1 malware checks found in codebase."), + ] -def test_run_check_missing_check(db_request): - task = pretend.stub() - with pytest.raises(AttributeError): - run_check( - task, - db_request, - "DoesNotExistCheck", - "d03d75d1-2511-4a8b-9759-62294a6fe3a7", + assert request.log.error.calls == [ + pretend.call( + "Found 2 active checks in the db, but only 1 checks in code. Please \ +manually move superfluous checks to the wiped_out state in the check admin: \ +AnotherCheck" + ), + ] + + def test_only_wiped_out(self, db_session): + task = pretend.stub() + MalwareCheckFactory.create( + name="ExampleCheck", state=MalwareCheckState.wiped_out + ) + request = pretend.stub( + db=db_session, + log=pretend.stub( + info=pretend.call_recorder(lambda *args, **kwargs: None), + error=pretend.call_recorder(lambda *args, **kwargs: None), + ), ) + + sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("1 malware checks found in codebase."), + ] + + assert request.log.error.calls == [ + pretend.call( + "ExampleCheck is wiped_out and cannot be synced. Please remove check \ +from codebase." 
+ ), + ] diff --git a/tests/unit/malware/test_utils.py b/tests/unit/malware/test_utils.py --- a/tests/unit/malware/test_utils.py +++ b/tests/unit/malware/test_utils.py @@ -12,36 +12,64 @@ from collections import defaultdict +import pytest + from warehouse.malware.models import MalwareCheckState, MalwareCheckType -from warehouse.malware.utils import get_enabled_checks +from warehouse.malware.utils import get_check_fields, get_enabled_checks from ...common.db.malware import MalwareCheckFactory -def test_get_enabled_checks(db_session): - check = MalwareCheckFactory.create( - state=MalwareCheckState.enabled, check_type=MalwareCheckType.event_hook - ) - result = defaultdict(list) - result[check.hooked_object.value].append(check.name) - checks = get_enabled_checks(db_session) - assert checks == result +class TestGetEnabledChecks: + def test_one(self, db_session): + check = MalwareCheckFactory.create( + state=MalwareCheckState.enabled, check_type=MalwareCheckType.event_hook + ) + result = defaultdict(list) + result[check.hooked_object.value].append(check.name) + checks = get_enabled_checks(db_session) + assert checks == result + + def test_many(self, db_session): + result = defaultdict(list) + for i in range(10): + check = MalwareCheckFactory.create() + if ( + check.state == MalwareCheckState.enabled + and check.check_type == MalwareCheckType.event_hook + ): + result[check.hooked_object.value].append(check.name) + + checks = get_enabled_checks(db_session) + assert checks == result + + def test_none(self, db_session): + checks = get_enabled_checks(db_session) + assert checks == defaultdict(list) -def test_get_enabled_checks_many(db_session): - result = defaultdict(list) - for i in range(10): - check = MalwareCheckFactory.create() - if ( - check.state == MalwareCheckState.enabled - and check.check_type == MalwareCheckType.event_hook - ): - result[check.hooked_object.value].append(check.name) +class TestGetCheckFields: + def test_success(self): + class MySampleCheck: + version = 6 + foo = "bar" + short_description = "This is the description" + long_description = "This is the description" + check_type = "scheduled" - checks = get_enabled_checks(db_session) - assert checks == result + result = get_check_fields(MySampleCheck) + assert result == { + "name": "MySampleCheck", + "version": 6, + "short_description": "This is the description", + "long_description": "This is the description", + "check_type": "scheduled", + } + def test_failure(self): + class MySampleCheck: + version = 1 + status = True -def test_get_enabled_checks_none(db_session): - checks = get_enabled_checks(db_session) - assert checks == defaultdict(list) + with pytest.raises(AttributeError): + get_check_fields(MySampleCheck)
Write db migration tooling for user-contributed malware checks
This issue is part of the work defined by [2019-Q4-PyPI Milestone 2](https://github.com/python/request-for/blob/master/2019-Q4-PyPI/RFP.md#milestone-2---systems-for-automated-detection-of-malicious-uploads). User-contributed malware checks should be automatically discovered and inserted into the database upon deployment, and re-synced whenever the check code changes.
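The sync task in the diff discovers checks with `inspect.getmembers` on the `warehouse.malware.checks` package and reads a fixed set of class attributes via `get_check_fields`, so a contributed check only has to declare those fields and bump `version` on every code change. A sketch of the expected shape, modeled on the `ExampleCheck` in the diff (the class name is hypothetical):

```python
from warehouse.malware.checks.base import MalwareCheckBase


class MyContributedCheck(MalwareCheckBase):  # hypothetical check
    # Bump on any code change: sync_checks inserts a new row for the
    # new version and marks the previous one disabled.
    version = 1
    short_description = "One-line summary for the check admin"
    long_description = "Longer explanation of what this check flags."
    check_type = "event_hook"  # or "scheduled"
    hooked_object = "File"     # required only for event_hook checks
```

Exporting the class from `warehouse/malware/checks/__init__.py` is what makes the inspection pick it up; running the new `sync_checks` CLI command (or the task on deploy) then reconciles the database rows.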
2020-01-06T18:25:16Z
[]
[]
pypi/warehouse
7,249
pypi__warehouse-7249
[ "7198" ]
734a3fad2a71fa334b30080872a5c58da431d53c
diff --git a/warehouse/admin/views/checks.py b/warehouse/admin/views/checks.py --- a/warehouse/admin/views/checks.py +++ b/warehouse/admin/views/checks.py @@ -67,7 +67,7 @@ def run_backfill(request): check = get_check_by_name(request.db, request.matchdict["check_name"]) num_objects = 10000 - if check.state not in (MalwareCheckState.enabled, MalwareCheckState.evaluation): + if check.state not in (MalwareCheckState.Enabled, MalwareCheckState.Evaluation): request.session.flash( f"Check must be in 'enabled' or 'evaluation' state to run a backfill.", queue="error", @@ -105,11 +105,11 @@ def change_check_state(request): raise HTTPNotFound try: - check.state = getattr(MalwareCheckState, check_state) - except AttributeError: + check.state = MalwareCheckState(check_state) + except ValueError: request.session.flash("Invalid check state provided.", queue="error") else: - if check.state == MalwareCheckState.wiped_out: + if check.state == MalwareCheckState.WipedOut: request.task(remove_verdicts).delay(check.name) request.session.flash( f"Changed {check.name!r} check to {check.state.value!r}!", queue="success" diff --git a/warehouse/malware/checks/__init__.py b/warehouse/malware/checks/__init__.py --- a/warehouse/malware/checks/__init__.py +++ b/warehouse/malware/checks/__init__.py @@ -9,3 +9,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +from .setup_patterns import SetupPatternCheck # noqa diff --git a/warehouse/malware/checks/base.py b/warehouse/malware/checks/base.py --- a/warehouse/malware/checks/base.py +++ b/warehouse/malware/checks/base.py @@ -11,6 +11,7 @@ # limitations under the License. from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareVerdict +from warehouse.packaging import models class MalwareCheckBase: @@ -20,17 +21,34 @@ def __init__(self, db): self._load_check_id() self._verdicts = [] + @classmethod + def prepare(cls, request, obj_id): + """ + Prepares some context for scanning the given object. + """ + kwargs = {} + + model = getattr(models, cls.hooked_object) + kwargs["obj"] = request.db.query(model).get(obj_id) + + if cls.hooked_object == "File": + kwargs["file_url"] = request.route_url( + "packaging.file", path=kwargs["obj"].path + ) + + return kwargs + def add_verdict(self, **kwargs): self._verdicts.append(MalwareVerdict(check_id=self.id, **kwargs)) - def run(self, obj_id): + def run(self, **kwargs): """ Runs the check and inserts returned verdicts. """ - self.scan(obj_id) + self.scan(**kwargs) self.db.add_all(self._verdicts) - def scan(self, obj_id): + def scan(self, **kwargs): """ Scans the object and returns a verdict. """ @@ -43,12 +61,12 @@ def backfill(self, sample=1): """ def _load_check_id(self): - self.id = ( + (self.id,) = ( self.db.query(MalwareCheck.id) .filter(MalwareCheck.name == self._name) .filter( MalwareCheck.state.in_( - [MalwareCheckState.enabled, MalwareCheckState.evaluation] + [MalwareCheckState.Enabled, MalwareCheckState.Evaluation] ) ) .one() diff --git a/warehouse/malware/checks/setup_patterns/__init__.py b/warehouse/malware/checks/setup_patterns/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/setup_patterns/__init__.py @@ -0,0 +1,13 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .check import SetupPatternCheck # noqa diff --git a/warehouse/malware/checks/setup_patterns/check.py b/warehouse/malware/checks/setup_patterns/check.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/setup_patterns/check.py @@ -0,0 +1,108 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from textwrap import dedent + +import yara + +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.checks.utils import extract_file_content, fetch_url_content +from warehouse.malware.models import VerdictClassification, VerdictConfidence + + +class SetupPatternCheck(MalwareCheckBase): + _yara_rule_file = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "setup_py_rules.yara" + ) + + version = 1 + short_description = "A check for common malicious patterns in setup.py" + long_description = dedent( + """ + This check uses YARA to search for common malicious patterns in the setup.py + files of uploaded release archives. + """ + ) + check_type = "event_hook" + hooked_object = "File" + + def __init__(self, db): + super().__init__(db) + self._yara_rules = self._load_yara_rules() + + def _load_yara_rules(self): + return yara.compile(filepath=self._yara_rule_file) + + def scan(self, **kwargs): + file = kwargs.get("obj") + file_url = kwargs.get("file_url") + if file is None or file_url is None: + # TODO: Maybe raise here, since the absence of these + # arguments is a use/user error. + return + + if file.packagetype != "sdist": + # Per PEP 491: bdists do not contain setup.py. + # This check only scans dists that contain setup.py, so + # we have nothing to perform. + return + + archive_stream = fetch_url_content(file_url) + setup_py_contents = extract_file_content(archive_stream, "setup.py") + if setup_py_contents is None: + self.add_verdict( + file_id=file.id, + classification=VerdictClassification.Indeterminate, + confidence=VerdictConfidence.High, + message="sdist does not contain a suitable setup.py for analysis", + ) + return + + matches = self._yara_rules.match(data=setup_py_contents) + if len(matches) > 0: + # We reduce N matches into a single verdict by taking the maximum + # classification and confidence. 
+ classification = max( + VerdictClassification(m.meta["classification"]) for m in matches + ) + confidence = max(VerdictConfidence(m.meta["confidence"]) for m in matches) + message = ":".join(m.rule for m in matches) + + details = {} + for match in matches: + details[match.rule] = { + "classification": match.meta["classification"], + "confidence": match.meta["confidence"], + # NOTE: We could include the raw bytes here (s[2]), + # but we'd have to serialize/encode it to make JSON happy. + # It probably suffices to include the offset and identifier + # for triage purposes. + "strings": [[s[0], s[1]] for s in match.strings], + } + + self.add_verdict( + file_id=file.id, + classification=classification, + confidence=confidence, + message=message, + details=details, + ) + else: + # No matches? Report a low-confidence benign verdict. + self.add_verdict( + file_id=file.id, + classification=VerdictClassification.Benign, + confidence=VerdictConfidence.Low, + message="No malicious patterns found in setup.py", + ) diff --git a/warehouse/malware/checks/utils.py b/warehouse/malware/checks/utils.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/utils.py @@ -0,0 +1,80 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import pathlib +import tarfile +import zipfile + +import requests + + +def fetch_url_content(url): + """ + Retrieves the contents of the given (presumed CDN) URL as a BytesIO. + + Performs no error checking; exceptions are handled in the check harness + as part of check retrying behavior. + """ + response = requests.get(url) + response.raise_for_status() + return io.BytesIO(response.content) + + +def extract_file_content(archive_stream, file_path): + """ + Retrieves the content of the given path from the given archive stream + (presumed to be a dist) as bytes. + + Handling of the given path is a little special: since the dist format(s) + don't enforce any naming convention for the base archive directory, + the path is interpreted as {base}/{file_path}. Thus, a call like this: + + extract_file_content(stream, "setup.py") + + will extract and return the contents of {base}/setup.py where {base} + is frequently (but not guaranteed to be) something like $name-$version. + + Returns None on any sort of failure. + """ + if zipfile.is_zipfile(archive_stream): + with zipfile.ZipFile(archive_stream) as zipobj: + for name in zipobj.namelist(): + path_parts = pathlib.Path(name).parts + if len(path_parts) >= 2: + tail = pathlib.Path(*path_parts[1:]) + if str(tail) == file_path: + return zipobj.read(name) + return None + else: + # NOTE: is_zipfile doesn't rewind the fileobj it's given. + archive_stream.seek(0) + + # NOTE: We don't need to perform a sanity check on + # the (presumed) tarfile's compression here, since we're + # extracting from a stream that's already gone through + # upload validation. + # See _is_valid_dist_file in forklift/legacy.py. 
+ try: + with tarfile.open(fileobj=archive_stream) as tarobj: + member = tarobj.next() + while member: + path_parts = pathlib.Path(member.name).parts + if len(path_parts) >= 2: + tail = pathlib.Path(*path_parts[1:]) + if str(tail) == file_path: + return tarobj.extractfile(member).read() + + member = tarobj.next() + return None + except tarfile.TarError: + return None diff --git a/warehouse/malware/models.py b/warehouse/malware/models.py --- a/warehouse/malware/models.py +++ b/warehouse/malware/models.py @@ -11,6 +11,7 @@ # limitations under the License. import enum +import functools from citext import CIText from sqlalchemy import ( @@ -32,20 +33,23 @@ from warehouse.utils.attrs import make_repr [email protected] class MalwareCheckType(enum.Enum): - event_hook = "event_hook" - scheduled = "scheduled" + EventHook = "event_hook" + Scheduled = "scheduled" [email protected] class MalwareCheckState(enum.Enum): - enabled = "enabled" - evaluation = "evaluation" - disabled = "disabled" - wiped_out = "wiped_out" + Enabled = "enabled" + Evaluation = "evaluation" + Disabled = "disabled" + WipedOut = "wiped_out" [email protected] class MalwareCheckObjectType(enum.Enum): File = "File" @@ -53,19 +57,45 @@ class MalwareCheckObjectType(enum.Enum): Project = "Project" [email protected] [email protected]_ordering class VerdictClassification(enum.Enum): + """ + An enumeration of classification markers for malware verdicts. - threat = "threat" - indeterminate = "indeterminate" - benign = "benign" + Note that the order of declaration is important: it provides + the appropriate ordering behavior when finding the minimum + and maximum classifications for a set of verdicts. + """ + Benign = "benign" + Indeterminate = "indeterminate" + Threat = "threat" + def __lt__(self, other): + members = list(self.__class__) + return members.index(self) < members.index(other) + + [email protected] [email protected]_ordering class VerdictConfidence(enum.Enum): + """ + An enumeration of confidence markers for malware verdicts. + + Note that the order of declaration is important: it provides + the appropriate ordering behavior when finding the minimum + and maximum confidences for a set of verdicts. + """ Low = "low" Medium = "medium" High = "high" + def __lt__(self, other): + members = list(self.__class__) + return members.index(self) < members.index(other) + class MalwareCheck(db.Model): diff --git a/warehouse/malware/tasks.py b/warehouse/malware/tasks.py --- a/warehouse/malware/tasks.py +++ b/warehouse/malware/tasks.py @@ -24,7 +24,8 @@ def run_check(task, request, check_name, obj_id): check = getattr(checks, check_name)(request.db) try: - check.run(obj_id) + kwargs = check.prepare(request, obj_id) + check.run(obj_id=obj_id, **kwargs) except Exception as exc: request.log.error("Error executing check %s: %s" % (check_name, str(exc))) raise task.retry(exc=exc) @@ -55,7 +56,7 @@ def sync_checks(task, request): wiped_out_checks = {} for check in all_checks: if not check.is_stale: - if check.state == MalwareCheckState.wiped_out: + if check.state == MalwareCheckState.WipedOut: wiped_out_checks[check.name] = check else: active_checks[check.name] = check @@ -93,9 +94,9 @@ def sync_checks(task, request): # Migrate the check state to the newest check. # Then mark the old check state as disabled. 
- if db_check.state != MalwareCheckState.disabled: + if db_check.state != MalwareCheckState.Disabled: fields["state"] = db_check.state.value - db_check.state = MalwareCheckState.disabled + db_check.state = MalwareCheckState.Disabled request.db.add(MalwareCheck(**fields)) else: diff --git a/warehouse/malware/utils.py b/warehouse/malware/utils.py --- a/warehouse/malware/utils.py +++ b/warehouse/malware/utils.py @@ -46,8 +46,8 @@ def get_check_fields(check): def get_enabled_hooked_checks(session): checks = ( session.query(MalwareCheck.name, MalwareCheck.hooked_object) - .filter(MalwareCheck.check_type == MalwareCheckType.event_hook) - .filter(MalwareCheck.state == MalwareCheckState.enabled) + .filter(MalwareCheck.check_type == MalwareCheckType.EventHook) + .filter(MalwareCheck.state == MalwareCheckState.Enabled) .all() ) results = defaultdict(list)
diff --git a/tests/common/checks/hooked.py b/tests/common/checks/hooked.py --- a/tests/common/checks/hooked.py +++ b/tests/common/checks/hooked.py @@ -26,10 +26,14 @@ class ExampleHookedCheck(MalwareCheckBase): def __init__(self, db): super().__init__(db) - def scan(self, file_id=None): + def scan(self, **kwargs): + file_id = kwargs.get("obj_id") + if file_id is None: + return + self.add_verdict( file_id=file_id, - classification=VerdictClassification.benign, + classification=VerdictClassification.Benign, confidence=VerdictConfidence.High, message="Nothing to see here!", ) diff --git a/tests/unit/admin/views/test_checks.py b/tests/unit/admin/views/test_checks.py --- a/tests/unit/admin/views/test_checks.py +++ b/tests/unit/admin/views/test_checks.py @@ -72,11 +72,11 @@ def test_no_check_state(self, db_request): views.change_check_state(db_request) @pytest.mark.parametrize( - ("final_state"), [MalwareCheckState.disabled, MalwareCheckState.wiped_out] + ("final_state"), [MalwareCheckState.Disabled, MalwareCheckState.WipedOut] ) def test_change_to_valid_state(self, db_request, final_state): check = MalwareCheckFactory.create( - name="MyCheck", state=MalwareCheckState.disabled + name="MyCheck", state=MalwareCheckState.Disabled ) db_request.POST = {"check_state": final_state.value} @@ -104,7 +104,7 @@ def test_change_to_valid_state(self, db_request, final_state): assert check.state == final_state - if final_state == MalwareCheckState.wiped_out: + if final_state == MalwareCheckState.WipedOut: assert wipe_out_recorder.delay.calls == [pretend.call("MyCheck")] def test_change_to_invalid_state(self, db_request): @@ -134,11 +134,11 @@ class TestRunBackfill: ("check_state", "message"), [ ( - MalwareCheckState.disabled, + MalwareCheckState.Disabled, "Check must be in 'enabled' or 'evaluation' state to run a backfill.", ), ( - MalwareCheckState.wiped_out, + MalwareCheckState.WipedOut, "Check must be in 'enabled' or 'evaluation' state to run a backfill.", ), ], @@ -160,7 +160,7 @@ def test_invalid_backfill_parameters(self, db_request, check_state, message): assert db_request.session.flash.calls == [pretend.call(message, queue="error")] def test_sucess(self, db_request): - check = MalwareCheckFactory.create(state=MalwareCheckState.enabled) + check = MalwareCheckFactory.create(state=MalwareCheckState.Enabled) db_request.matchdict["check_name"] = check.name db_request.session = pretend.stub( diff --git a/tests/unit/malware/checks/__init__.py b/tests/unit/malware/checks/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/malware/checks/setup_patterns/__init__.py b/tests/unit/malware/checks/setup_patterns/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/setup_patterns/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/unit/malware/checks/setup_patterns/test_check.py b/tests/unit/malware/checks/setup_patterns/test_check.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/malware/checks/setup_patterns/test_check.py
@@ -0,0 +1,145 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pretend
+import pytest
+import yara
+
+from warehouse.malware.checks.setup_patterns import check as c
+from warehouse.malware.models import (
+    MalwareCheckState,
+    VerdictClassification,
+    VerdictConfidence,
+)
+
+from .....common.db.malware import MalwareCheckFactory
+from .....common.db.packaging import FileFactory
+
+
+def test_initializes(db_session):
+    check_model = MalwareCheckFactory.create(
+        name="SetupPatternCheck", state=MalwareCheckState.Enabled
+    )
+    check = c.SetupPatternCheck(db_session)
+
+    assert check.id == check_model.id
+    assert isinstance(check._yara_rules, yara.Rules)
+
+
[email protected](
+    ("obj", "file_url"), [(None, pretend.stub()), (pretend.stub(), None)]
+)
+def test_scan_missing_kwargs(db_session, obj, file_url):
+    MalwareCheckFactory.create(
+        name="SetupPatternCheck", state=MalwareCheckState.Enabled
+    )
+    check = c.SetupPatternCheck(db_session)
+    check.scan(obj=obj, file_url=file_url)
+
+    assert check._verdicts == []
+
+
+def test_scan_non_sdist(db_session):
+    MalwareCheckFactory.create(
+        name="SetupPatternCheck", state=MalwareCheckState.Enabled
+    )
+    check = c.SetupPatternCheck(db_session)
+
+    file = FileFactory.create(packagetype="bdist_wheel")
+
+    check.scan(obj=file, file_url=pretend.stub())
+
+    assert check._verdicts == []
+
+
+def test_scan_no_setup_contents(db_session, monkeypatch):
+    monkeypatch.setattr(
+        c, "fetch_url_content", pretend.call_recorder(lambda *a: pretend.stub())
+    )
+    monkeypatch.setattr(
+        c, "extract_file_content", pretend.call_recorder(lambda *a: None)
+    )
+
+    MalwareCheckFactory.create(
+        name="SetupPatternCheck", state=MalwareCheckState.Enabled
+    )
+    check = c.SetupPatternCheck(db_session)
+
+    file = FileFactory.create(packagetype="sdist")
+
+    check.scan(obj=file, file_url=pretend.stub())
+
+    assert len(check._verdicts) == 1
+    assert check._verdicts[0].check_id == check.id
+    assert check._verdicts[0].file_id == file.id
+    assert check._verdicts[0].classification == VerdictClassification.Indeterminate
+    assert check._verdicts[0].confidence == VerdictConfidence.High
+    assert (
+        check._verdicts[0].message
+        == "sdist does not contain a suitable setup.py for analysis"
+    )
+
+
+def test_scan_benign_contents(db_session, monkeypatch):
+    monkeypatch.setattr(
+        c, "fetch_url_content", pretend.call_recorder(lambda *a: pretend.stub())
+    )
+    
monkeypatch.setattr( + c, + "extract_file_content", + pretend.call_recorder(lambda *a: b"this is a benign string"), + ) + + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + file = FileFactory.create(packagetype="sdist") + + check.scan(obj=file, file_url=pretend.stub()) + + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].file_id == file.id + assert check._verdicts[0].classification == VerdictClassification.Benign + assert check._verdicts[0].confidence == VerdictConfidence.Low + assert check._verdicts[0].message == "No malicious patterns found in setup.py" + + +def test_scan_matched_content(db_session, monkeypatch): + monkeypatch.setattr( + c, "fetch_url_content", pretend.call_recorder(lambda *a: pretend.stub()) + ) + monkeypatch.setattr( + c, + "extract_file_content", + pretend.call_recorder( + lambda *a: b"this looks suspicious: os.system('cat /etc/passwd')" + ), + ) + + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + file = FileFactory.create(packagetype="sdist") + + check.scan(obj=file, file_url=pretend.stub()) + + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].file_id == file.id + assert check._verdicts[0].classification == VerdictClassification.Threat + assert check._verdicts[0].confidence == VerdictConfidence.High + assert check._verdicts[0].message == "process_spawn_in_setup" diff --git a/tests/unit/malware/checks/test_utils.py b/tests/unit/malware/checks/test_utils.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/test_utils.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import io
+import tarfile
+import zipfile
+
+import pretend
+
+from warehouse.malware.checks import utils
+
+
+def test_fetch_url_content(monkeypatch):
+    response = pretend.stub(
+        raise_for_status=pretend.call_recorder(lambda: None), content=b"fake content"
+    )
+    requests = pretend.stub(get=pretend.call_recorder(lambda url: response))
+
+    monkeypatch.setattr(utils, "requests", requests)
+
+    io = utils.fetch_url_content("hxxp://fake_url.com")
+
+    assert requests.get.calls == [pretend.call("hxxp://fake_url.com")]
+    assert response.raise_for_status.calls == [pretend.call()]
+    assert io.getvalue() == b"fake content"
+
+
+def test_extract_file_contents_zip():
+    zipbuf = io.BytesIO()
+    with zipfile.ZipFile(zipbuf, mode="w") as zipobj:
+        zipobj.writestr("toplevelgetsskipped", b"nothing to see here")
+        zipobj.writestr("foo/setup.py", b"these are some contents")
+    zipbuf.seek(0)
+
+    assert utils.extract_file_content(zipbuf, "setup.py") == b"these are some contents"
+
+
+def test_extract_file_contents_zip_no_file():
+    zipbuf = io.BytesIO()
+    with zipfile.ZipFile(zipbuf, mode="w") as zipobj:
+        zipobj.writestr("foo/notsetup.py", b"these are some contents")
+    zipbuf.seek(0)
+
+    assert utils.extract_file_content(zipbuf, "setup.py") is None
+
+
+def test_extract_file_contents_tar():
+    tarbuf = io.BytesIO()
+    with tarfile.open(fileobj=tarbuf, mode="w:gz") as tarobj:
+        contents = io.BytesIO(b"these are some contents")
+        member = tarfile.TarInfo(name="foo/setup.py")
+        member.size = len(contents.getbuffer())
+        tarobj.addfile(member, fileobj=contents)
+
+        contents = io.BytesIO(b"nothing to see here")
+        member = tarfile.TarInfo(name="toplevelgetsskipped")
+        member.size = len(contents.getbuffer())
+        tarobj.addfile(member, fileobj=contents)
+    tarbuf.seek(0)
+
+    assert utils.extract_file_content(tarbuf, "setup.py") == b"these are some contents"
+
+
+def test_extract_file_contents_tar_empty():
+    tarbuf = io.BytesIO(b"invalid tar contents")
+
+    assert utils.extract_file_content(tarbuf, "setup.py") is None
+
+
+def test_extract_file_contents_tar_no_file():
+    tarbuf = io.BytesIO()
+    with tarfile.open(fileobj=tarbuf, mode="w:gz") as tarobj:
+        contents = io.BytesIO(b"these are some contents")
+        member = tarfile.TarInfo(name="foo/notsetup.py")
+        member.size = len(contents.getbuffer())
+        tarobj.addfile(member, fileobj=contents)
+
+        contents = io.BytesIO(b"nothing to see here")
+        member = tarfile.TarInfo(name="toplevelgetsskipped")
+        member.size = len(contents.getbuffer())
+        tarobj.addfile(member, fileobj=contents)
+    tarbuf.seek(0)
+
+    assert utils.extract_file_content(tarbuf, "setup.py") is None
diff --git a/tests/unit/malware/test_checks.py b/tests/unit/malware/test_checks.py
--- a/tests/unit/malware/test_checks.py
+++ b/tests/unit/malware/test_checks.py
@@ -12,6 +12,7 @@
 
 import inspect
 
+import pretend
 import pytest
 
 import warehouse.malware.checks as prod_checks
@@ -20,6 +21,7 @@
 from warehouse.malware.utils import get_check_fields
 
 from ...common import checks as test_checks
+from ...common.db.packaging import FileFactory
 
 
 def test_checks_subclass_base():
@@ -37,9 +39,7 @@ def test_checks_subclass_base():
         assert subclasses_of_malware_base[check_name] == check
 
[email protected](
-    ("checks"), [prod_checks, test_checks],
-)
[email protected](("checks"), [prod_checks, test_checks])
 def test_checks_fields(checks):
     checks_from_module = inspect.getmembers(checks, inspect.isclass)
 
     for check_name, check in checks_from_module:
         elems = inspect.getmembers(check, lambda a: not (inspect.isroutine(a)))
         inspection_fields =
{"name": check_name} for elem_name, value in elems: - if not elem_name.startswith("__"): + # Skip both dunder and "private" (_-prefixed) attributes + if not elem_name.startswith("_"): inspection_fields[elem_name] = value fields = get_check_fields(check) assert inspection_fields == fields + + +def test_base_prepare_file_hooked(db_session): + file = FileFactory.create() + request = pretend.stub( + db=db_session, route_url=pretend.call_recorder(lambda *a, **kw: "fake_url") + ) + + kwargs = test_checks.ExampleHookedCheck.prepare(request, file.id) + + assert request.route_url.calls == [pretend.call("packaging.file", path=file.path)] + assert "file_url" in kwargs + assert kwargs["file_url"] == "fake_url" + + +def test_base_prepare_nonfile_hooked(db_session): + file = FileFactory.create() + request = pretend.stub( + db=db_session, route_url=pretend.call_recorder(lambda *a, **kw: "fake_url") + ) + + class FakeProjectCheck(MalwareCheckBase): + hooked_object = "Project" + + kwargs = FakeProjectCheck.prepare(request, file.id) + assert request.route_url.calls == [] + assert "file_url" not in kwargs diff --git a/tests/unit/malware/test_init.py b/tests/unit/malware/test_init.py --- a/tests/unit/malware/test_init.py +++ b/tests/unit/malware/test_init.py @@ -170,5 +170,5 @@ def test_includeme(monkeypatch): malware.includeme(config) assert config.register_service_factory.calls == [ - pretend.call(malware_check_class.create_service, IMalwareCheckService), + pretend.call(malware_check_class.create_service, IMalwareCheckService) ] diff --git a/tests/unit/malware/test_models.py b/tests/unit/malware/test_models.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_models.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from warehouse.malware.models import VerdictClassification, VerdictConfidence + + +def test_classification_orderable(): + assert ( + VerdictClassification.Benign + < VerdictClassification.Indeterminate + < VerdictClassification.Threat + ) + assert ( + max( + [ + VerdictClassification.Benign, + VerdictClassification.Indeterminate, + VerdictClassification.Threat, + ] + ) + == VerdictClassification.Threat + ) + + +def test_confidence_orderable(): + assert VerdictConfidence.Low < VerdictConfidence.Medium < VerdictConfidence.High + assert ( + max([VerdictConfidence.Low, VerdictConfidence.Medium, VerdictConfidence.High]) + == VerdictConfidence.High + ) diff --git a/tests/unit/malware/test_tasks.py b/tests/unit/malware/test_tasks.py --- a/tests/unit/malware/test_tasks.py +++ b/tests/unit/malware/test_tasks.py @@ -26,71 +26,68 @@ class TestRunCheck: def test_success(self, db_request, monkeypatch): + db_request.route_url = pretend.call_recorder(lambda *a, **kw: "fake_route") + monkeypatch.setattr(tasks, "checks", test_checks) file0 = FileFactory.create() MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.enabled + name="ExampleHookedCheck", state=MalwareCheckState.Enabled ) task = pretend.stub() tasks.run_check(task, db_request, "ExampleHookedCheck", file0.id) + assert db_request.route_url.calls == [ + pretend.call("packaging.file", path=file0.path) + ] assert db_request.db.query(MalwareVerdict).one() def test_disabled_check(self, db_request, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.disabled + name="ExampleHookedCheck", state=MalwareCheckState.Disabled ) - task = pretend.stub() + file = FileFactory.create() + with pytest.raises(NoResultFound): - tasks.run_check( - task, - db_request, - "ExampleHookedCheck", - "d03d75d1-2511-4a8b-9759-62294a6fe3a7", - ) + tasks.run_check(task, db_request, "ExampleHookedCheck", file.id) def test_missing_check(self, db_request, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) task = pretend.stub() + + file = FileFactory.create() + with pytest.raises(AttributeError): - tasks.run_check( - task, - db_request, - "DoesNotExistCheck", - "d03d75d1-2511-4a8b-9759-62294a6fe3a7", - ) + tasks.run_check(task, db_request, "DoesNotExistCheck", file.id) def test_retry(self, db_session, monkeypatch): exc = Exception("Scan failed") - def scan(self, file_id): + def scan(self, **kwargs): raise exc monkeypatch.setattr(tasks, "checks", test_checks) monkeypatch.setattr(tasks.checks.ExampleHookedCheck, "scan", scan) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.evaluation + name="ExampleHookedCheck", state=MalwareCheckState.Evaluation ) task = pretend.stub( - retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry)), + retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry)) ) request = pretend.stub( db=db_session, log=pretend.stub(error=pretend.call_recorder(lambda *args, **kwargs: None)), + route_url=pretend.call_recorder(lambda *a, **kw: pretend.stub()), ) + file = FileFactory.create() + with pytest.raises(celery.exceptions.Retry): - tasks.run_check( - task, - request, - "ExampleHookedCheck", - "d03d75d1-2511-4a8b-9759-62294a6fe3a7", - ) + tasks.run_check(task, request, "ExampleHookedCheck", file.id) assert request.log.error.calls == [ pretend.call("Error executing check ExampleHookedCheck: Scan failed") @@ -107,7 +104,7 @@ def test_invalid_check_name(self, db_request, 
monkeypatch): tasks.backfill(task, db_request, "DoesNotExist", 1) @pytest.mark.parametrize( - ("num_objects", "num_runs"), [(11, 1), (11, 11), (101, 90)], + ("num_objects", "num_runs"), [(11, 1), (11, 11), (101, 90)] ) def test_run(self, db_session, num_objects, num_runs, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) @@ -116,7 +113,7 @@ def test_run(self, db_session, num_objects, num_runs, monkeypatch): files.append(FileFactory.create()) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.enabled + name="ExampleHookedCheck", state=MalwareCheckState.Enabled ) enqueue_recorder = pretend.stub( @@ -133,7 +130,7 @@ def test_run(self, db_session, num_objects, num_runs, monkeypatch): tasks.backfill(task, request, "ExampleHookedCheck", num_runs) assert request.log.info.calls == [ - pretend.call("Running backfill on %d Files." % num_runs), + pretend.call("Running backfill on %d Files." % num_runs) ] assert enqueue_recorder.delay.calls == [ @@ -146,20 +143,20 @@ def test_no_updates(self, db_session, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) monkeypatch.setattr(tasks.checks.ExampleScheduledCheck, "version", 2) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.disabled + name="ExampleHookedCheck", state=MalwareCheckState.Disabled ) MalwareCheckFactory.create( - name="ExampleScheduledCheck", state=MalwareCheckState.disabled + name="ExampleScheduledCheck", state=MalwareCheckState.Disabled ) MalwareCheckFactory.create( - name="ExampleScheduledCheck", state=MalwareCheckState.enabled, version=2 + name="ExampleScheduledCheck", state=MalwareCheckState.Enabled, version=2 ) task = pretend.stub() request = pretend.stub( db=db_session, - log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), ) tasks.sync_checks(task, request) @@ -171,7 +168,7 @@ def test_no_updates(self, db_session, monkeypatch): ] @pytest.mark.parametrize( - ("final_state"), [MalwareCheckState.enabled, MalwareCheckState.disabled] + ("final_state"), [MalwareCheckState.Enabled, MalwareCheckState.Disabled] ) def test_upgrade_check(self, monkeypatch, db_session, final_state): monkeypatch.setattr(tasks, "checks", test_checks) @@ -179,13 +176,13 @@ def test_upgrade_check(self, monkeypatch, db_session, final_state): MalwareCheckFactory.create(name="ExampleHookedCheck", state=final_state) MalwareCheckFactory.create( - name="ExampleScheduledCheck", state=MalwareCheckState.disabled + name="ExampleScheduledCheck", state=MalwareCheckState.Disabled ) task = pretend.stub() request = pretend.stub( db=db_session, - log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), ) tasks.sync_checks(task, request) @@ -203,9 +200,9 @@ def test_upgrade_check(self, monkeypatch, db_session, final_state): assert len(db_checks) == 2 - if final_state == MalwareCheckState.disabled: + if final_state == MalwareCheckState.Disabled: assert ( - db_checks[0].state == db_checks[1].state == MalwareCheckState.disabled + db_checks[0].state == db_checks[1].state == MalwareCheckState.Disabled ) else: @@ -219,10 +216,10 @@ def test_one_new_check(self, db_session, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.disabled + name="ExampleHookedCheck", state=MalwareCheckState.Disabled ) 
MalwareCheckFactory.create( - name="ExampleScheduledCheck", state=MalwareCheckState.disabled + name="ExampleScheduledCheck", state=MalwareCheckState.Disabled ) task = pretend.stub() @@ -238,7 +235,7 @@ class FakeMalwareCheck: request = pretend.stub( db=db_session, - log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), ) tasks.sync_checks(task, request) @@ -257,7 +254,7 @@ class FakeMalwareCheck: .one() ) - assert new_check.state == MalwareCheckState.disabled + assert new_check.state == MalwareCheckState.Disabled del tasks.checks.FakeMalwareCheck @@ -265,13 +262,13 @@ def test_too_many_db_checks(self, db_session, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.enabled + name="ExampleHookedCheck", state=MalwareCheckState.Enabled ) MalwareCheckFactory.create( - name="ExampleScheduledCheck", state=MalwareCheckState.enabled + name="ExampleScheduledCheck", state=MalwareCheckState.Enabled ) MalwareCheckFactory.create( - name="AnotherCheck", state=MalwareCheckState.evaluation, version=2 + name="AnotherCheck", state=MalwareCheckState.Evaluation, version=2 ) task = pretend.stub() @@ -288,7 +285,7 @@ def test_too_many_db_checks(self, db_session, monkeypatch): tasks.sync_checks(task, request) assert request.log.info.calls == [ - pretend.call("2 malware checks found in codebase."), + pretend.call("2 malware checks found in codebase.") ] assert request.log.error.calls == [ @@ -296,16 +293,16 @@ def test_too_many_db_checks(self, db_session, monkeypatch): "Found 3 active checks in the db, but only 2 checks in code. Please \ manually move superfluous checks to the wiped_out state in the check admin: \ AnotherCheck" - ), + ) ] def test_only_wiped_out(self, db_session, monkeypatch): monkeypatch.setattr(tasks, "checks", test_checks) MalwareCheckFactory.create( - name="ExampleHookedCheck", state=MalwareCheckState.wiped_out + name="ExampleHookedCheck", state=MalwareCheckState.WipedOut ) MalwareCheckFactory.create( - name="ExampleScheduledCheck", state=MalwareCheckState.wiped_out + name="ExampleScheduledCheck", state=MalwareCheckState.WipedOut ) task = pretend.stub() @@ -320,7 +317,7 @@ def test_only_wiped_out(self, db_session, monkeypatch): tasks.sync_checks(task, request) assert request.log.info.calls == [ - pretend.call("2 malware checks found in codebase."), + pretend.call("2 malware checks found in codebase.") ] assert request.log.error.calls == [ @@ -341,7 +338,7 @@ def test_no_verdicts(self, db_session): request = pretend.stub( db=db_session, - log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), ) task = pretend.stub() removed = tasks.remove_verdicts(task, request, check.name) @@ -349,7 +346,7 @@ def test_no_verdicts(self, db_session): assert request.log.info.calls == [ pretend.call( "Removing 0 malware verdicts associated with %s version 1." 
% check.name - ), + ) ] assert removed == 0 @@ -369,7 +366,7 @@ def test_many_verdicts(self, db_session, check_with_verdicts): request = pretend.stub( db=db_session, - log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None),), + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), ) task = pretend.stub() @@ -386,7 +383,7 @@ def test_many_verdicts(self, db_session, check_with_verdicts): pretend.call( "Removing %d malware verdicts associated with %s version 1." % (num_verdicts, wiped_out_check.name) - ), + ) ] assert removed == num_verdicts diff --git a/tests/unit/malware/test_utils.py b/tests/unit/malware/test_utils.py --- a/tests/unit/malware/test_utils.py +++ b/tests/unit/malware/test_utils.py @@ -24,7 +24,7 @@ class TestGetEnabledChecks: def test_one(self, db_session): check = MalwareCheckFactory.create( - state=MalwareCheckState.enabled, check_type=MalwareCheckType.event_hook + state=MalwareCheckState.Enabled, check_type=MalwareCheckType.EventHook ) result = defaultdict(list) result[check.hooked_object.value].append(check.name) @@ -36,8 +36,8 @@ def test_many(self, db_session): for i in range(10): check = MalwareCheckFactory.create() if ( - check.state == MalwareCheckState.enabled - and check.check_type == MalwareCheckType.event_hook + check.state == MalwareCheckState.Enabled + and check.check_type == MalwareCheckType.EventHook ): result[check.hooked_object.value].append(check.name)
PoC Event-based malware check
Once #7196 has settled, we'll need an initial (non-stub) malware check to prove out the event-based check system.

The current proposal: our PoC check will be based on YARA, and will ship rules for detecting unusual or malicious patterns in `setup.py` files.

Examples of unusual and malicious patterns:

* Attempting to spawn or invoke processes: `os.system`, `os.exec*`, `os.posix_spawn*`, etc.
* Attempting to perform network requests
* Attempting to call deserialization routines frequently used for arbitrary code execution (ACE) and/or obfuscation

See #7096.
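To make the proposal concrete, here is a minimal sketch of what one such rule and the matching flow could look like with the `yara-python` bindings. The rule name, strings, and `meta` fields below are illustrative assumptions, not a final ruleset:

```python
# Minimal sketch: compile one YARA rule targeting process-spawning
# patterns in setup.py and run it against some file contents. The rule
# name and its metadata are hypothetical placeholders.
import yara

rules = yara.compile(
    source=r"""
rule process_spawn_in_setup
{
    meta:
        classification = "threat"
        confidence = "high"
    strings:
        $os_system = "os.system"
        $os_exec = "os.exec"
        $os_posix_spawn = "os.posix_spawn"
    condition:
        any of them
}
"""
)

setup_py = b"import os; os.system('cat /etc/passwd')"
for match in rules.match(data=setup_py):
    # Each match carries the rule name plus its meta fields, which a
    # check could translate into a verdict classification and confidence.
    print(match.rule, match.meta["classification"], match.meta["confidence"])
```

A check could then reduce N matches into a single verdict, e.g. by taking the maximum classification and confidence across the matched rules.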
(Just to save someone a google search: YARA probably refers to https://github.com/VirusTotal/yara ) > (Just to save someone a google search: YARA probably refers to https://github.com/VirusTotal/yara ) Yep, that's the one.
2020-01-16T22:54:13Z
[]
[]
pypi/warehouse
7339
pypi__warehouse-7339
[ "6062" ]
31b711c5512124c0dcb4c34c8bcd3a4a50de5d4b
diff --git a/warehouse/admin/routes.py b/warehouse/admin/routes.py --- a/warehouse/admin/routes.py +++ b/warehouse/admin/routes.py @@ -148,3 +148,6 @@ def includeme(config): config.add_route( "admin.verdicts.detail", "/admin/verdicts/{verdict_id}", domain=warehouse ) + config.add_route( + "admin.verdicts.review", "/admin/verdicts/{verdict_id}/review", domain=warehouse + ) diff --git a/warehouse/admin/views/checks.py b/warehouse/admin/views/checks.py --- a/warehouse/admin/views/checks.py +++ b/warehouse/admin/views/checks.py @@ -34,6 +34,8 @@ def get_checks(request): if not check.is_stale: active_checks.append(check) + active_checks.sort(key=lambda check: check.created, reverse=True) + return {"checks": active_checks} diff --git a/warehouse/admin/views/verdicts.py b/warehouse/admin/views/verdicts.py --- a/warehouse/admin/views/verdicts.py +++ b/warehouse/admin/views/verdicts.py @@ -11,7 +11,7 @@ # limitations under the License. from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage -from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound +from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther from pyramid.view import view_config from warehouse.malware.models import ( @@ -61,11 +61,43 @@ def get_verdict(request): verdict = request.db.query(MalwareVerdict).get(request.matchdict["verdict_id"]) if verdict: - return {"verdict": verdict} + return { + "verdict": verdict, + "classifications": list(VerdictClassification.__members__.keys()), + } raise HTTPNotFound +@view_config( + route_name="admin.verdicts.review", + permission="moderator", + request_method="POST", + uses_session=True, + require_methods=False, + require_csrf=True, +) +def review_verdict(request): + verdict = request.db.query(MalwareVerdict).get(request.matchdict["verdict_id"]) + + try: + classification = getattr(VerdictClassification, request.POST["classification"]) + except (KeyError, AttributeError): + raise HTTPBadRequest("Invalid verdict classification.") from None + + verdict.manually_reviewed = True + verdict.reviewer_verdict = classification + + request.session.flash( + "Verdict %s marked as reviewed." % verdict.id, queue="success" + ) + + # If no query params are provided (e.g. 
request originating from
+    # admin.verdicts.detail view), then route to the default list view
+    query = request.GET or {"classification": "threat", "manually_reviewed": "0"}
+    return HTTPSeeOther(request.route_path("admin.verdicts.list", _query=query))
+
+
 def validate_fields(request, validators):
     try:
         int(request.params.get("page", 1))
diff --git a/warehouse/malware/models.py b/warehouse/malware/models.py
--- a/warehouse/malware/models.py
+++ b/warehouse/malware/models.py
@@ -161,7 +161,7 @@ class MalwareVerdict(db.Model):
     message = Column(Text, nullable=True)
     details = Column(JSONB, nullable=True)
     manually_reviewed = Column(Boolean, nullable=False, server_default=sql.false())
-    administrator_verdict = Column(
+    reviewer_verdict = Column(
         Enum(VerdictClassification, values_callable=lambda x: [e.value for e in x]),
         nullable=True,
     )
diff --git a/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py b/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py
--- a/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py
+++ b/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py
@@ -91,7 +91,7 @@ def upgrade():
             server_default=sa.text("false"),
             nullable=False,
         ),
-        sa.Column("administrator_verdict", VerdictClassifications, nullable=True,),
+        sa.Column("reviewer_verdict", VerdictClassifications, nullable=True,),
         sa.Column("full_report_link", sa.String(), nullable=True),
         sa.ForeignKeyConstraint(
             ["check_id"], ["malware_checks.id"], onupdate="CASCADE", ondelete="CASCADE"
diff --git a/tests/common/db/malware.py b/tests/common/db/malware.py --- a/tests/common/db/malware.py +++ b/tests/common/db/malware.py @@ -55,7 +55,7 @@ class Meta: release = None project = None manually_reviewed = True - administrator_verdict = factory.fuzzy.FuzzyChoice(list(VerdictClassification)) + reviewer_verdict = factory.fuzzy.FuzzyChoice(list(VerdictClassification)) classification = factory.fuzzy.FuzzyChoice(list(VerdictClassification)) confidence = factory.fuzzy.FuzzyChoice(list(VerdictConfidence)) message = factory.fuzzy.FuzzyText(length=80) diff --git a/tests/unit/admin/test_routes.py b/tests/unit/admin/test_routes.py --- a/tests/unit/admin/test_routes.py +++ b/tests/unit/admin/test_routes.py @@ -141,4 +141,9 @@ def test_includeme(): pretend.call( "admin.verdicts.detail", "/admin/verdicts/{verdict_id}", domain=warehouse ), + pretend.call( + "admin.verdicts.review", + "/admin/verdicts/{verdict_id}/review", + domain=warehouse, + ), ] diff --git a/tests/unit/admin/views/test_checks.py b/tests/unit/admin/views/test_checks.py --- a/tests/unit/admin/views/test_checks.py +++ b/tests/unit/admin/views/test_checks.py @@ -28,7 +28,10 @@ def test_get_checks_none(self, db_request): def test_get_checks(self, db_request): checks = [MalwareCheckFactory.create() for _ in range(10)] - assert views.get_checks(db_request) == {"checks": checks} + result = views.get_checks(db_request)["checks"] + assert len(result) == len(checks) + for r in result: + assert r in checks def test_get_checks_different_versions(self, db_request): checks = [MalwareCheckFactory.create() for _ in range(5)] @@ -36,7 +39,10 @@ def test_get_checks_different_versions(self, db_request): MalwareCheckFactory.create(name="MyCheck", version=i) for i in range(1, 6) ] checks.append(checks_same[-1]) - assert views.get_checks(db_request) == {"checks": checks} + result = views.get_checks(db_request)["checks"] + assert len(result) == len(checks) + for r in result: + assert r in checks class TestGetCheck: diff --git a/tests/unit/admin/views/test_verdicts.py b/tests/unit/admin/views/test_verdicts.py --- a/tests/unit/admin/views/test_verdicts.py +++ b/tests/unit/admin/views/test_verdicts.py @@ -14,6 +14,7 @@ from random import randint +import pretend import pytest from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound @@ -193,10 +194,55 @@ def test_found(self, db_request): lookup_id = verdicts[index].id db_request.matchdict["verdict_id"] = lookup_id - assert views.get_verdict(db_request) == {"verdict": verdicts[index]} + assert views.get_verdict(db_request) == { + "verdict": verdicts[index], + "classifications": ["Benign", "Indeterminate", "Threat"], + } def test_not_found(self, db_request): db_request.matchdict["verdict_id"] = uuid.uuid4() with pytest.raises(HTTPNotFound): views.get_verdict(db_request) + + +class TestReviewVerdict: + @pytest.mark.parametrize( + "manually_reviewed, reviewer_verdict", + [ + (False, None), # unreviewed verdict + (True, VerdictClassification.Threat), # previously reviewed + ], + ) + def test_set_classification(self, db_request, manually_reviewed, reviewer_verdict): + verdict = MalwareVerdictFactory.create( + manually_reviewed=manually_reviewed, reviewer_verdict=reviewer_verdict, + ) + + db_request.matchdict["verdict_id"] = verdict.id + db_request.POST = {"classification": "Benign"} + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/admin/verdicts/%s/review" % verdict.id + ) + + 
views.review_verdict(db_request)
+
+        assert db_request.session.flash.calls == [
+            pretend.call("Verdict %s marked as reviewed." % verdict.id, queue="success")
+        ]
+
+        assert verdict.manually_reviewed
+        assert verdict.reviewer_verdict == VerdictClassification.Benign
+
+    @pytest.mark.parametrize("post_params", [{}, {"classification": "Nope"}])
+    def test_errors(self, db_request, post_params):
+        verdict = MalwareVerdictFactory.create()
+        db_request.matchdict["verdict_id"] = verdict.id
+        db_request.POST = post_params
+
+        with pytest.raises(HTTPBadRequest):
+            views.review_verdict(db_request)
admin interface for review of flagged packages
We're working on a system to detect malicious uploads (per #4998). It's going to be a pipeline in which automated systems run checks and flag packages/projects for deletion, review, or approval. This issue covers a feature in the admin interface where administrators/moderators (#4011) can review those flagged projects and releases and decide what to accept or reject.

We'll probably also want to use this if/when we implement a mechanism for users to report packages (#3896), and if/when we start automatically checking uploaded packages for compliant metadata and installability (#194).
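For illustration, a rough sketch of how a moderator's decision could be submitted once such a review endpoint exists. This mirrors the `POST /admin/verdicts/{verdict_id}/review` route added in the associated patch; the host, verdict id, session cookie, and CSRF token below are placeholders, and in practice the route requires an authenticated moderator session:

```python
# Hedged sketch of driving the review action over HTTP. All concrete
# values (host, verdict id, auth/CSRF material) are hypothetical
# placeholders, not real credentials or endpoints.
import requests

BASE = "https://warehouse.example"  # placeholder host
verdict_id = "00000000-0000-0000-0000-000000000000"  # placeholder verdict id

resp = requests.post(
    f"{BASE}/admin/verdicts/{verdict_id}/review",
    # One of the VerdictClassification names: Benign, Indeterminate, Threat.
    data={"classification": "Benign"},
    cookies={"session_id": "..."},    # placeholder authenticated session
    headers={"X-CSRF-Token": "..."},  # placeholder CSRF token
)
resp.raise_for_status()
```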
2020-02-04T00:52:30Z
[]
[]
pypi/warehouse
7377
pypi__warehouse-7377
[ "6062" ]
3f0d4e0e296796d166ee5221de5cd323193d2316
diff --git a/warehouse/admin/routes.py b/warehouse/admin/routes.py --- a/warehouse/admin/routes.py +++ b/warehouse/admin/routes.py @@ -128,3 +128,26 @@ def includeme(config): # Squats config.add_route("admin.squats", "/admin/squats/", domain=warehouse) config.add_route("admin.squats.review", "/admin/squats/review/", domain=warehouse) + + # Malware checks + config.add_route("admin.checks.list", "/admin/checks/", domain=warehouse) + config.add_route( + "admin.checks.detail", "/admin/checks/{check_name}", domain=warehouse + ) + config.add_route( + "admin.checks.change_state", + "/admin/checks/{check_name}/change_state", + domain=warehouse, + ) + config.add_route( + "admin.checks.run_evaluation", + "/admin/checks/{check_name}/run_evaluation", + domain=warehouse, + ) + config.add_route("admin.verdicts.list", "/admin/verdicts/", domain=warehouse) + config.add_route( + "admin.verdicts.detail", "/admin/verdicts/{verdict_id}", domain=warehouse + ) + config.add_route( + "admin.verdicts.review", "/admin/verdicts/{verdict_id}/review", domain=warehouse + ) diff --git a/warehouse/admin/views/checks.py b/warehouse/admin/views/checks.py new file mode 100644 --- /dev/null +++ b/warehouse/admin/views/checks.py @@ -0,0 +1,149 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pyramid.httpexceptions import HTTPNotFound, HTTPSeeOther +from pyramid.view import view_config +from sqlalchemy.orm.exc import NoResultFound + +from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareCheckType +from warehouse.malware.tasks import backfill, remove_verdicts, run_scheduled_check + +EVALUATION_RUN_SIZE = 10000 + + +@view_config( + route_name="admin.checks.list", + renderer="admin/malware/checks/index.html", + permission="moderator", + request_method="GET", + uses_session=True, +) +def get_checks(request): + all_checks = request.db.query(MalwareCheck) + active_checks = [] + for check in all_checks: + if not check.is_stale: + active_checks.append(check) + + active_checks.sort(key=lambda check: check.created, reverse=True) + + return {"checks": active_checks} + + +@view_config( + route_name="admin.checks.detail", + renderer="admin/malware/checks/detail.html", + permission="moderator", + request_method="GET", + uses_session=True, +) +def get_check(request): + check = get_check_by_name(request.db, request.matchdict["check_name"]) + + all_checks = ( + request.db.query(MalwareCheck) + .filter(MalwareCheck.name == request.matchdict["check_name"]) + .order_by(MalwareCheck.version.desc()) + .all() + ) + + return { + "check": check, + "checks": all_checks, + "states": MalwareCheckState, + "evaluation_run_size": EVALUATION_RUN_SIZE, + } + + +@view_config( + route_name="admin.checks.run_evaluation", + permission="admin", + request_method="POST", + uses_session=True, + require_methods=False, + require_csrf=True, +) +def run_evaluation(request): + check = get_check_by_name(request.db, request.matchdict["check_name"]) + + if check.state not in (MalwareCheckState.Enabled, MalwareCheckState.Evaluation): + request.session.flash( + f"Check must be in 'enabled' or 'evaluation' state to manually execute.", + queue="error", + ) + return HTTPSeeOther( + request.route_path("admin.checks.detail", check_name=check.name) + ) + + if check.check_type == MalwareCheckType.EventHook: + request.session.flash( + f"Running {check.name} on {EVALUATION_RUN_SIZE} {check.hooked_object.value}s\ +!", + queue="success", + ) + request.task(backfill).delay(check.name, EVALUATION_RUN_SIZE) + + else: + request.session.flash(f"Running {check.name} now!", queue="success") + request.task(run_scheduled_check).delay(check.name, manually_triggered=True) + + return HTTPSeeOther( + request.route_path("admin.checks.detail", check_name=check.name) + ) + + +@view_config( + route_name="admin.checks.change_state", + permission="admin", + request_method="POST", + uses_session=True, + require_methods=False, + require_csrf=True, +) +def change_check_state(request): + check = get_check_by_name(request.db, request.matchdict["check_name"]) + + try: + check_state = request.POST["check_state"] + except KeyError: + raise HTTPNotFound + + try: + check.state = MalwareCheckState(check_state) + except ValueError: + request.session.flash("Invalid check state provided.", queue="error") + else: + if check.state == MalwareCheckState.WipedOut: + request.task(remove_verdicts).delay(check.name) + request.session.flash( + f"Changed {check.name!r} check to {check.state.value!r}!", queue="success" + ) + finally: + return HTTPSeeOther( + request.route_path("admin.checks.detail", check_name=check.name) + ) + + +def get_check_by_name(db, check_name): + try: + # Throw an exception if and only if no results are returned. 
+ newest = ( + db.query(MalwareCheck) + .filter(MalwareCheck.name == check_name) + .order_by(MalwareCheck.version.desc()) + .limit(1) + .one() + ) + except NoResultFound: + raise HTTPNotFound + + return newest diff --git a/warehouse/admin/views/verdicts.py b/warehouse/admin/views/verdicts.py new file mode 100644 --- /dev/null +++ b/warehouse/admin/views/verdicts.py @@ -0,0 +1,136 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage +from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther +from pyramid.view import view_config + +from warehouse.malware.models import ( + MalwareCheck, + MalwareVerdict, + VerdictClassification, + VerdictConfidence, +) +from warehouse.utils.paginate import paginate_url_factory + + +@view_config( + route_name="admin.verdicts.list", + renderer="admin/malware/verdicts/index.html", + permission="moderator", + request_method="GET", + uses_session=True, +) +def get_verdicts(request): + result = {} + result["check_names"] = set( + [name for (name,) in request.db.query(MalwareCheck.name)] + ) + result["classifications"] = set([c.value for c in VerdictClassification]) + result["confidences"] = set([c.value for c in VerdictConfidence]) + + validate_fields(request, result) + + result["verdicts"] = SQLAlchemyORMPage( + generate_query(request.db, request.params), + page=int(request.params.get("page", 1)), + items_per_page=25, + url_maker=paginate_url_factory(request), + ) + + return result + + +@view_config( + route_name="admin.verdicts.detail", + renderer="admin/malware/verdicts/detail.html", + permission="moderator", + request_method="GET", + uses_session=True, +) +def get_verdict(request): + verdict = request.db.query(MalwareVerdict).get(request.matchdict["verdict_id"]) + + if verdict: + return { + "verdict": verdict, + "classifications": list(VerdictClassification.__members__.keys()), + } + + raise HTTPNotFound + + +@view_config( + route_name="admin.verdicts.review", + permission="moderator", + request_method="POST", + uses_session=True, + require_methods=False, + require_csrf=True, +) +def review_verdict(request): + verdict = request.db.query(MalwareVerdict).get(request.matchdict["verdict_id"]) + + try: + classification = getattr(VerdictClassification, request.POST["classification"]) + except (KeyError, AttributeError): + raise HTTPBadRequest("Invalid verdict classification.") from None + + verdict.manually_reviewed = True + verdict.reviewer_verdict = classification + + request.session.flash( + "Verdict %s marked as reviewed." % verdict.id, queue="success" + ) + + # If no query params are provided (e.g. 
request originating from
+    # admin.verdicts.detail view), then route to the default list view
+    query = request.GET or {"classification": "threat", "manually_reviewed": "0"}
+    return HTTPSeeOther(request.route_path("admin.verdicts.list", _query=query))
+
+
+def validate_fields(request, validators):
+    try:
+        int(request.params.get("page", 1))
+    except ValueError:
+        raise HTTPBadRequest("'page' must be an integer.") from None
+
+    validators = {**validators, **{"manually_revieweds": set(["0", "1"])}}
+
+    for key, possible_values in validators.items():
+        # Remove the trailing 's'
+        value = request.params.get(key[:-1])
+        additional_values = set([None, ""])
+        if value not in possible_values | additional_values:
+            raise HTTPBadRequest(
+                "Invalid value for '%s': %s." % (key[:-1], value)
+            ) from None
+
+
+def generate_query(db, params):
+    """
+    Returns an SQLAlchemy query with request params applied as filters.
+    """
+    query = db.query(MalwareVerdict)
+    if params.get("check_name"):
+        query = query.join(MalwareCheck)
+        query = query.filter(MalwareCheck.name == params["check_name"])
+    if params.get("confidence"):
+        query = query.filter(MalwareVerdict.confidence == params["confidence"])
+    if params.get("classification"):
+        query = query.filter(MalwareVerdict.classification == params["classification"])
+    if params.get("manually_reviewed"):
+        query = query.filter(
+            MalwareVerdict.manually_reviewed == bool(int(params["manually_reviewed"]))
+        )
+
+    return query.order_by(MalwareVerdict.run_date.desc())
diff --git a/warehouse/cli/malware.py b/warehouse/cli/malware.py
new file mode 100644
--- /dev/null
+++ b/warehouse/cli/malware.py
@@ -0,0 +1,34 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import click
+
+from warehouse.cli import warehouse
+from warehouse.malware.tasks import sync_checks as _sync_checks
+
+
+@warehouse.group()  # pragma: no branch
+def malware():
+    """
+    Manage the Warehouse Malware Checks.
+    """
+
+
+@malware.command()
+@click.pass_obj
+def sync_checks(config):
+    """
+    Sync the Warehouse database with the malware checks in malware/checks.
+    """
+
+    request = config.task(_sync_checks).get_request()
+    config.task(_sync_checks).run(request)
diff --git a/warehouse/config.py b/warehouse/config.py
--- a/warehouse/config.py
+++ b/warehouse/config.py
@@ -203,6 +203,7 @@ def configure(settings=None):
     maybe_set_compound(settings, "mail", "backend", "MAIL_BACKEND")
     maybe_set_compound(settings, "metrics", "backend", "METRICS_BACKEND")
     maybe_set_compound(settings, "breached_passwords", "backend", "BREACHED_PASSWORDS")
+    maybe_set_compound(settings, "malware_check", "backend", "MALWARE_CHECK_BACKEND")
 
     # Add the settings we use when the environment is set to development.
if settings["warehouse.env"] == Environment.development:
@@ -389,6 +390,9 @@ def configure(settings=None):
     # Register support for Macaroon based authentication
     config.include(".macaroons")
 
+    # Register support for malware checks
+    config.include(".malware")
+
     # Register logged-in views
     config.include(".manage")
diff --git a/warehouse/malware/__init__.py b/warehouse/malware/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/malware/__init__.py
@@ -0,0 +1,76 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+
+from celery.schedules import crontab
+
+import warehouse.malware.checks as checks
+
+from warehouse import db
+from warehouse.malware.interfaces import IMalwareCheckService
+from warehouse.malware.models import MalwareCheckObjectType
+from warehouse.malware.tasks import run_scheduled_check
+from warehouse.malware.utils import get_enabled_hooked_checks
+
+
+@db.listens_for(db.Session, "after_flush")
+def determine_malware_checks(config, session, flush_context):
+    if not session.new:
+        return
+
+    if not any(
+        [
+            obj.__class__.__name__
+            for obj in session.new
+            if obj.__class__.__name__ in MalwareCheckObjectType.__members__
+        ]
+    ):
+        return
+
+    malware_checks = session.info.setdefault("warehouse.malware.checks", set())
+    enabled_checks = get_enabled_hooked_checks(session)
+    for obj in session.new:
+        for check_name in enabled_checks.get(obj.__class__.__name__, []):
+            malware_checks.update([f"{check_name}:{obj.id}"])
+
+
+@db.listens_for(db.Session, "after_commit")
+def queue_malware_checks(config, session):
+
+    malware_checks = session.info.pop("warehouse.malware.checks", set())
+    if not malware_checks:
+        return
+
+    malware_check_factory = config.find_service_factory(IMalwareCheckService)
+
+    malware_check = malware_check_factory(None, config)
+    malware_check.run_checks(malware_checks)
+
+
+def includeme(config):
+    malware_check_class = config.maybe_dotted(
+        config.registry.settings["malware_check.backend"]
+    )
+    # Register the malware check service
+    config.register_service_factory(
+        malware_check_class.create_service, IMalwareCheckService
+    )
+
+    # Add scheduled tasks for every scheduled Malware Check.
+    all_checks = inspect.getmembers(checks, inspect.isclass)
+    for check_obj in all_checks:
+        check = check_obj[1]
+        if check.check_type == "scheduled":
+            config.add_periodic_task(
+                crontab(**check.schedule), run_scheduled_check, args=(check_obj[0],)
+            )
diff --git a/warehouse/malware/checks/__init__.py b/warehouse/malware/checks/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/malware/checks/__init__.py
@@ -0,0 +1,14 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .package_turnover import PackageTurnoverCheck  # noqa
+from .setup_patterns import SetupPatternCheck  # noqa
diff --git a/warehouse/malware/checks/base.py b/warehouse/malware/checks/base.py
new file mode 100644
--- /dev/null
+++ b/warehouse/malware/checks/base.py
@@ -0,0 +1,73 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareVerdict
+from warehouse.packaging import models
+
+
+class MalwareCheckBase:
+    def __init__(self, db):
+        self.db = db
+        self._name = self.__class__.__name__
+        self._load_check_fields()
+        self._verdicts = []
+
+    @classmethod
+    def prepare(cls, request, obj_id):
+        """
+        Prepares some context for scanning the given object.
+        """
+        kwargs = {"obj_id": obj_id}
+
+        model = getattr(models, cls.hooked_object)
+        kwargs["obj"] = request.db.query(model).get(obj_id)
+
+        if cls.hooked_object == "File":
+            kwargs["file_url"] = request.route_url(
+                "packaging.file", path=kwargs["obj"].path
+            )
+
+        return kwargs
+
+    def add_verdict(self, **kwargs):
+        self._verdicts.append(MalwareVerdict(check_id=self.id, **kwargs))
+
+    def run(self, **kwargs):
+        """
+        Runs the check and inserts returned verdicts.
+        """
+        self.scan(**kwargs)
+        self.db.add_all(self._verdicts)
+
+    def scan(self, **kwargs):
+        """
+        Scans the object and returns a verdict.
+        """
+
+    def backfill(self, sample=1):
+        """
+        Runs the check across all historical data in PyPI. The sample value represents
+        the fraction of files to run the check against. By default, it will run the
+        backfill on the entire corpus.
+        """
+
+    def _load_check_fields(self):
+        self.id, self.state = (
+            self.db.query(MalwareCheck.id, MalwareCheck.state)
+            .filter(MalwareCheck.name == self._name)
+            .filter(
+                MalwareCheck.state.in_(
+                    [MalwareCheckState.Enabled, MalwareCheckState.Evaluation]
+                )
+            )
+            .one()
+        )
diff --git a/warehouse/malware/checks/package_turnover/__init__.py b/warehouse/malware/checks/package_turnover/__init__.py
new file mode 100644
--- /dev/null
+++ b/warehouse/malware/checks/package_turnover/__init__.py
@@ -0,0 +1,13 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +from .check import PackageTurnoverCheck # noqa diff --git a/warehouse/malware/checks/package_turnover/check.py b/warehouse/malware/checks/package_turnover/check.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/package_turnover/check.py @@ -0,0 +1,112 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import datetime, timedelta +from textwrap import dedent + +from warehouse.accounts.models import UserEvent +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.models import ( + MalwareVerdict, + VerdictClassification, + VerdictConfidence, +) +from warehouse.packaging.models import ProjectEvent, Release + + +class PackageTurnoverCheck(MalwareCheckBase): + version = 1 + short_description = "A check for unusual changes in package ownership" + long_description = dedent( + """ + This check looks at recently uploaded releases and determines + whether their owners have recently changed or decreased the security + of their accounts (e.g., by disabling 2FA). + """ + ) + check_type = "scheduled" + schedule = {"minute": 0, "hour": 0} + + def __init__(self, db): + super().__init__(db) + self._scan_interval = datetime.utcnow() - timedelta(hours=24) + + def user_posture_verdicts(self, project): + for user in project.users: + has_removed_2fa_method = self.db.query( + self.db.query(UserEvent) + .filter(UserEvent.user_id == user.id) + .filter(UserEvent.time >= self._scan_interval) + .filter(UserEvent.tag == "account:two_factor:method_removed") + .exists() + ).scalar() + + if has_removed_2fa_method and not user.has_two_factor: + self.add_verdict( + project_id=project.id, + classification=VerdictClassification.Threat, + confidence=VerdictConfidence.High, + message="User with control over this package has disabled 2FA", + ) + + def user_turnover_verdicts(self, project): + # NOTE: This could probably be more involved to check for the case + # where someone adds themself, removes the real maintainers, pushes a malicious + # release, then reverts the ownership to the original maintainers and removes + # themself again. 
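+        # (For reference: a "project:role:add" event stores the affected
+        # username under the "target_user" key of its "additional" payload,
+        # which is the only field this check reads below.)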
+ recent_role_adds = ( + self.db.query(ProjectEvent.additional) + .filter(ProjectEvent.project_id == project.id) + .filter(ProjectEvent.time >= self._scan_interval) + .filter(ProjectEvent.tag == "project:role:add") + .all() + ) + + added_users = {role_add["target_user"] for role_add, in recent_role_adds} + current_users = {user.username for user in project.users} + + if added_users == current_users: + self.add_verdict( + project_id=project.id, + classification=VerdictClassification.Threat, + confidence=VerdictConfidence.High, + message="Suspicious user turnover; all current maintainers are new", + ) + + def scan(self, **kwargs): + prior_verdicts = ( + self.db.query(MalwareVerdict.release_id).filter( + MalwareVerdict.check_id == self.id + ) + ).subquery() + + releases = ( + self.db.query(Release) + .filter(Release.created >= self._scan_interval) + .filter(~Release.id.in_(prior_verdicts)) + .all() + ) + + visited_project_ids = set() + for release in releases: + # Skip projects for which this is the first release, + # since we need a baseline to compare against + if len(release.project.releases) < 2: + continue + + if release.project.id in visited_project_ids: + continue + + visited_project_ids.add(release.project.id) + + self.user_posture_verdicts(release.project) + self.user_turnover_verdicts(release.project) diff --git a/warehouse/malware/checks/setup_patterns/__init__.py b/warehouse/malware/checks/setup_patterns/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/setup_patterns/__init__.py @@ -0,0 +1,13 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .check import SetupPatternCheck # noqa diff --git a/warehouse/malware/checks/setup_patterns/check.py b/warehouse/malware/checks/setup_patterns/check.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/setup_patterns/check.py @@ -0,0 +1,109 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +from textwrap import dedent + +import yara + +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.checks.utils import extract_file_content, fetch_url_content +from warehouse.malware.errors import FatalCheckException +from warehouse.malware.models import VerdictClassification, VerdictConfidence + + +class SetupPatternCheck(MalwareCheckBase): + _yara_rule_file = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "setup_py_rules.yara" + ) + + version = 1 + short_description = "A check for common malicious patterns in setup.py" + long_description = dedent( + """ + This check uses YARA to search for common malicious patterns in the setup.py + files of uploaded release archives. + """ + ) + check_type = "event_hook" + hooked_object = "File" + + def __init__(self, db): + super().__init__(db) + self._yara_rules = self._load_yara_rules() + + def _load_yara_rules(self): + return yara.compile(filepath=self._yara_rule_file) + + def scan(self, **kwargs): + release_file = kwargs.get("obj") + file_url = kwargs.get("file_url") + if release_file is None or file_url is None: + raise FatalCheckException( + "Release file or file url is None, indicating user error." + ) + + if release_file.packagetype != "sdist": + # Per PEP 491: bdists do not contain setup.py. + # This check only scans dists that contain setup.py, so + # we have nothing to perform. + return + + archive_stream = fetch_url_content(file_url) + setup_py_contents = extract_file_content(archive_stream, "setup.py") + if setup_py_contents is None: + self.add_verdict( + file_id=release_file.id, + classification=VerdictClassification.Indeterminate, + confidence=VerdictConfidence.High, + message="sdist does not contain a suitable setup.py for analysis", + ) + return + + matches = self._yara_rules.match(data=setup_py_contents) + if len(matches) > 0: + # We reduce N matches into a single verdict by taking the maximum + # classification and confidence. + classification = max( + VerdictClassification(m.meta["classification"]) for m in matches + ) + confidence = max(VerdictConfidence(m.meta["confidence"]) for m in matches) + message = ":".join(m.rule for m in matches) + + details = {} + for match in matches: + details[match.rule] = { + "classification": match.meta["classification"], + "confidence": match.meta["confidence"], + # NOTE: We could include the raw bytes here (s[2]), + # but we'd have to serialize/encode it to make JSON happy. + # It probably suffices to include the offset and identifier + # for triage purposes. + "strings": [[s[0], s[1]] for s in match.strings], + } + + self.add_verdict( + file_id=release_file.id, + classification=classification, + confidence=confidence, + message=message, + details=details, + ) + else: + # No matches? Report a low-confidence benign verdict. + self.add_verdict( + file_id=release_file.id, + classification=VerdictClassification.Benign, + confidence=VerdictConfidence.Low, + message="No malicious patterns found in setup.py", + ) diff --git a/warehouse/malware/checks/utils.py b/warehouse/malware/checks/utils.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/checks/utils.py @@ -0,0 +1,80 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import pathlib +import tarfile +import zipfile + +import requests + + +def fetch_url_content(url): + """ + Retrieves the contents of the given (presumed CDN) URL as a BytesIO. + + Performs no error checking; exceptions are handled in the check harness + as part of check retrying behavior. + """ + response = requests.get(url) + response.raise_for_status() + return io.BytesIO(response.content) + + +def extract_file_content(archive_stream, file_path): + """ + Retrieves the content of the given path from the given archive stream + (presumed to be a dist) as bytes. + + Handling of the given path is a little special: since the dist format(s) + don't enforce any naming convention for the base archive directory, + the path is interpreted as {base}/{file_path}. Thus, a call like this: + + extract_file_content(stream, "setup.py") + + will extract and return the contents of {base}/setup.py where {base} + is frequently (but not guaranteed to be) something like $name-$version. + + Returns None on any sort of failure. + """ + if zipfile.is_zipfile(archive_stream): + with zipfile.ZipFile(archive_stream) as zipobj: + for name in zipobj.namelist(): + path_parts = pathlib.Path(name).parts + if len(path_parts) >= 2: + tail = pathlib.Path(*path_parts[1:]) + if str(tail) == file_path: + return zipobj.read(name) + return None + else: + # NOTE: is_zipfile doesn't rewind the fileobj it's given. + archive_stream.seek(0) + + # NOTE: We don't need to perform a sanity check on + # the (presumed) tarfile's compression here, since we're + # extracting from a stream that's already gone through + # upload validation. + # See _is_valid_dist_file in forklift/legacy.py. + try: + with tarfile.open(fileobj=archive_stream) as tarobj: + member = tarobj.next() + while member: + path_parts = pathlib.Path(member.name).parts + if len(path_parts) >= 2: + tail = pathlib.Path(*path_parts[1:]) + if str(tail) == file_path: + return tarobj.extractfile(member).read() + + member = tarobj.next() + return None + except tarfile.TarError: + return None diff --git a/warehouse/malware/errors.py b/warehouse/malware/errors.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/errors.py @@ -0,0 +1,15 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class FatalCheckException(Exception): + pass diff --git a/warehouse/malware/interfaces.py b/warehouse/malware/interfaces.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/interfaces.py @@ -0,0 +1,26 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from zope.interface import Interface + + +class IMalwareCheckService(Interface): + def create_service(context, request): + """ + Create the service, given the context and request for which it is being + created for. + """ + + def run_checks(checks, **kwargs): + """ + Run a given set of Checks + """ diff --git a/warehouse/malware/models.py b/warehouse/malware/models.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/models.py @@ -0,0 +1,173 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import enum +import functools + +from citext import CIText +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Enum, + ForeignKey, + Integer, + String, + Text, + UniqueConstraint, + orm, + sql, +) +from sqlalchemy.dialects.postgresql import JSONB + +from warehouse import db +from warehouse.utils.attrs import make_repr + + [email protected] +class MalwareCheckType(enum.Enum): + + EventHook = "event_hook" + Scheduled = "scheduled" + + [email protected] +class MalwareCheckState(enum.Enum): + + Enabled = "enabled" + Evaluation = "evaluation" + Disabled = "disabled" + WipedOut = "wiped_out" + + [email protected] +class MalwareCheckObjectType(enum.Enum): + + File = "File" + Release = "Release" + Project = "Project" + + [email protected] [email protected]_ordering +class VerdictClassification(enum.Enum): + """ + An enumeration of classification markers for malware verdicts. + + Note that the order of declaration is important: it provides + the appropriate ordering behavior when finding the minimum + and maximum classifications for a set of verdicts. + """ + + Benign = "benign" + Indeterminate = "indeterminate" + Threat = "threat" + + def __lt__(self, other): + members = list(self.__class__) + return members.index(self) < members.index(other) + + [email protected] [email protected]_ordering +class VerdictConfidence(enum.Enum): + """ + An enumeration of confidence markers for malware verdicts. + + Note that the order of declaration is important: it provides + the appropriate ordering behavior when finding the minimum + and maximum confidences for a set of verdicts. 
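+
+    For example, max(VerdictConfidence.Low, VerdictConfidence.High)
+    evaluates to VerdictConfidence.High.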
+ """ + + Low = "low" + Medium = "medium" + High = "high" + + def __lt__(self, other): + members = list(self.__class__) + return members.index(self) < members.index(other) + + +class MalwareCheck(db.Model): + + __tablename__ = "malware_checks" + __table_args__ = (UniqueConstraint("name", "version"),) + __repr__ = make_repr("name", "version") + + name = Column(CIText, nullable=False) + version = Column(Integer, default=1, nullable=False) + short_description = Column(String(length=128), nullable=False) + long_description = Column(Text, nullable=False) + check_type = Column( + Enum(MalwareCheckType, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + # The object name that hooked-based checks operate on, e.g. + # Project, File, Release + hooked_object = Column( + Enum(MalwareCheckObjectType, values_callable=lambda x: [e.value for e in x]), + nullable=True, + ) + # The run schedule for schedule-based checks. + schedule = Column(JSONB, nullable=True) + state = Column( + Enum(MalwareCheckState, values_callable=lambda x: [e.value for e in x]), + nullable=False, + server_default=("disabled"), + ) + created = Column(DateTime, nullable=False, server_default=sql.func.now()) + + @property + def is_stale(self): + session = orm.object_session(self) + newest = ( + session.query(MalwareCheck) + .filter(MalwareCheck.name == self.name) + .order_by(MalwareCheck.version.desc()) + .first() + ) + return self.version != newest.version + + +class MalwareVerdict(db.Model): + __tablename__ = "malware_verdicts" + + run_date = Column(DateTime, nullable=False, server_default=sql.func.now()) + check_id = Column( + ForeignKey("malware_checks.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + index=True, + ) + file_id = Column(ForeignKey("release_files.id"), nullable=True) + release_id = Column(ForeignKey("releases.id"), nullable=True) + project_id = Column(ForeignKey("projects.id"), nullable=True) + classification = Column( + Enum(VerdictClassification, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + confidence = Column( + Enum(VerdictConfidence, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + message = Column(Text, nullable=True) + details = Column(JSONB, nullable=True) + manually_reviewed = Column(Boolean, nullable=False, server_default=sql.false()) + reviewer_verdict = Column( + Enum(VerdictClassification, values_callable=lambda x: [e.value for e in x]), + nullable=True, + ) + full_report_link = Column(String, nullable=True) + + check = orm.relationship("MalwareCheck", foreign_keys=[check_id], lazy=True) + release_file = orm.relationship("File", foreign_keys=[file_id], lazy=True) + release = orm.relationship("Release", foreign_keys=[release_id], lazy=True) + project = orm.relationship("Project", foreign_keys=[project_id], lazy=True) diff --git a/warehouse/malware/services.py b/warehouse/malware/services.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/services.py @@ -0,0 +1,51 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from zope.interface import implementer + +from warehouse.malware.interfaces import IMalwareCheckService +from warehouse.malware.tasks import run_check + + +@implementer(IMalwareCheckService) +class PrinterMalwareCheckService: + def __init__(self, executor): + self.executor = executor + + @classmethod + def create_service(cls, context, request): + return cls(print) + + def run_checks(self, checks, **kwargs): + for check in checks: + self.executor(check, kwargs) + + +@implementer(IMalwareCheckService) +class DatabaseMalwareCheckService: + def __init__(self, executor): + self.executor = executor + + @classmethod + def create_service(cls, context, request): + return cls(request.task(run_check).delay) + + def run_checks(self, checks, **kwargs): + for check_info in checks: + # Hooked checks + if ":" in check_info: + check_name, obj_id = check_info.split(":") + kwargs["obj_id"] = obj_id + # Scheduled checks + else: + check_name = check_info + self.executor(check_name, **kwargs) diff --git a/warehouse/malware/tasks.py b/warehouse/malware/tasks.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/tasks.py @@ -0,0 +1,166 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import inspect + +from sqlalchemy.orm.exc import NoResultFound + +import warehouse.malware.checks as checks +import warehouse.packaging.models as packaging_models + +from warehouse.malware.errors import FatalCheckException +from warehouse.malware.interfaces import IMalwareCheckService +from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareVerdict +from warehouse.malware.utils import get_check_fields +from warehouse.tasks import task + + +@task(bind=True, ignore_result=True, acks_late=True, retry_backoff=True) +def run_check(task, request, check_name, obj_id=None, manually_triggered=False): + try: + check = getattr(checks, check_name)(request.db) + except NoResultFound: + request.log.info("Check %s isn't active. Aborting." % check_name) + return + + # Don't run scheduled checks if they are in evaluation mode, unless manually + # triggered. + if check.state == MalwareCheckState.Evaluation and not manually_triggered: + request.log.info( + "%s is in the `evaluation` state and must be manually triggered to run." + % check_name + ) + return + + kwargs = {} + + # Hooked checks require `obj_id`s. 
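+    # prepare() loads the hooked object from the database and, for File-based
+    # checks, also resolves the file's download URL to pass to the scan.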
+ if obj_id is not None: + kwargs = check.prepare(request, obj_id) + + try: + check.run(**kwargs) + except FatalCheckException as exc: + request.log.error("Fatal exception: %s: %s" % (check_name, str(exc))) + return + except Exception as exc: + request.log.error("Error executing check %s: %s" % (check_name, str(exc))) + raise task.retry(exc=exc) + + +@task(bind=True, ignore_result=True, acks_late=True) +def run_scheduled_check(task, request, check_name, manually_triggered=False): + malware_check_service = request.find_service_factory(IMalwareCheckService) + malware_check = malware_check_service(None, request) + malware_check.run_checks([check_name], manually_triggered=manually_triggered) + + +@task(bind=True, ignore_result=True, acks_late=True) +def backfill(task, request, check_name, num_objects): + """ + Runs a backfill on a fixed number of objects. + """ + check = getattr(checks, check_name)(request.db) + target_object = getattr(packaging_models, check.hooked_object) + query = request.db.query(target_object.id).limit(num_objects) + + request.log.info("Running backfill on %d %ss." % (num_objects, check.hooked_object)) + + runs = set() + for (elem_id,) in query: + runs.update([f"{check_name}:{elem_id}"]) + + malware_check_service = request.find_service_factory(IMalwareCheckService) + malware_check = malware_check_service(None, request) + malware_check.run_checks(runs, manually_triggered=True) + + +@task(bind=True, ignore_result=True, acks_late=True) +def sync_checks(task, request): + code_checks = inspect.getmembers(checks, inspect.isclass) + request.log.info("%d malware checks found in codebase." % len(code_checks)) + + all_checks = request.db.query(MalwareCheck).all() + active_checks = {} + wiped_out_checks = {} + for check in all_checks: + if not check.is_stale: + if check.state == MalwareCheckState.WipedOut: + wiped_out_checks[check.name] = check + else: + active_checks[check.name] = check + + if len(active_checks) > len(code_checks): + code_check_names = set([name for name, cls in code_checks]) + missing = ", ".join(set(active_checks.keys()) - code_check_names) + request.log.error( + "Found %d active checks in the db, but only %d checks in \ +code. Please manually move superfluous checks to the wiped_out state \ +in the check admin: %s" + % (len(active_checks), len(code_checks), missing) + ) + raise Exception("Mismatch between number of db checks and code checks.") + + for check_name, check_class in code_checks: + check = getattr(checks, check_name) + + if wiped_out_checks.get(check_name): + request.log.error( + "%s is wiped_out and cannot be synced. Please remove check from \ +codebase." + % check_name + ) + continue + + db_check = active_checks.get(check_name) + if db_check: + if check.version == db_check.version: + request.log.info("%s is unmodified." % check_name) + continue + + request.log.info("Updating existing %s." % check_name) + fields = get_check_fields(check) + + # Migrate the check state to the newest check. + # Then mark the old check state as disabled. + if db_check.state != MalwareCheckState.Disabled: + fields["state"] = db_check.state.value + db_check.state = MalwareCheckState.Disabled + + request.db.add(MalwareCheck(**fields)) + else: + request.log.info("Adding new %s to the database." 
% check_name) + fields = get_check_fields(check) + request.db.add(MalwareCheck(**fields)) + + +@task(bind=True, ignore_result=True, acks_late=True) +def remove_verdicts(task, request, check_name): + check_ids = ( + request.db.query(MalwareCheck.id, MalwareCheck.version) + .filter(MalwareCheck.name == check_name) + .all() + ) + total_deleted = 0 + for check_id, check_version in check_ids: + query = request.db.query(MalwareVerdict).filter( + MalwareVerdict.check_id == check_id + ) + num_verdicts = query.count() + request.log.info( + "Removing %d malware verdicts associated with %s version %d." + % (num_verdicts, check_name, check_version) + ) + total_deleted += query.delete(synchronize_session=False) + + # This returned value is only relevant for testing. + return total_deleted diff --git a/warehouse/malware/utils.py b/warehouse/malware/utils.py new file mode 100644 --- /dev/null +++ b/warehouse/malware/utils.py @@ -0,0 +1,46 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import defaultdict + +from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareCheckType + + +def get_check_fields(check): + result = {"name": check.__name__} + + required_fields = ["short_description", "long_description", "version", "check_type"] + for field in required_fields: + result[field] = getattr(check, field) + + if result["check_type"] == "event_hook": + result["hooked_object"] = check.hooked_object + + if result["check_type"] == "scheduled": + result["schedule"] = check.schedule + + return result + + +def get_enabled_hooked_checks(session): + checks = ( + session.query(MalwareCheck.name, MalwareCheck.hooked_object) + .filter(MalwareCheck.check_type == MalwareCheckType.EventHook) + .filter(MalwareCheck.state == MalwareCheckState.Enabled) + .all() + ) + results = defaultdict(list) + + for check_name, object_type in checks: + results[object_type.value].append(check_name) + + return results diff --git a/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py b/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/061ff3d24c22_add_malware_detection_tables.py @@ -0,0 +1,120 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
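To make the sync semantics above concrete, here is a sketch of the intended check lifecycle (the check name is illustrative; the behavior follows sync_checks and remove_verdicts as written):

    # 1. MalwareCheck(name="MyCheck", version=1, state=enabled) exists in the db.
    # 2. The check class bumps `version = 2` in code; `warehouse malware
    #    sync_checks` marks the v1 row disabled and inserts a v2 row that
    #    inherits the enabled state.
    # 3. Setting a check to wiped_out triggers remove_verdicts, deleting the
    #    verdicts of every version of that check; a wiped_out check is never
    #    re-synced until its class is removed from the codebase.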
+""" +Add malware detection tables + +Revision ID: 061ff3d24c22 +Revises: b5bb5d08543d +Create Date: 2019-12-18 17:27:00.183542 +""" +import citext +import sqlalchemy as sa + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "061ff3d24c22" +down_revision = "b5bb5d08543d" + +MalwareCheckTypes = sa.Enum("event_hook", "scheduled", name="malwarechecktypes") + +MalwareCheckStates = sa.Enum( + "enabled", "evaluation", "disabled", "wiped_out", name="malwarecheckstate" +) + +MalwareCheckObjectTypes = sa.Enum( + "File", "Release", "Project", name="malwarecheckobjecttype" +) + +VerdictClassifications = sa.Enum( + "threat", "indeterminate", "benign", name="verdictclassification" +) +VerdictConfidences = sa.Enum("low", "medium", "high", name="verdictconfidence") + + +def upgrade(): + op.create_table( + "malware_checks", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("name", citext.CIText(), nullable=False), + sa.Column("version", sa.Integer(), default=1, nullable=False), + sa.Column("short_description", sa.String(length=128), nullable=False), + sa.Column("long_description", sa.Text(), nullable=False), + sa.Column("check_type", MalwareCheckTypes, nullable=False), + sa.Column("hooked_object", MalwareCheckObjectTypes, nullable=True), + sa.Column("schedule", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column( + "state", MalwareCheckStates, server_default="disabled", nullable=False, + ), + sa.Column( + "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name", "version"), + ) + op.create_table( + "malware_verdicts", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column( + "run_date", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("check_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("file_id", postgresql.UUID(as_uuid=True), nullable=True), + sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=True), + sa.Column("release_id", postgresql.UUID(as_uuid=True), nullable=True), + sa.Column("classification", VerdictClassifications, nullable=False,), + sa.Column("confidence", VerdictConfidences, nullable=False,), + sa.Column("message", sa.Text(), nullable=True), + sa.Column("details", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column( + "manually_reviewed", + sa.Boolean(), + server_default=sa.text("false"), + nullable=False, + ), + sa.Column("reviewer_verdict", VerdictClassifications, nullable=True,), + sa.Column("full_report_link", sa.String(), nullable=True), + sa.ForeignKeyConstraint( + ["check_id"], ["malware_checks.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["file_id"], ["release_files.id"]), + sa.ForeignKeyConstraint(["release_id"], ["releases.id"]), + sa.ForeignKeyConstraint(["project_id"], ["projects.id"]), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_malware_verdicts_check_id"), + "malware_verdicts", + ["check_id"], + unique=False, + ) + + +def downgrade(): + op.drop_index(op.f("ix_malware_verdicts_check_id"), table_name="malware_verdicts") + op.drop_table("malware_verdicts") + op.drop_table("malware_checks") + MalwareCheckTypes.drop(op.get_bind()) + MalwareCheckStates.drop(op.get_bind()) + MalwareCheckObjectTypes.drop(op.get_bind()) + 
VerdictClassifications.drop(op.get_bind()) + VerdictConfidences.drop(op.get_bind()) diff --git a/warehouse/tasks.py b/warehouse/tasks.py --- a/warehouse/tasks.py +++ b/warehouse/tasks.py @@ -195,8 +195,11 @@ def includeme(config): task_default_queue="default", task_default_routing_key="task.default", task_queue_ha_policy="all", - task_queues=(Queue("default", routing_key="task.#"),), - task_routes=([]), + task_queues=( + Queue("default", routing_key="task.#"), + Queue("malware", routing_key="malware.#"), + ), + task_routes={"warehouse.malware.tasks.*": {"queue": "malware"}}, task_serializer="json", worker_disable_rate_limits=True, REDBEAT_REDIS_URL=s["celery.scheduler_url"],
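Before the accompanying test patch, a quick usage sketch for extract_file_content from warehouse/malware/checks/utils.py above (the archive layout is invented for illustration; only the helper itself comes from the patch):

import io
import tarfile

from warehouse.malware.checks.utils import extract_file_content

# Build an in-memory sdist-like tarball whose sole member lives under a
# versioned base directory, matching the {base}/{file_path} convention.
payload = b"from setuptools import setup\nsetup(name='pkg')\n"
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w:gz") as tar:
    info = tarfile.TarInfo("pkg-1.0/setup.py")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))
buf.seek(0)

# The base directory is stripped, so "setup.py" matches "pkg-1.0/setup.py".
assert extract_file_content(buf, "setup.py") == payload

# Anything that is neither a zipfile nor a tarball yields None.
assert extract_file_content(io.BytesIO(b"junk"), "setup.py") is None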
diff --git a/tests/common/checks/__init__.py b/tests/common/checks/__init__.py new file mode 100644 --- /dev/null +++ b/tests/common/checks/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .hooked import ExampleHookedCheck # noqa +from .scheduled import ExampleScheduledCheck # noqa diff --git a/tests/common/checks/hooked.py b/tests/common/checks/hooked.py new file mode 100644 --- /dev/null +++ b/tests/common/checks/hooked.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.errors import FatalCheckException +from warehouse.malware.models import VerdictClassification, VerdictConfidence + + +class ExampleHookedCheck(MalwareCheckBase): + + version = 1 + short_description = "An example hook-based check" + long_description = "The purpose of this check is to test the \ +implementation of a hook-based check. This check will generate verdicts if enabled." + check_type = "event_hook" + hooked_object = "File" + + def __init__(self, db): + super().__init__(db) + + def scan(self, **kwargs): + file_id = kwargs.get("obj_id") + if file_id is None: + raise FatalCheckException("Missing required kwarg `obj_id`") + + self.add_verdict( + file_id=file_id, + classification=VerdictClassification.Benign, + confidence=VerdictConfidence.High, + message="Nothing to see here!", + ) diff --git a/tests/common/checks/scheduled.py b/tests/common/checks/scheduled.py new file mode 100644 --- /dev/null +++ b/tests/common/checks/scheduled.py @@ -0,0 +1,37 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.models import VerdictClassification, VerdictConfidence +from warehouse.packaging.models import Project + + +class ExampleScheduledCheck(MalwareCheckBase): + + version = 1 + short_description = "An example scheduled check" + long_description = "The purpose of this check is to test the \ +implementation of a scheduled check. 
This check will generate verdicts if enabled." + check_type = "scheduled" + schedule = {"minute": "0", "hour": "*/8"} + + def __init__(self, db): + super().__init__(db) + + def scan(self, **kwargs): + project = self.db.query(Project).first() + self.add_verdict( + project_id=project.id, + classification=VerdictClassification.Benign, + confidence=VerdictConfidence.High, + message="Nothing to see here!", + ) diff --git a/tests/common/db/malware.py b/tests/common/db/malware.py new file mode 100644 --- /dev/null +++ b/tests/common/db/malware.py @@ -0,0 +1,63 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import factory +import factory.fuzzy + +from warehouse.malware.models import ( + MalwareCheck, + MalwareCheckObjectType, + MalwareCheckState, + MalwareCheckType, + MalwareVerdict, + VerdictClassification, + VerdictConfidence, +) + +from .base import WarehouseFactory +from .packaging import FileFactory + + +class MalwareCheckFactory(WarehouseFactory): + class Meta: + model = MalwareCheck + + name = factory.fuzzy.FuzzyText(length=12) + version = 1 + short_description = factory.fuzzy.FuzzyText(length=80) + long_description = factory.fuzzy.FuzzyText(length=300) + check_type = factory.fuzzy.FuzzyChoice(list(MalwareCheckType)) + hooked_object = factory.fuzzy.FuzzyChoice(list(MalwareCheckObjectType)) + schedule = {"minute": "*/10"} + state = factory.fuzzy.FuzzyChoice(list(MalwareCheckState)) + created = factory.fuzzy.FuzzyNaiveDateTime( + datetime.datetime.utcnow() - datetime.timedelta(days=7) + ) + + +class MalwareVerdictFactory(WarehouseFactory): + class Meta: + model = MalwareVerdict + + check = factory.SubFactory(MalwareCheckFactory) + release_file = factory.SubFactory(FileFactory) + release = None + project = None + manually_reviewed = True + reviewer_verdict = factory.fuzzy.FuzzyChoice(list(VerdictClassification)) + classification = factory.fuzzy.FuzzyChoice(list(VerdictClassification)) + confidence = factory.fuzzy.FuzzyChoice(list(VerdictConfidence)) + message = factory.fuzzy.FuzzyText(length=80) + full_report_link = None + details = None diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -83,6 +83,7 @@ class Meta: release = factory.SubFactory(ReleaseFactory) python_version = "source" + filename = factory.fuzzy.FuzzyText(length=12) md5_digest = factory.LazyAttribute( lambda o: hashlib.md5(o.filename.encode("utf8")).hexdigest() ) diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -174,6 +174,9 @@ def app_config(database): "files.backend": "warehouse.packaging.services.LocalFileStorage", "docs.backend": "warehouse.packaging.services.LocalFileStorage", "mail.backend": "warehouse.email.services.SMTPEmailSender", + "malware_check.backend": ( + "warehouse.malware.services.PrinterMalwareCheckService" + ), "files.url": "http://localhost:7000/", "sessions.secret": "123456", "sessions.url": "redis://localhost:0/", diff --git 
a/tests/unit/admin/test_routes.py b/tests/unit/admin/test_routes.py --- a/tests/unit/admin/test_routes.py +++ b/tests/unit/admin/test_routes.py @@ -123,4 +123,27 @@ def test_includeme(): pretend.call("admin.flags.edit", "/admin/flags/edit/", domain=warehouse), pretend.call("admin.squats", "/admin/squats/", domain=warehouse), pretend.call("admin.squats.review", "/admin/squats/review/", domain=warehouse), + pretend.call("admin.checks.list", "/admin/checks/", domain=warehouse), + pretend.call( + "admin.checks.detail", "/admin/checks/{check_name}", domain=warehouse + ), + pretend.call( + "admin.checks.change_state", + "/admin/checks/{check_name}/change_state", + domain=warehouse, + ), + pretend.call( + "admin.checks.run_evaluation", + "/admin/checks/{check_name}/run_evaluation", + domain=warehouse, + ), + pretend.call("admin.verdicts.list", "/admin/verdicts/", domain=warehouse), + pretend.call( + "admin.verdicts.detail", "/admin/verdicts/{verdict_id}", domain=warehouse + ), + pretend.call( + "admin.verdicts.review", + "/admin/verdicts/{verdict_id}/review", + domain=warehouse, + ), ] diff --git a/tests/unit/admin/views/test_checks.py b/tests/unit/admin/views/test_checks.py new file mode 100644 --- /dev/null +++ b/tests/unit/admin/views/test_checks.py @@ -0,0 +1,216 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pretend +import pytest + +from pyramid.httpexceptions import HTTPNotFound + +from warehouse.admin.views import checks as views +from warehouse.malware.models import MalwareCheckState, MalwareCheckType +from warehouse.malware.tasks import backfill, run_scheduled_check + +from ....common.db.malware import MalwareCheckFactory + + +class TestListChecks: + def test_get_checks_none(self, db_request): + assert views.get_checks(db_request) == {"checks": []} + + def test_get_checks(self, db_request): + checks = [MalwareCheckFactory.create() for _ in range(10)] + result = views.get_checks(db_request)["checks"] + assert len(result) == len(checks) + for r in result: + assert r in checks + + def test_get_checks_different_versions(self, db_request): + checks = [MalwareCheckFactory.create() for _ in range(5)] + checks_same = [ + MalwareCheckFactory.create(name="MyCheck", version=i) for i in range(1, 6) + ] + checks.append(checks_same[-1]) + result = views.get_checks(db_request)["checks"] + assert len(result) == len(checks) + for r in result: + assert r in checks + + +class TestGetCheck: + def test_get_check(self, db_request): + check = MalwareCheckFactory.create() + db_request.matchdict["check_name"] = check.name + assert views.get_check(db_request) == { + "check": check, + "checks": [check], + "states": MalwareCheckState, + "evaluation_run_size": 10000, + } + + def test_get_check_many_versions(self, db_request): + check1 = MalwareCheckFactory.create(name="MyCheck", version="1") + check2 = MalwareCheckFactory.create(name="MyCheck", version="2") + db_request.matchdict["check_name"] = check1.name + assert views.get_check(db_request) == { + "check": check2, + "checks": [check2, check1], + "states": MalwareCheckState, + "evaluation_run_size": 10000, + } + + def test_get_check_not_found(self, db_request): + db_request.matchdict["check_name"] = "DoesNotExist" + with pytest.raises(HTTPNotFound): + views.get_check(db_request) + + +class TestChangeCheckState: + def test_no_check_state(self, db_request): + check = MalwareCheckFactory.create() + db_request.matchdict["check_name"] = check.name + with pytest.raises(HTTPNotFound): + views.change_check_state(db_request) + + @pytest.mark.parametrize( + ("final_state"), [MalwareCheckState.Disabled, MalwareCheckState.WipedOut] + ) + def test_change_to_valid_state(self, db_request, final_state): + check = MalwareCheckFactory.create( + name="MyCheck", state=MalwareCheckState.Disabled + ) + + db_request.POST = {"check_state": final_state.value} + db_request.matchdict["check_name"] = check.name + + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + wipe_out_recorder = pretend.stub( + delay=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.task = pretend.call_recorder(lambda *a, **kw: wipe_out_recorder) + + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/admin/checks/MyCheck/change_state" + ) + + views.change_check_state(db_request) + + assert db_request.session.flash.calls == [ + pretend.call( + "Changed 'MyCheck' check to '%s'!" 
% final_state.value, queue="success" + ) + ] + + assert check.state == final_state + + if final_state == MalwareCheckState.WipedOut: + assert wipe_out_recorder.delay.calls == [pretend.call("MyCheck")] + + def test_change_to_invalid_state(self, db_request): + check = MalwareCheckFactory.create(name="MyCheck") + initial_state = check.state + invalid_check_state = "cancelled" + db_request.POST = {"check_state": invalid_check_state} + db_request.matchdict["check_name"] = check.name + + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/admin/checks/MyCheck/change_state" + ) + + views.change_check_state(db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Invalid check state provided.", queue="error") + ] + assert check.state == initial_state + + +class TestRunEvaluation: + @pytest.mark.parametrize( + ("check_state", "message"), + [ + ( + MalwareCheckState.Disabled, + "Check must be in 'enabled' or 'evaluation' state to manually execute.", + ), + ( + MalwareCheckState.WipedOut, + "Check must be in 'enabled' or 'evaluation' state to manually execute.", + ), + ], + ) + def test_invalid_backfill_parameters(self, db_request, check_state, message): + check = MalwareCheckFactory.create(state=check_state) + db_request.matchdict["check_name"] = check.name + + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/admin/checks/%s/run_evaluation" % check.name + ) + + views.run_evaluation(db_request) + + assert db_request.session.flash.calls == [pretend.call(message, queue="error")] + + @pytest.mark.parametrize( + ("check_type"), [MalwareCheckType.EventHook, MalwareCheckType.Scheduled] + ) + def test_success(self, db_request, check_type): + + check = MalwareCheckFactory.create( + check_type=check_type, state=MalwareCheckState.Enabled + ) + db_request.matchdict["check_name"] = check.name + + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/admin/checks/%s/run_evaluation" % check.name + ) + + backfill_recorder = pretend.stub( + delay=pretend.call_recorder(lambda *a, **kw: None) + ) + + db_request.task = pretend.call_recorder(lambda *a, **kw: backfill_recorder) + + views.run_evaluation(db_request) + + if check_type == MalwareCheckType.EventHook: + assert db_request.session.flash.calls == [ + pretend.call( + "Running %s on 10000 %ss!" + % (check.name, check.hooked_object.value), + queue="success", + ) + ] + assert db_request.task.calls == [pretend.call(backfill)] + assert backfill_recorder.delay.calls == [pretend.call(check.name, 10000)] + elif check_type == MalwareCheckType.Scheduled: + assert db_request.session.flash.calls == [ + pretend.call("Running %s now!" % check.name, queue="success",) + ] + assert db_request.task.calls == [pretend.call(run_scheduled_check)] + assert backfill_recorder.delay.calls == [ + pretend.call(check.name, manually_triggered=True) + ] + else: + raise Exception("Invalid check type: %s" % check_type) diff --git a/tests/unit/admin/views/test_verdicts.py b/tests/unit/admin/views/test_verdicts.py new file mode 100644 --- /dev/null +++ b/tests/unit/admin/views/test_verdicts.py @@ -0,0 +1,248 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+
+from random import randint
+
+import pretend
+import pytest
+
+from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound
+
+from warehouse.admin.views import verdicts as views
+from warehouse.malware.models import VerdictClassification, VerdictConfidence
+
+from ....common.db.malware import MalwareCheckFactory, MalwareVerdictFactory
+
+
+class TestListVerdicts:
+    def test_none(self, db_request):
+        assert views.get_verdicts(db_request) == {
+            "verdicts": [],
+            "check_names": set(),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+
+    def test_some(self, db_request):
+        check = MalwareCheckFactory.create()
+        verdicts = [MalwareVerdictFactory.create(check=check) for _ in range(10)]
+
+        assert views.get_verdicts(db_request) == {
+            "verdicts": verdicts,
+            "check_names": set([check.name]),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+
+    def test_some_with_multipage(self, db_request):
+        check1 = MalwareCheckFactory.create()
+        check2 = MalwareCheckFactory.create()
+        verdicts = [MalwareVerdictFactory.create(check=check2) for _ in range(60)]
+
+        db_request.GET["page"] = "2"
+
+        assert views.get_verdicts(db_request) == {
+            "verdicts": verdicts[25:50],
+            "check_names": set([check1.name, check2.name]),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+
+    @pytest.mark.parametrize(
+        "check_name", ["check0", "check1", ""],
+    )
+    def test_check_name_filter(self, db_request, check_name):
+        result_verdicts, all_verdicts = [], []
+        for i in range(3):
+            check = MalwareCheckFactory.create(name="check%d" % i)
+            verdicts = [MalwareVerdictFactory.create(check=check) for _ in range(5)]
+            all_verdicts.extend(verdicts)
+            if check.name == check_name:
+                result_verdicts = verdicts
+
+        # Empty string
+        if not result_verdicts:
+            result_verdicts = all_verdicts
+
+        response = {
+            "verdicts": result_verdicts,
+            "check_names": set(["check0", "check1", "check2"]),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+
+        db_request.GET["check_name"] = check_name
+        assert views.get_verdicts(db_request) == response
+
+    @pytest.mark.parametrize(
+        "classification", ["benign", "indeterminate", "threat", ""],
+    )
+    def test_classification_filter(self, db_request, classification):
+        check1 = MalwareCheckFactory.create()
+        result_verdicts, all_verdicts = [], []
+        for c in VerdictClassification:
+            verdicts = [
+                MalwareVerdictFactory.create(check=check1, classification=c)
+                for _ in range(5)
+            ]
+            all_verdicts.extend(verdicts)
+            if c.value == classification:
+                result_verdicts = verdicts
+
+        # Empty string
+        if not result_verdicts:
+            result_verdicts = all_verdicts
+
+        db_request.GET["classification"] = classification
+        response = {
+            "verdicts": result_verdicts,
+            "check_names": set([check1.name]),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+        assert views.get_verdicts(db_request) == response
+
+    @pytest.mark.parametrize(
+        "confidence", ["low", "medium", "high", ""],
+    )
+    def test_confidence_filter(self, db_request, confidence):
+        check1 = MalwareCheckFactory.create()
+        result_verdicts, all_verdicts = [], []
+        for c in VerdictConfidence:
+            verdicts = [
+                MalwareVerdictFactory.create(check=check1, confidence=c)
+                for _ in range(5)
+            ]
+            all_verdicts.extend(verdicts)
+            if c.value == confidence:
+                result_verdicts = verdicts
+
+        # Empty string
+        if not result_verdicts:
+            result_verdicts = all_verdicts
+
+        response = {
+            "verdicts": result_verdicts,
+            "check_names": set([check1.name]),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+
+        db_request.GET["confidence"] = confidence
+        assert views.get_verdicts(db_request) == response
+
+    @pytest.mark.parametrize(
+        "manually_reviewed", [1, 0],
+    )
+    def test_manually_reviewed_filter(self, db_request, manually_reviewed):
+        check1 = MalwareCheckFactory.create()
+        result_verdicts = [
+            MalwareVerdictFactory.create(
+                check=check1, manually_reviewed=bool(manually_reviewed)
+            )
+            for _ in range(5)
+        ]
+
+        # Create other verdicts to ensure filter works properly
+        for _ in range(10):
+            MalwareVerdictFactory.create(
+                check=check1, manually_reviewed=not bool(manually_reviewed)
+            )
+
+        db_request.GET["manually_reviewed"] = str(manually_reviewed)
+
+        response = {
+            "verdicts": result_verdicts,
+            "check_names": set([check1.name]),
+            "classifications": set(["threat", "indeterminate", "benign"]),
+            "confidences": set(["low", "medium", "high"]),
+        }
+
+        assert views.get_verdicts(db_request) == response
+
+    @pytest.mark.parametrize(
+        "invalid_param",
+        [
+            ("page", "invalid"),
+            ("check_name", "NotACheck"),
+            ("confidence", "NotAConfidence"),
+            ("classification", "NotAClassification"),
+            ("manually_reviewed", "False"),
+        ],
+    )
+    def test_errors(self, db_request, invalid_param):
+        db_request.GET[invalid_param[0]] = invalid_param[1]
+        with pytest.raises(HTTPBadRequest):
+            views.get_verdicts(db_request)
+
+
+class TestGetVerdict:
+    def test_found(self, db_request):
+        verdicts = [MalwareVerdictFactory.create() for _ in range(10)]
+        index = randint(0, 9)
+        lookup_id = verdicts[index].id
+        db_request.matchdict["verdict_id"] = lookup_id
+
+        assert views.get_verdict(db_request) == {
+            "verdict": verdicts[index],
+            "classifications": ["Benign", "Indeterminate", "Threat"],
+        }
+
+    def test_not_found(self, db_request):
+        db_request.matchdict["verdict_id"] = uuid.uuid4()
+
+        with pytest.raises(HTTPNotFound):
+            views.get_verdict(db_request)
+
+
+class TestReviewVerdict:
+    @pytest.mark.parametrize(
+        "manually_reviewed, reviewer_verdict",
+        [
+            (False, None),  # unreviewed verdict
+            (True, VerdictClassification.Threat),  # previously reviewed
+        ],
+    )
+    def test_set_classification(self, db_request, manually_reviewed, reviewer_verdict):
+        verdict = MalwareVerdictFactory.create(
+            manually_reviewed=manually_reviewed, reviewer_verdict=reviewer_verdict,
+        )
+
+        db_request.matchdict["verdict_id"] = verdict.id
+        db_request.POST = {"classification": "Benign"}
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+
+        db_request.route_path = pretend.call_recorder(
+            lambda *a, **kw: "/admin/verdicts/%s/review" % verdict.id
+        )
+
+        views.review_verdict(db_request)
+
+        assert db_request.session.flash.calls == [
+            pretend.call("Verdict %s marked as reviewed."
% verdict.id, queue="success") + ] + + assert verdict.manually_reviewed + assert verdict.reviewer_verdict == VerdictClassification.Benign + + @pytest.mark.parametrize("post_params", [{}, {"classification": "Nope"}]) + def test_errors(self, db_request, post_params): + verdict = MalwareVerdictFactory.create() + db_request.matchdict["verdict_id"] = verdict.id + db_request.POST = post_params + + with pytest.raises(HTTPBadRequest): + views.review_verdict(db_request) diff --git a/tests/unit/cli/test_malware.py b/tests/unit/cli/test_malware.py new file mode 100644 --- /dev/null +++ b/tests/unit/cli/test_malware.py @@ -0,0 +1,36 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.cli.malware import sync_checks +from warehouse.malware.tasks import sync_checks as _sync_checks + + +class TestCLIMalware: + def test_sync_checks(self, cli): + request = pretend.stub() + task = pretend.stub( + get_request=pretend.call_recorder(lambda *a, **kw: request), + run=pretend.call_recorder(lambda *a, **kw: None), + ) + config = pretend.stub(task=pretend.call_recorder(lambda *a, **kw: task)) + + result = cli.invoke(sync_checks, obj=config) + + assert result.exit_code == 0 + assert config.task.calls == [ + pretend.call(_sync_checks), + pretend.call(_sync_checks), + ] + assert task.get_request.calls == [pretend.call()] + assert task.run.calls == [pretend.call(request)] diff --git a/tests/unit/malware/__init__.py b/tests/unit/malware/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/malware/checks/__init__.py b/tests/unit/malware/checks/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/tests/unit/malware/checks/package_turnover/__init__.py b/tests/unit/malware/checks/package_turnover/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/package_turnover/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/malware/checks/package_turnover/test_check.py b/tests/unit/malware/checks/package_turnover/test_check.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/package_turnover/test_check.py @@ -0,0 +1,177 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.malware.checks.package_turnover import check as c +from warehouse.malware.models import ( + MalwareCheckState, + VerdictClassification, + VerdictConfidence, +) + +from .....common.db.accounts import UserFactory +from .....common.db.malware import MalwareCheckFactory +from .....common.db.packaging import ProjectFactory, ReleaseFactory + + +def test_initializes(db_session): + check_model = MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + assert check.id == check_model.id + + +def test_user_posture_verdicts(db_session): + user = UserFactory.create() + project = pretend.stub(users=[user], id=pretend.stub()) + + MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + user.record_event( + tag="account:two_factor:method_removed", ip_address="0.0.0.0", additional={} + ) + + check.user_posture_verdicts(project) + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].project_id == project.id + assert check._verdicts[0].classification == VerdictClassification.Threat + assert check._verdicts[0].confidence == VerdictConfidence.High + assert ( + check._verdicts[0].message + == "User with control over this package has disabled 2FA" + ) + + +def test_user_posture_verdicts_hasnt_removed_2fa(db_session): + user = UserFactory.create() + project = pretend.stub(users=[user], id=pretend.stub()) + + MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + check.user_posture_verdicts(project) + assert len(check._verdicts) == 0 + + +def test_user_posture_verdicts_has_2fa(db_session): + user = UserFactory.create(totp_secret=b"fake secret") + project = pretend.stub(users=[user], id=pretend.stub()) + + 
MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + user.record_event( + tag="account:two_factor:method_removed", ip_address="0.0.0.0", additional={} + ) + + check.user_posture_verdicts(project) + assert len(check._verdicts) == 0 + + +def test_user_turnover_verdicts(db_session): + user = UserFactory.create() + project = ProjectFactory.create(users=[user]) + + project.record_event( + tag="project:role:add", + ip_address="0.0.0.0", + additional={"target_user": user.username}, + ) + + MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + check.user_turnover_verdicts(project) + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].project_id == project.id + assert check._verdicts[0].classification == VerdictClassification.Threat + assert check._verdicts[0].confidence == VerdictConfidence.High + assert ( + check._verdicts[0].message + == "Suspicious user turnover; all current maintainers are new" + ) + + +def test_user_turnover_verdicts_no_turnover(db_session): + user = UserFactory.create() + project = ProjectFactory.create(users=[user]) + + MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + check.user_turnover_verdicts(project) + assert len(check._verdicts) == 0 + + +def test_scan(db_session, monkeypatch): + user = UserFactory.create() + project = ProjectFactory.create(users=[user]) + + for _ in range(3): + ReleaseFactory.create(project=project) + + MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + monkeypatch.setattr( + check, "user_posture_verdicts", pretend.call_recorder(lambda project: None) + ) + monkeypatch.setattr( + check, "user_turnover_verdicts", pretend.call_recorder(lambda project: None) + ) + + check.scan() + + # Each verdict rendering method is only called once per project, + # thanks to deduplication. + assert check.user_posture_verdicts.calls == [pretend.call(project)] + assert check.user_turnover_verdicts.calls == [pretend.call(project)] + + +def test_scan_too_few_releases(db_session, monkeypatch): + user = UserFactory.create() + project = ProjectFactory.create(users=[user]) + ReleaseFactory.create(project=project) + + MalwareCheckFactory.create( + name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, + ) + check = c.PackageTurnoverCheck(db_session) + + monkeypatch.setattr( + check, "user_posture_verdicts", pretend.call_recorder(lambda project: None) + ) + monkeypatch.setattr( + check, "user_turnover_verdicts", pretend.call_recorder(lambda project: None) + ) + + check.scan() + assert check.user_posture_verdicts.calls == [] + assert check.user_turnover_verdicts.calls == [] diff --git a/tests/unit/malware/checks/setup_patterns/__init__.py b/tests/unit/malware/checks/setup_patterns/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/setup_patterns/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/malware/checks/setup_patterns/test_check.py b/tests/unit/malware/checks/setup_patterns/test_check.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/setup_patterns/test_check.py @@ -0,0 +1,144 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend +import pytest +import yara + +from warehouse.malware.checks.setup_patterns import check as c +from warehouse.malware.models import ( + MalwareCheckState, + VerdictClassification, + VerdictConfidence, +) + +from .....common.db.malware import MalwareCheckFactory +from .....common.db.packaging import FileFactory + + +def test_initializes(db_session): + check_model = MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + assert check.id == check_model.id + assert isinstance(check._yara_rules, yara.Rules) + + [email protected]( + ("obj", "file_url"), [(None, pretend.stub()), (pretend.stub(), None)] +) +def test_scan_missing_kwargs(db_session, obj, file_url): + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + with pytest.raises(c.FatalCheckException): + check.scan(obj=obj, file_url=file_url) + + +def test_scan_non_sdist(db_session): + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + file = FileFactory.create(packagetype="bdist_wheel") + + check.scan(obj=file, file_url=pretend.stub()) + + assert check._verdicts == [] + + +def test_scan_no_setup_contents(db_session, monkeypatch): + monkeypatch.setattr( + c, "fetch_url_content", pretend.call_recorder(lambda *a: pretend.stub()) + ) + monkeypatch.setattr( + c, "extract_file_content", pretend.call_recorder(lambda *a: None) + ) + + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + file = FileFactory.create(packagetype="sdist") + + check.scan(obj=file, file_url=pretend.stub()) + + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].file_id == file.id + assert check._verdicts[0].classification == VerdictClassification.Indeterminate + assert check._verdicts[0].confidence == VerdictConfidence.High + assert ( + check._verdicts[0].message + == "sdist does not contain a suitable setup.py for analysis" + ) + + +def test_scan_benign_contents(db_session, monkeypatch): + monkeypatch.setattr( + c, "fetch_url_content", pretend.call_recorder(lambda *a: pretend.stub()) 
+ ) + monkeypatch.setattr( + c, + "extract_file_content", + pretend.call_recorder(lambda *a: b"this is a benign string"), + ) + + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + file = FileFactory.create(packagetype="sdist") + + check.scan(obj=file, file_url=pretend.stub()) + + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].file_id == file.id + assert check._verdicts[0].classification == VerdictClassification.Benign + assert check._verdicts[0].confidence == VerdictConfidence.Low + assert check._verdicts[0].message == "No malicious patterns found in setup.py" + + +def test_scan_matched_content(db_session, monkeypatch): + monkeypatch.setattr( + c, "fetch_url_content", pretend.call_recorder(lambda *a: pretend.stub()) + ) + monkeypatch.setattr( + c, + "extract_file_content", + pretend.call_recorder( + lambda *a: b"this looks suspicious: os.system('cat /etc/passwd')" + ), + ) + + MalwareCheckFactory.create( + name="SetupPatternCheck", state=MalwareCheckState.Enabled + ) + check = c.SetupPatternCheck(db_session) + + file = FileFactory.create(packagetype="sdist") + + check.scan(obj=file, file_url=pretend.stub()) + + assert len(check._verdicts) == 1 + assert check._verdicts[0].check_id == check.id + assert check._verdicts[0].file_id == file.id + assert check._verdicts[0].classification == VerdictClassification.Threat + assert check._verdicts[0].confidence == VerdictConfidence.High + assert check._verdicts[0].message == "process_spawn_in_setup" diff --git a/tests/unit/malware/checks/test_utils.py b/tests/unit/malware/checks/test_utils.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/checks/test_utils.py @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import io +import tarfile +import zipfile + +import pretend + +from warehouse.malware.checks import utils + + +def test_fetch_url_content(monkeypatch): + response = pretend.stub( + raise_for_status=pretend.call_recorder(lambda: None), content=b"fake content" + ) + requests = pretend.stub(get=pretend.call_recorder(lambda url: response)) + + monkeypatch.setattr(utils, "requests", requests) + + io = utils.fetch_url_content("hxxp://fake_url.com") + + assert requests.get.calls == [pretend.call("hxxp://fake_url.com")] + assert response.raise_for_status.calls == [pretend.call()] + assert io.getvalue() == b"fake content" + + +def test_extract_file_contents_zip(): + zipbuf = io.BytesIO() + with zipfile.ZipFile(zipbuf, mode="w") as zipobj: + zipobj.writestr("toplevelgetsskipped", b"nothing to see here") + zipobj.writestr("foo/setup.py", b"these are some contents") + zipbuf.seek(0) + + assert utils.extract_file_content(zipbuf, "setup.py") == b"these are some contents" + + +def test_extract_file_contents_zip_no_file(): + zipbuf = io.BytesIO() + with zipfile.ZipFile(zipbuf, mode="w") as zipobj: + zipobj.writestr("foo/notsetup.py", b"these are some contents") + zipbuf.seek(0) + + assert utils.extract_file_content(zipbuf, "setup.py") is None + + +def test_extract_file_contents_tar(): + tarbuf = io.BytesIO() + with tarfile.open(fileobj=tarbuf, mode="w:gz") as tarobj: + contents = io.BytesIO(b"these are some contents") + member = tarfile.TarInfo(name="foo/setup.py") + member.size = len(contents.getbuffer()) + tarobj.addfile(member, fileobj=contents) + + contents = io.BytesIO(b"nothing to see here") + member = tarfile.TarInfo(name="toplevelgetsskipped") + member.size = len(contents.getbuffer()) + tarobj.addfile(member, fileobj=contents) + tarbuf.seek(0) + + assert utils.extract_file_content(tarbuf, "setup.py") == b"these are some contents" + + +def test_extract_file_contents_tar_empty(): + tarbuf = io.BytesIO(b"invalid tar contents") + + assert utils.extract_file_content(tarbuf, "setup.py") is None + + +def test_extract_file_contents_tar_no_file(): + tarbuf = io.BytesIO() + with tarfile.open(fileobj=tarbuf, mode="w:gz") as tarobj: + contents = io.BytesIO(b"these are some contents") + member = tarfile.TarInfo(name="foo/notsetup.py") + member.size = len(contents.getbuffer()) + tarobj.addfile(member, fileobj=contents) + + contents = io.BytesIO(b"nothing to see here") + member = tarfile.TarInfo(name="toplevelgetsskipped") + member.size = len(contents.getbuffer()) + tarobj.addfile(member, fileobj=contents) + tarbuf.seek(0) + + assert utils.extract_file_content(tarbuf, "setup.py") is None diff --git a/tests/unit/malware/test_checks.py b/tests/unit/malware/test_checks.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_checks.py @@ -0,0 +1,82 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import inspect + +import pretend +import pytest + +import warehouse.malware.checks as prod_checks + +from warehouse.malware.checks.base import MalwareCheckBase +from warehouse.malware.utils import get_check_fields + +from ...common import checks as test_checks +from ...common.db.packaging import FileFactory + + +def test_checks_subclass_base(): + prod_checks_from_module = inspect.getmembers(prod_checks, inspect.isclass) + test_checks_from_module = inspect.getmembers(test_checks, inspect.isclass) + all_checks = prod_checks_from_module + test_checks_from_module + + subclasses_of_malware_base = { + cls.__name__: cls for cls in MalwareCheckBase.__subclasses__() + } + + assert len(all_checks) == len(subclasses_of_malware_base) + + for check_name, check in all_checks: + assert subclasses_of_malware_base[check_name] == check + + [email protected](("checks"), [prod_checks, test_checks]) +def test_checks_fields(checks): + checks_from_module = inspect.getmembers(checks, inspect.isclass) + + for check_name, check in checks_from_module: + elems = inspect.getmembers(check, lambda a: not (inspect.isroutine(a))) + inspection_fields = {"name": check_name} + for elem_name, value in elems: + # Skip both dunder and "private" (_-prefixed) attributes + if not elem_name.startswith("_"): + inspection_fields[elem_name] = value + fields = get_check_fields(check) + + assert inspection_fields == fields + + +def test_base_prepare_file_hooked(db_session): + file = FileFactory.create() + request = pretend.stub( + db=db_session, route_url=pretend.call_recorder(lambda *a, **kw: "fake_url") + ) + + kwargs = test_checks.ExampleHookedCheck.prepare(request, file.id) + + assert request.route_url.calls == [pretend.call("packaging.file", path=file.path)] + assert "file_url" in kwargs + assert kwargs["file_url"] == "fake_url" + + +def test_base_prepare_nonfile_hooked(db_session): + file = FileFactory.create() + request = pretend.stub( + db=db_session, route_url=pretend.call_recorder(lambda *a, **kw: "fake_url") + ) + + class FakeProjectCheck(MalwareCheckBase): + hooked_object = "Project" + + kwargs = FakeProjectCheck.prepare(request, file.id) + assert request.route_url.calls == [] + assert "file_url" not in kwargs diff --git a/tests/unit/malware/test_init.py b/tests/unit/malware/test_init.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_init.py @@ -0,0 +1,185 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import defaultdict + +import pretend + +from celery.schedules import crontab + +from warehouse import malware +from warehouse.malware.interfaces import IMalwareCheckService +from warehouse.malware.tasks import run_scheduled_check + +from ...common import checks as test_checks +from ...common.db.accounts import UserFactory +from ...common.db.packaging import FileFactory, ProjectFactory, ReleaseFactory + + +def test_determine_malware_checks_no_checks(monkeypatch, db_request): + def get_enabled_hooked_checks(session): + return defaultdict(list) + + monkeypatch.setattr(malware, "get_enabled_hooked_checks", get_enabled_hooked_checks) + + project = ProjectFactory.create(name="foo") + release = ReleaseFactory.create(project=project) + file0 = FileFactory.create(release=release, filename="foo.bar") + + session = pretend.stub(info={}, new={file0, release, project}, dirty={}, deleted={}) + + malware.determine_malware_checks(pretend.stub(), session, pretend.stub()) + assert session.info["warehouse.malware.checks"] == set() + + +def test_determine_malware_checks_nothing_new(monkeypatch, db_request): + def get_enabled_hooked_checks(session): + result = defaultdict(list) + result["File"] = ["Check1", "Check2"] + result["Release"] = ["Check3"] + return result + + monkeypatch.setattr(malware, "get_enabled_hooked_checks", get_enabled_hooked_checks) + + project = ProjectFactory.create(name="foo") + release = ReleaseFactory.create(project=project) + file0 = FileFactory.create(release=release, filename="foo.bar") + + session = pretend.stub(info={}, new={}, dirty={file0, release}, deleted={}) + + malware.determine_malware_checks(pretend.stub(), session, pretend.stub()) + assert session.info.get("warehouse.malware.checks") is None + + +def test_determine_malware_checks_unsupported_object(monkeypatch, db_request): + def get_enabled_hooked_checks(session): + result = defaultdict(list) + result["File"] = ["Check1", "Check2"] + result["Release"] = ["Check3"] + return result + + monkeypatch.setattr(malware, "get_enabled_hooked_checks", get_enabled_hooked_checks) + + user = UserFactory.create() + + session = pretend.stub(info={}, new={user}, dirty={}, deleted={}) + + malware.determine_malware_checks(pretend.stub(), session, pretend.stub()) + assert session.info.get("warehouse.malware.checks") is None + + +def test_determine_malware_checks_file_only(monkeypatch, db_request): + def get_enabled_hooked_checks(session): + result = defaultdict(list) + result["File"] = ["Check1", "Check2"] + result["Release"] = ["Check3"] + return result + + monkeypatch.setattr(malware, "get_enabled_hooked_checks", get_enabled_hooked_checks) + + project = ProjectFactory.create(name="foo") + release = ReleaseFactory.create(project=project) + file0 = FileFactory.create(release=release, filename="foo.bar") + + session = pretend.stub(info={}, new={file0}, dirty={}, deleted={}) + + checks = set(["Check%d:%s" % (x, file0.id) for x in range(1, 3)]) + malware.determine_malware_checks(pretend.stub(), session, pretend.stub()) + assert session.info["warehouse.malware.checks"] == checks + + +def test_determine_malware_checks_file_and_release(monkeypatch, db_request): + def get_enabled_hooked_checks(session): + result = defaultdict(list) + result["File"] = ["Check1", "Check2"] + result["Release"] = ["Check3"] + return result + + monkeypatch.setattr(malware, "get_enabled_hooked_checks", get_enabled_hooked_checks) + + project = ProjectFactory.create(name="foo") + release = ReleaseFactory.create(project=project) + file0 = 
FileFactory.create(release=release, filename="foo.bar") + file1 = FileFactory.create(release=release, filename="foo.baz") + + session = pretend.stub( + info={}, new={project, release, file0, file1}, dirty={}, deleted={} + ) + + checks = set(["Check%d:%s" % (x, file0.id) for x in range(1, 3)]) + checks.update(["Check%d:%s" % (x, file1.id) for x in range(1, 3)]) + checks.add("Check3:%s" % release.id) + + malware.determine_malware_checks(pretend.stub(), session, pretend.stub()) + + assert session.info["warehouse.malware.checks"] == checks + + +def test_enqueue_malware_checks(app_config): + malware_check = pretend.stub( + run_checks=pretend.call_recorder(lambda malware_checks: None) + ) + factory = pretend.call_recorder(lambda ctx, config: malware_check) + app_config.register_service_factory(factory, IMalwareCheckService) + app_config.commit() + session = pretend.stub( + info={ + "warehouse.malware.checks": {"Check1:ba70267f-fabf-496f-9ac2-d237a983b187"} + } + ) + + malware.queue_malware_checks(app_config, session) + + assert factory.calls == [pretend.call(None, app_config)] + assert malware_check.run_checks.calls == [ + pretend.call({"Check1:ba70267f-fabf-496f-9ac2-d237a983b187"}) + ] + assert "warehouse.malware.checks" not in session.info + + +def test_enqueue_malware_checks_no_checks(app_config): + session = pretend.stub(info={}) + malware.queue_malware_checks(app_config, session) + assert "warehouse.malware.checks" not in session.info + + +def test_includeme(monkeypatch): + monkeypatch.setattr(malware, "checks", test_checks) + + malware_check_class = pretend.stub( + create_service=pretend.call_recorder(lambda *a, **kw: pretend.stub()) + ) + + config = pretend.stub( + maybe_dotted=lambda dotted: malware_check_class, + register_service_factory=pretend.call_recorder( + lambda factory, iface, name=None: None + ), + registry=pretend.stub( + settings={"malware_check.backend": "TestMalwareCheckService"} + ), + add_periodic_task=pretend.call_recorder(lambda *a, **kw: None), + ) + + malware.includeme(config) + + assert config.register_service_factory.calls == [ + pretend.call(malware_check_class.create_service, IMalwareCheckService) + ] + + assert config.add_periodic_task.calls == [ + pretend.call( + crontab(minute="0", hour="*/8"), + run_scheduled_check, + args=("ExampleScheduledCheck",), + ) + ] diff --git a/tests/unit/malware/test_models.py b/tests/unit/malware/test_models.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_models.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from warehouse.malware.models import VerdictClassification, VerdictConfidence + + +def test_classification_orderable(): + assert ( + VerdictClassification.Benign + < VerdictClassification.Indeterminate + < VerdictClassification.Threat + ) + assert ( + max( + [ + VerdictClassification.Benign, + VerdictClassification.Indeterminate, + VerdictClassification.Threat, + ] + ) + == VerdictClassification.Threat + ) + + +def test_confidence_orderable(): + assert VerdictConfidence.Low < VerdictConfidence.Medium < VerdictConfidence.High + assert ( + max([VerdictConfidence.Low, VerdictConfidence.Medium, VerdictConfidence.High]) + == VerdictConfidence.High + ) diff --git a/tests/unit/malware/test_services.py b/tests/unit/malware/test_services.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_services.py @@ -0,0 +1,87 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend +import pytest + +from zope.interface.verify import verifyClass + +from warehouse.malware.interfaces import IMalwareCheckService +from warehouse.malware.services import ( + DatabaseMalwareCheckService, + PrinterMalwareCheckService, +) +from warehouse.malware.tasks import run_check + + +class TestPrinterMalwareCheckService: + def test_verify_service(self): + assert verifyClass(IMalwareCheckService, PrinterMalwareCheckService) + + def test_create_service(self): + request = pretend.stub() + service = PrinterMalwareCheckService.create_service(None, request) + assert service.executor == print + + @pytest.mark.parametrize(("kwargs"), [{}, {"manually_triggered": True}]) + def test_run_checks(self, capfd, kwargs): + request = pretend.stub() + service = PrinterMalwareCheckService.create_service(None, request) + checks = ["one", "two", "three"] + service.run_checks(checks, **kwargs) + out, err = capfd.readouterr() + assert out == "".join(["%s %s\n" % (check, kwargs) for check in checks]) + + +class TestDatabaseMalwareService: + def test_verify_service(self): + assert verifyClass(IMalwareCheckService, DatabaseMalwareCheckService) + + def test_create_service(self, db_request): + _delay = pretend.call_recorder(lambda *args: None) + db_request.task = lambda x: pretend.stub(delay=_delay) + service = DatabaseMalwareCheckService.create_service(None, db_request) + assert service.executor == db_request.task(run_check).delay + + def test_run_hooked_check(self, db_request): + _delay = pretend.call_recorder(lambda *args, **kwargs: None) + db_request.task = lambda x: pretend.stub(delay=_delay) + service = DatabaseMalwareCheckService.create_service(None, db_request) + checks = [ + "MyTestCheck:ba70267f-fabf-496f-9ac2-d237a983b187", + "AnotherCheck:44f57b0e-c5b0-47c5-8713-341cf392efe2", + "FinalCheck:e8518a15-8f01-430e-8f5b-87644007c9c0", + ] + service.run_checks(checks) + assert _delay.calls == [ + pretend.call("MyTestCheck", obj_id="ba70267f-fabf-496f-9ac2-d237a983b187"), + pretend.call("AnotherCheck", obj_id="44f57b0e-c5b0-47c5-8713-341cf392efe2"), + pretend.call("FinalCheck", obj_id="e8518a15-8f01-430e-8f5b-87644007c9c0"), + ] 
+ + def test_run_scheduled_check(self, db_request): + _delay = pretend.call_recorder(lambda *args, **kwargs: None) + db_request.task = lambda x: pretend.stub(delay=_delay) + service = DatabaseMalwareCheckService.create_service(None, db_request) + checks = ["MyTestScheduledCheck"] + service.run_checks(checks) + assert _delay.calls == [pretend.call("MyTestScheduledCheck")] + + def test_run_triggered_check(self, db_request): + _delay = pretend.call_recorder(lambda *args, **kwargs: None) + db_request.task = lambda x: pretend.stub(delay=_delay) + service = DatabaseMalwareCheckService.create_service(None, db_request) + checks = ["MyTriggeredCheck"] + service.run_checks(checks, manually_triggered=True) + assert _delay.calls == [ + pretend.call("MyTriggeredCheck", manually_triggered=True) + ] diff --git a/tests/unit/malware/test_tasks.py b/tests/unit/malware/test_tasks.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_tasks.py @@ -0,0 +1,490 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import celery +import pretend +import pytest + +from warehouse.malware import tasks +from warehouse.malware.models import MalwareCheck, MalwareCheckState, MalwareVerdict +from warehouse.malware.services import PrinterMalwareCheckService + +from ...common import checks as test_checks +from ...common.db.malware import MalwareCheckFactory, MalwareVerdictFactory +from ...common.db.packaging import FileFactory, ProjectFactory, ReleaseFactory + + +class TestRunCheck: + def test_success(self, db_request, monkeypatch): + db_request.route_url = pretend.call_recorder(lambda *a, **kw: "fake_route") + + monkeypatch.setattr(tasks, "checks", test_checks) + file0 = FileFactory.create() + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Enabled + ) + task = pretend.stub() + tasks.run_check(task, db_request, "ExampleHookedCheck", obj_id=file0.id) + + assert db_request.route_url.calls == [ + pretend.call("packaging.file", path=file0.path) + ] + assert db_request.db.query(MalwareVerdict).one() + + @pytest.mark.parametrize(("manually_triggered"), [True, False]) + def test_evaluation_run(self, db_session, monkeypatch, manually_triggered): + monkeypatch.setattr(tasks, "checks", test_checks) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Evaluation + ) + ProjectFactory.create() + task = pretend.stub() + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + tasks.run_check( + task, + request, + "ExampleScheduledCheck", + manually_triggered=manually_triggered, + ) + + if manually_triggered: + assert db_session.query(MalwareVerdict).one() + else: + assert request.log.info.calls == [ + pretend.call( + "ExampleScheduledCheck is in the `evaluation` state and must be \ +manually triggered to run." 
+ ) + ] + assert db_session.query(MalwareVerdict).all() == [] + + def test_disabled_check(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Disabled + ) + task = pretend.stub() + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + file = FileFactory.create() + + tasks.run_check(task, request, "ExampleHookedCheck", obj_id=file.id) + + assert request.log.info.calls == [ + pretend.call("Check ExampleHookedCheck isn't active. Aborting.") + ] + + def test_missing_check(self, db_request, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + task = pretend.stub() + + with pytest.raises(AttributeError): + tasks.run_check(task, db_request, "DoesNotExistCheck") + + def test_missing_obj_id(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + task = pretend.stub() + + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Enabled + ) + task = pretend.stub() + + request = pretend.stub( + db=db_session, + log=pretend.stub(error=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + tasks.run_check(task, request, "ExampleHookedCheck") + + assert request.log.error.calls == [ + pretend.call( + "Fatal exception: ExampleHookedCheck: Missing required kwarg `obj_id`" + ) + ] + + def test_retry(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + exc = Exception("Scan failed") + + def scan(self, **kwargs): + raise exc + + monkeypatch.setattr(tasks.checks.ExampleHookedCheck, "scan", scan) + + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Enabled + ) + + task = pretend.stub( + retry=pretend.call_recorder(pretend.raiser(celery.exceptions.Retry)) + ) + request = pretend.stub( + db=db_session, + log=pretend.stub(error=pretend.call_recorder(lambda *args, **kwargs: None)), + route_url=pretend.call_recorder(lambda *a, **kw: pretend.stub()), + ) + + file = FileFactory.create() + + with pytest.raises(celery.exceptions.Retry): + tasks.run_check(task, request, "ExampleHookedCheck", obj_id=file.id) + + assert request.log.error.calls == [ + pretend.call("Error executing check ExampleHookedCheck: Scan failed") + ] + + assert task.retry.calls == [pretend.call(exc=exc)] + + +class TestRunScheduledCheck: + def test_invalid_check_name(self, db_request, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + task = pretend.stub() + with pytest.raises(AttributeError): + tasks.run_scheduled_check(task, db_request, "DoesNotExist") + + def test_run_check(self, db_session, capfd, monkeypatch): + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Enabled + ) + + request = pretend.stub( + db=db_session, + find_service_factory=pretend.call_recorder( + lambda interface: PrinterMalwareCheckService.create_service + ), + ) + + task = pretend.stub() + + tasks.run_scheduled_check(task, request, "ExampleScheduledCheck") + + assert request.find_service_factory.calls == [ + pretend.call(tasks.IMalwareCheckService) + ] + + out, err = capfd.readouterr() + assert out == "ExampleScheduledCheck {'manually_triggered': False}\n" + + +class TestBackfill: + def test_invalid_check_name(self, db_request, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + task = pretend.stub() + with pytest.raises(AttributeError): + tasks.backfill(task, db_request, "DoesNotExist", 
1) + + @pytest.mark.parametrize( + ("num_objects", "num_runs"), [(11, 1), (11, 11), (101, 90)] + ) + def test_run(self, db_session, capfd, num_objects, num_runs, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + + ids = [] + for i in range(num_objects): + ids.append(FileFactory.create().id) + + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Enabled + ) + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + find_service_factory=pretend.call_recorder( + lambda interface: PrinterMalwareCheckService.create_service + ), + ) + + task = pretend.stub() + + tasks.backfill(task, request, "ExampleHookedCheck", num_runs) + + assert request.log.info.calls == [ + pretend.call("Running backfill on %d Files." % num_runs) + ] + + assert request.find_service_factory.calls == [ + pretend.call(tasks.IMalwareCheckService) + ] + + out, err = capfd.readouterr() + num_output_lines = 0 + for file_id in ids: + logged_output = "ExampleHookedCheck:%s %s\n" % ( + file_id, + {"manually_triggered": True}, + ) + num_output_lines += 1 if logged_output in out else 0 + + assert num_output_lines == num_runs + + +class TestSyncChecks: + def test_no_updates(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + monkeypatch.setattr(tasks.checks.ExampleScheduledCheck, "version", 2) + + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Disabled + ) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Disabled + ) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Enabled, version=2 + ) + + task = pretend.stub() + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + tasks.sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("2 malware checks found in codebase."), + pretend.call("ExampleHookedCheck is unmodified."), + pretend.call("ExampleScheduledCheck is unmodified."), + ] + + @pytest.mark.parametrize( + ("final_state"), [MalwareCheckState.Enabled, MalwareCheckState.Disabled] + ) + def test_upgrade_check(self, monkeypatch, db_session, final_state): + monkeypatch.setattr(tasks, "checks", test_checks) + monkeypatch.setattr(tasks.checks.ExampleHookedCheck, "version", 2) + + MalwareCheckFactory.create(name="ExampleHookedCheck", state=final_state) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Disabled + ) + + task = pretend.stub() + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + tasks.sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("2 malware checks found in codebase."), + pretend.call("Updating existing ExampleHookedCheck."), + pretend.call("ExampleScheduledCheck is unmodified."), + ] + db_checks = ( + db_session.query(MalwareCheck) + .filter(MalwareCheck.name == "ExampleHookedCheck") + .all() + ) + + assert len(db_checks) == 2 + + if final_state == MalwareCheckState.Disabled: + assert ( + db_checks[0].state == db_checks[1].state == MalwareCheckState.Disabled + ) + + else: + for c in db_checks: + if c.state == final_state: + assert c.version == 2 + else: + assert c.version == 1 + + def test_one_new_check(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + + MalwareCheckFactory.create( + 
name="ExampleHookedCheck", state=MalwareCheckState.Disabled + ) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Disabled + ) + + task = pretend.stub() + + class FakeMalwareCheck: + version = 1 + short_description = "This is a short description." + long_description = "This is a longer description." + check_type = "scheduled" + schedule = {"minute": "0", "hour": "*/8"} + + tasks.checks.FakeMalwareCheck = FakeMalwareCheck + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + tasks.sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("3 malware checks found in codebase."), + pretend.call("ExampleHookedCheck is unmodified."), + pretend.call("ExampleScheduledCheck is unmodified."), + pretend.call("Adding new FakeMalwareCheck to the database."), + ] + assert db_session.query(MalwareCheck).count() == 3 + + new_check = ( + db_session.query(MalwareCheck) + .filter(MalwareCheck.name == "FakeMalwareCheck") + .one() + ) + + assert new_check.state == MalwareCheckState.Disabled + + del tasks.checks.FakeMalwareCheck + + def test_too_many_db_checks(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.Enabled + ) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.Enabled + ) + MalwareCheckFactory.create( + name="AnotherCheck", state=MalwareCheckState.Evaluation, version=2 + ) + + task = pretend.stub() + + request = pretend.stub( + db=db_session, + log=pretend.stub( + info=pretend.call_recorder(lambda *args, **kwargs: None), + error=pretend.call_recorder(lambda *args, **kwargs: None), + ), + ) + + with pytest.raises(Exception): + tasks.sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("2 malware checks found in codebase.") + ] + + assert request.log.error.calls == [ + pretend.call( + "Found 3 active checks in the db, but only 2 checks in code. Please \ +manually move superfluous checks to the wiped_out state in the check admin: \ +AnotherCheck" + ) + ] + + def test_only_wiped_out(self, db_session, monkeypatch): + monkeypatch.setattr(tasks, "checks", test_checks) + MalwareCheckFactory.create( + name="ExampleHookedCheck", state=MalwareCheckState.WipedOut + ) + MalwareCheckFactory.create( + name="ExampleScheduledCheck", state=MalwareCheckState.WipedOut + ) + + task = pretend.stub() + request = pretend.stub( + db=db_session, + log=pretend.stub( + info=pretend.call_recorder(lambda *args, **kwargs: None), + error=pretend.call_recorder(lambda *args, **kwargs: None), + ), + ) + + tasks.sync_checks(task, request) + + assert request.log.info.calls == [ + pretend.call("2 malware checks found in codebase.") + ] + + assert request.log.error.calls == [ + pretend.call( + "ExampleHookedCheck is wiped_out and cannot be synced. Please remove check \ +from codebase." + ), + pretend.call( + "ExampleScheduledCheck is wiped_out and cannot be synced. Please remove check \ +from codebase." 
+ ), + ] + + +class TestRemoveVerdicts: + def test_no_verdicts(self, db_session): + check = MalwareCheckFactory.create() + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + task = pretend.stub() + removed = tasks.remove_verdicts(task, request, check.name) + + assert request.log.info.calls == [ + pretend.call( + "Removing 0 malware verdicts associated with %s version 1." % check.name + ) + ] + assert removed == 0 + + @pytest.mark.parametrize(("check_with_verdicts"), [True, False]) + def test_many_verdicts(self, db_session, check_with_verdicts): + check0 = MalwareCheckFactory.create() + check1 = MalwareCheckFactory.create() + project = ProjectFactory.create(name="foo") + release = ReleaseFactory.create(project=project) + file0 = FileFactory.create(release=release, filename="foo.bar") + num_verdicts = 10 + + for i in range(num_verdicts): + MalwareVerdictFactory.create(check=check1, release_file=file0) + + assert db_session.query(MalwareVerdict).count() == num_verdicts + + request = pretend.stub( + db=db_session, + log=pretend.stub(info=pretend.call_recorder(lambda *args, **kwargs: None)), + ) + + task = pretend.stub() + + if check_with_verdicts: + wiped_out_check = check1 + else: + wiped_out_check = check0 + num_verdicts = 0 + + removed = tasks.remove_verdicts(task, request, wiped_out_check.name) + + assert request.log.info.calls == [ + pretend.call( + "Removing %d malware verdicts associated with %s version 1." + % (num_verdicts, wiped_out_check.name) + ) + ] + + assert removed == num_verdicts diff --git a/tests/unit/malware/test_utils.py b/tests/unit/malware/test_utils.py new file mode 100644 --- /dev/null +++ b/tests/unit/malware/test_utils.py @@ -0,0 +1,89 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import defaultdict + +import pytest + +from warehouse.malware.models import MalwareCheckState, MalwareCheckType +from warehouse.malware.utils import get_check_fields, get_enabled_hooked_checks + +from ...common.checks import ExampleHookedCheck, ExampleScheduledCheck +from ...common.db.malware import MalwareCheckFactory + + +class TestGetEnabledChecks: + def test_one(self, db_session): + check = MalwareCheckFactory.create( + state=MalwareCheckState.Enabled, check_type=MalwareCheckType.EventHook + ) + result = defaultdict(list) + result[check.hooked_object.value].append(check.name) + checks = get_enabled_hooked_checks(db_session) + assert checks == result + + def test_many(self, db_session): + result = defaultdict(list) + for i in range(10): + check = MalwareCheckFactory.create() + if ( + check.state == MalwareCheckState.Enabled + and check.check_type == MalwareCheckType.EventHook + ): + result[check.hooked_object.value].append(check.name) + + checks = get_enabled_hooked_checks(db_session) + assert checks == result + + def test_none(self, db_session): + checks = get_enabled_hooked_checks(db_session) + assert checks == defaultdict(list) + + +class TestGetCheckFields: + @pytest.mark.parametrize( + ("check", "result"), + [ + ( + ExampleHookedCheck, + { + "name": "ExampleHookedCheck", + "version": 1, + "short_description": "An example hook-based check", + "long_description": "The purpose of this check is to test the \ +implementation of a hook-based check. This check will generate verdicts if enabled.", + "check_type": "event_hook", + "hooked_object": "File", + }, + ), + ( + ExampleScheduledCheck, + { + "name": "ExampleScheduledCheck", + "version": 1, + "short_description": "An example scheduled check", + "long_description": "The purpose of this check is to test the \ +implementation of a scheduled check. This check will generate verdicts if enabled.", + "check_type": "scheduled", + "schedule": {"minute": "0", "hour": "*/8"}, + }, + ), + ], + ) + def test_success(self, check, result): + assert get_check_fields(check) == result + + def test_failure(self, monkeypatch): + monkeypatch.delattr(ExampleScheduledCheck, "schedule") + + with pytest.raises(AttributeError): + get_check_fields(ExampleScheduledCheck) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -317,6 +317,7 @@ def __init__(self): pretend.call(".email"), pretend.call(".accounts"), pretend.call(".macaroons"), + pretend.call(".malware"), pretend.call(".manage"), pretend.call(".packaging"), pretend.call(".redirects"), diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -501,8 +501,11 @@ def test_includeme(env, ssl, broker_url, expected_url, transport_options): "task_serializer": "json", "accept_content": ["json", "msgpack"], "task_queue_ha_policy": "all", - "task_queues": (Queue("default", routing_key="task.#"),), - "task_routes": ([]), + "task_queues": ( + Queue("default", routing_key="task.#"), + Queue("malware", routing_key="malware.#"), + ), + "task_routes": {"warehouse.malware.tasks.*": {"queue": "malware"}}, "REDBEAT_REDIS_URL": (config.registry.settings["celery.scheduler_url"]), }.items(): assert app.conf[key] == value
admin interface for review of flagged packages We're working on a system to detect malicious uploads (per #4998). It's going to be a pipeline where automated systems run checks, flag packages/projects for deletion/review/ok, etc. So this issue is for a feature in the admin interface where administrators/moderators (#4011) can review those flagged projects and releases and decide what to accept/reject. We'll probably also want to use this if/when we implement a mechanism for users to report packages (#3896), and if/when we start automatically checking uploaded packages for compliant metadata and installability (#194).
2020-02-11T19:52:54Z
[]
[]
pypi/warehouse
7424
pypi__warehouse-7424
[ "7298" ]
48048180c4fe3c3116d0e8e4bd13dd8b1150db10
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -759,7 +759,11 @@ def file_upload(request): # request, then we'll go ahead and bomb out. if request.authenticated_userid is None: raise _exc_with_message( - HTTPForbidden, "Invalid or non-existent authentication information." + HTTPForbidden, + "Invalid or non-existent authentication information. " + "See {projecthelp} for details".format( + projecthelp=request.help_url(_anchor="invalid-auth") + ), ) # Ensure that user has a verified, primary email address. This should both diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import binascii import datetime import json import uuid @@ -77,6 +78,8 @@ def find_userid(self, raw_macaroon): try: m = pymacaroons.Macaroon.deserialize(raw_macaroon) + except binascii.Error: + return None except MacaroonDeserializationException: return None
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3139,6 +3139,7 @@ def test_fails_in_read_only_mode(self, pyramid_request): def test_fails_without_user(self, pyramid_config, pyramid_request): pyramid_request.flags = pretend.stub(enabled=lambda *a: False) + pyramid_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/") pyramid_config.testing_securitypolicy(userid=None) with pytest.raises(HTTPForbidden) as excinfo: @@ -3148,7 +3149,8 @@ def test_fails_without_user(self, pyramid_config, pyramid_request): assert resp.status_code == 403 assert resp.status == ( - "403 Invalid or non-existent authentication information." + "403 Invalid or non-existent authentication information. " + "See /the/help/url/ for details" ) diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -81,6 +81,13 @@ def test_find_userid_invalid_macaroon(self, macaroon_service): def test_find_userid_malformed_macaroon(self, macaroon_service): assert macaroon_service.find_userid(f"pypi-thiswillnotdeserialize") is None + def test_find_userid_valid_macaroon_trailinglinebreak(self, macaroon_service): + user = UserFactory.create() + raw_macaroon, _ = macaroon_service.create_macaroon( + "fake location", user.id, "fake description", {"fake": "caveats"} + ) + assert macaroon_service.find_userid(f"{raw_macaroon}\n") is None + def test_find_userid(self, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon(
"500 Server Error: Internal Server Error" when there is an unintentional trailing newline in the password **Describe the bug** When uploading distributions with twine I get the error `HTTPError: 500 Server Error: Internal Server Error` when there is an unintentional trailing newline in the password. This is confusing as I would expect the error `403 Client Error: Invalid or non-existent authentication information.` or even an automatic stripping of whitepace. This happens easily when using continuous deployment and authentication via github secrets, as it is easy to accidentally copy the token from the PyPI [Add API token](https://pypi.org/manage/account/token/) page with a trailing newline. This is then preserved when entering it into a textfield and thus included in the environment variable set by the runner. As the server seems to be returning a confusing message I added this issue here instead of in the twine repo. Hope it's correct here. **Expected behavior** Receive authentication error or automatic stripping of whitespace **To Reproduce** Note the newline at the end of the token: ``` $ twine upload --repository-url https://test.pypi.org/legacy/ -u "__token__" -p "pypi-XXX " dist/* Uploading distributions to https://test.pypi.org/legacy/ Uploading somepackage-0.1-py3-none-any.whl 100%|██████████████████████████████████████████| 9.74k/9.74k [00:00<00:00, 12.0kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 1 of 5 [...] Received "500: Internal Server Error" Package upload appears to have failed. Retry 5 of 5 NOTE: Try --verbose to see response content. HTTPError: 500 Server Error: Internal Server Error for url: https://test.pypi.org/legacy/ ``` Without the newline at the end of the token the behavior is better: ``` $ twine upload --repository-url https://test.pypi.org/legacy/ -u "__token__" -p "pypi-XXX" dist/* Uploading distributions to https://test.pypi.org/legacy/ Uploading somepackage-0.1-py3-none-any.whl 100%|██████████████████████████████████████████| 9.74k/9.74k [00:00<00:00, 10.4kB/s] NOTE: Try --verbose to see response content. HTTPError: 403 Client Error: Invalid or non-existent authentication information. for url: https://test.pypi.org/legacy/ ``` **My Platform** ``` $ python --version Python 3.6.9 $ twine --version twine version 3.1.1 (pkginfo: 1.5.0.1, requests: 2.22.0, setuptools: 45.1.0, requests-toolbelt: 0.9.1, tqdm: 4.41.1) ```
Thanks! I think this is the full stacktrace if anyone would like to fix it: ``` Error: Incorrect padding File "raven/middleware.py", line 20, in common_exception_handling yield File "raven/middleware.py", line 100, in __call__ iterable = self.application(environ, start_response) File "warehouse/utils/wsgi.py", line 99, in __call__ return self.app(environ, start_response) File "warehouse/utils/wsgi.py", line 83, in __call__ return self.app(environ, start_response) File "warehouse/utils/wsgi.py", line 71, in __call__ return self.app(environ, start_response) File "whitenoise/base.py", line 86, in __call__ return self.application(environ, start_response) File "pyramid/router.py", line 270, in __call__ response = self.execution_policy(environ, self) File "pyramid_retry/__init__.py", line 121, in retry_policy response = router.invoke_request(request) File "pyramid/router.py", line 249, in invoke_request response = handle_request(request) File "warehouse/sanity.py", line 69, in sanity_tween_ingress return handler(request) File "warehouse/referrer_policy.py", line 16, in referrer_policy_tween response = handler(request) File "warehouse/csp.py", line 31, in content_security_policy_tween resp = handler(request) File "warehouse/config.py", line 82, in require_https_tween return handler(request) File "pyramid_tm/__init__.py", line 178, in tm_tween reraise(*exc_info) File "pyramid_tm/compat.py", line 36, in reraise raise value File "pyramid_tm/__init__.py", line 143, in tm_tween response = handler(request) File "warehouse/utils/compression.py", line 92, in compression_tween response = handler(request) File "warehouse/raven.py", line 40, in raven_tween return handler(request) File "pyramid/tweens.py", line 43, in excview_tween response = _error_handler(request, exc) File "pyramid/tweens.py", line 17, in _error_handler reraise(*exc_info) File "pyramid/compat.py", line 179, in reraise raise value File "pyramid/tweens.py", line 41, in excview_tween response = handler(request) File "warehouse/cache/http.py", line 74, in conditional_http_tween response = handler(request) File "warehouse/sanity.py", line 76, in sanity_tween_egress return unicode_redirects(handler(request)) File "pyramid/router.py", line 148, in handle_request registry, request, context, context_iface, view_name File "pyramid/view.py", line 667, in _call_view response = view_callable(context, request) File "warehouse/cache/http.py", line 33, in wrapped return view(context, request) File "warehouse/cache/http.py", line 33, in wrapped return view(context, request) File "warehouse/csrf.py", line 38, in wrapped return view(context, request) File "warehouse/metrics/views.py", line 34, in wrapper_view return view(context, request) File "pyramid/viewderivers.py", line 401, in viewresult_to_response result = view(context, request) File "pyramid/viewderivers.py", line 144, in _requestonly_view response = view(request) File "warehouse/forklift/legacy.py", line 760, in file_upload if request.authenticated_userid is None: File "pyramid/security.py", line 381, in authenticated_userid return policy.authenticated_userid(self) File "pyramid_multiauth/__init__.py", line 78, in authenticated_userid userid = policy.authenticated_userid(request) File "pyramid/authentication.py", line 66, in authenticated_userid userid = self.unauthenticated_userid(request) File "warehouse/macaroons/auth_policy.py", line 89, in unauthenticated_userid userid = macaroon_service.find_userid(macaroon) File "warehouse/macaroons/services.py", line 79, in find_userid m = 
pymacaroons.Macaroon.deserialize(raw_macaroon)
  File "pymacaroons/macaroon.py", line 47, in deserialize
    return serializer.deserialize(serialized)
  File "pymacaroons/serializers/binary_serializer.py", line 89, in deserialize
    decoded = raw_b64decode(serialized)
  File "pymacaroons/utils.py", line 111, in raw_b64decode
    return raw_urlsafe_b64decode(s)
  File "pymacaroons/utils.py", line 122, in raw_urlsafe_b64decode
    return base64.urlsafe_b64decode(add_base64_padding(s.encode('utf-8')))
  File "python3.7/base64.py", line 133, in urlsafe_b64decode
    return b64decode(s)
  File "python3.7/base64.py", line 87, in b64decode
    return binascii.a2b_base64(s)
```
(https://sentry.io/organizations/python-software-foundation/issues/1143700661/)

This might be a bug with `pymacaroons`; we're already attempting to catch a `MacaroonDeserializationException` here: https://github.com/pypa/warehouse/blob/dfd5de6d1aa91ea591c0bda0c7d20edcab1aad0f/warehouse/macaroons/services.py#L78-L81
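The trace bottoms out in `binascii` rather than in pymacaroons' own exception type, which is why the existing `except MacaroonDeserializationException` never fires: the padding helper computes padding from the raw length, newline included, so the decoder ends up one `=` short. A minimal sketch of the failure mode (the helper mirrors pymacaroons' approach; details may differ):

```python
import base64

def add_base64_padding(s: bytes) -> bytes:
    # Pad to a multiple of 4 based on the *raw* length -- a trailing
    # newline is counted here but discarded later by the decoder.
    return s + b"=" * (-len(s) % 4)

raw = b"TQ"  # unpadded base64 of b"M"
print(base64.urlsafe_b64decode(add_base64_padding(raw)))   # b'M' -- fine

raw_nl = raw + b"\n"                 # token pasted with a trailing newline
padded = add_base64_padding(raw_nl)  # b'TQ\n=' -- effectively 'TQ=' to the decoder
base64.urlsafe_b64decode(padded)     # raises binascii.Error: Incorrect padding
```

The new test above (`test_find_userid_valid_macaroon_trailinglinebreak`) suggests the fix makes `find_userid` treat such undecodable tokens as unauthenticated (`None`, hence a 403) instead of letting `binascii.Error` escape as a 500.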
2020-02-22T13:44:06Z
[]
[]
pypi/warehouse
7,529
pypi__warehouse-7529
[ "6792" ]
7cbd78c24cbc7acb5ecc45ed787c95e4607f0db8
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -142,11 +142,7 @@ def _valid_platform_tag(platform_tag): _error_message_order = ["metadata_version", "name", "version"] -_dist_file_regexes = { - # True/False is for legacy or not. - True: re.compile(r".+?\.(exe|tar\.gz|bz2|rpm|deb|zip|tgz|egg|dmg|msi|whl)$", re.I), - False: re.compile(r".+?\.(tar\.gz|zip|whl|egg)$", re.I), -} +_dist_file_re = re.compile(r".+?\.(tar\.gz|zip|whl|egg)$", re.I) _wheel_file_re = re.compile( @@ -500,17 +496,7 @@ class MetadataForm(forms.Form): validators=[ wtforms.validators.DataRequired(), wtforms.validators.AnyOf( - [ - "bdist_dmg", - "bdist_dumb", - "bdist_egg", - "bdist_msi", - "bdist_rpm", - "bdist_wheel", - "bdist_wininst", - "sdist", - ], - message="Use a known file type.", + ["bdist_egg", "bdist_wheel", "sdist"], message="Use a known file type.", ), ] ) @@ -1171,7 +1157,7 @@ def file_upload(request): ) # Make sure the filename ends with an allowed extension. - if _dist_file_regexes[project.allow_legacy_files].search(filename) is None: + if _dist_file_re.search(filename) is None: raise _exc_with_message( HTTPBadRequest, "Invalid file extension: Use .egg, .tar.gz, .whl or .zip " @@ -1193,16 +1179,6 @@ def file_upload(request): ): raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.") - # Ensure that the package filetype is allowed. - # TODO: Once PEP 527 is completely implemented we should be able to delete - # this and just move it into the form itself. - if not project.allow_legacy_files and form.filetype.data not in { - "sdist", - "bdist_wheel", - "bdist_egg", - }: - raise _exc_with_message(HTTPBadRequest, "Unknown type of file.") - # The project may or may not have a file size specified on the project, if # it does then it may or may not be smaller or larger than our global file # size limits. diff --git a/warehouse/migrations/versions/b265ed9eeb8a_fully_deprecate_legacy_distribution_.py b/warehouse/migrations/versions/b265ed9eeb8a_fully_deprecate_legacy_distribution_.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/b265ed9eeb8a_fully_deprecate_legacy_distribution_.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Fully deprecate legacy distribution types + +Revision ID: b265ed9eeb8a +Revises: c4cb2d15dada +Create Date: 2020-03-12 17:51:08.447903 +""" + +from alembic import op + +revision = "b265ed9eeb8a" +down_revision = "c4cb2d15dada" + + +def upgrade(): + op.drop_column("projects", "allow_legacy_files") + + +def downgrade(): + raise RuntimeError("Order No. 
227 - Ни шагу назад!") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -115,7 +115,6 @@ class Project(SitemapMixin, db.Model): has_docs = Column(Boolean) upload_limit = Column(Integer, nullable=True) last_serial = Column(Integer, nullable=False, server_default=sql.text("0")) - allow_legacy_files = Column(Boolean, nullable=False, server_default=sql.false()) zscore = Column(Float, nullable=True) total_size = Column(BigInteger, server_default=sql.text("0"))
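To see what the consolidated extension check accepts, here is a small harness around the regex copied from the patch (the harness itself is illustrative only):

```python
import re

_dist_file_re = re.compile(r".+?\.(tar\.gz|zip|whl|egg)$", re.I)

for name in [
    "pkg-1.0.tar.gz",               # sdist: accepted
    "pkg-1.0-py3-none-any.whl",     # wheel: accepted
    "pkg-1.0.win-amd64-py3.7.exe",  # bdist_wininst: now rejected
    "pkg-1.0-1.noarch.rpm",         # bdist_rpm: now rejected
]:
    print(name, bool(_dist_file_re.search(name)))
```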
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -1513,7 +1513,10 @@ def test_upload_fails_with_legacy_type(self, pyramid_config, db_request): resp = excinfo.value assert resp.status_code == 400 - assert resp.status == "400 Unknown type of file." + assert ( + resp.status + == "400 Invalid value for filetype. Error: Use a known file type." + ) def test_upload_fails_with_legacy_ext(self, pyramid_config, db_request): pyramid_config.testing_securitypolicy(userid=1) @@ -2519,106 +2522,6 @@ def storage_service_store(path, file_path, *, meta): ) ] - def test_upload_succeeds_with_legacy_ext( - self, tmpdir, monkeypatch, pyramid_config, db_request, metrics - ): - monkeypatch.setattr(tempfile, "tempdir", str(tmpdir)) - - pyramid_config.testing_securitypolicy(userid=1) - - user = UserFactory.create() - EmailFactory.create(user=user) - project = ProjectFactory.create(allow_legacy_files=True) - release = ReleaseFactory.create(project=project, version="1.0") - RoleFactory.create(user=user, project=project) - - filename = "{}-{}.tar.bz2".format(project.name, release.version) - - db_request.user = user - db_request.remote_addr = "10.10.10.30" - db_request.user_agent = "warehouse-tests/6.6.6" - db_request.POST = MultiDict( - { - "metadata_version": "1.2", - "name": project.name, - "version": release.version, - "filetype": "sdist", - "pyversion": "source", - "md5_digest": _TAR_BZ2_PKG_MD5, - "content": pretend.stub( - filename=filename, - file=io.BytesIO(_TAR_BZ2_PKG_TESTDATA), - type="application/tar", - ), - } - ) - - def storage_service_store(path, file_path, *, meta): - with open(file_path, "rb") as fp: - assert fp.read() == _TAR_BZ2_PKG_TESTDATA - - storage_service = pretend.stub(store=storage_service_store) - db_request.find_service = lambda svc, name=None, context=None: { - IFileStorage: storage_service, - IMetricsService: metrics, - }.get(svc) - - monkeypatch.setattr(legacy, "_is_valid_dist_file", lambda *a, **kw: True) - - resp = legacy.file_upload(db_request) - - assert resp.status_code == 200 - - def test_upload_succeeds_with_legacy_type( - self, tmpdir, monkeypatch, pyramid_config, db_request, metrics - ): - monkeypatch.setattr(tempfile, "tempdir", str(tmpdir)) - - pyramid_config.testing_securitypolicy(userid=1) - - user = UserFactory.create() - EmailFactory.create(user=user) - project = ProjectFactory.create(allow_legacy_files=True) - release = ReleaseFactory.create(project=project, version="1.0") - RoleFactory.create(user=user, project=project) - - filename = "{}-{}.tar.gz".format(project.name, release.version) - - db_request.user = user - db_request.remote_addr = "10.10.10.30" - db_request.user_agent = "warehouse-tests/6.6.6" - db_request.POST = MultiDict( - { - "metadata_version": "1.2", - "name": project.name, - "version": release.version, - "filetype": "bdist_dumb", - "pyversion": "3.5", - "md5_digest": _TAR_GZ_PKG_MD5, - "content": pretend.stub( - filename=filename, - file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), - type="application/tar", - ), - } - ) - - def storage_service_store(path, file_path, *, meta): - with open(file_path, "rb") as fp: - assert fp.read() == _TAR_GZ_PKG_TESTDATA - - storage_service = pretend.stub(store=storage_service_store) - db_request.find_service = lambda svc, name=None, context=None: { - IFileStorage: storage_service, - IMetricsService: metrics, - }.get(svc) - - monkeypatch.setattr(legacy, "_is_valid_dist_file", lambda *a, **kw: True) - - resp = 
legacy.file_upload(db_request) - - assert resp.status_code == 200 - @pytest.mark.parametrize("plat", ["linux_x86_64", "linux_x86_64.win32"]) def test_upload_fails_with_unsupported_wheel_plat( self, monkeypatch, pyramid_config, db_request, plat
Fully implement PEP 527 We still retain the ability for some projects to upload the legacy filetypes listed in [PEP 527](https://www.python.org/dev/peps/pep-0527/): https://github.com/pypa/warehouse/blob/aa0d54019c322b52cb6428780808816d417abbd1/warehouse/packaging/models.py#L124 https://github.com/pypa/warehouse/blob/e1d0e4e41738fd07a0edeb77f95fedba0fdd41f8/warehouse/forklift/legacy.py#L463-L480 https://github.com/pypa/warehouse/blob/e1d0e4e41738fd07a0edeb77f95fedba0fdd41f8/warehouse/forklift/legacy.py#L1167-L1174 We should audit which projects currently have this ability, and whether they are still publishing deprecated filetypes. For example, [Pillow is no longer publishing `bdist_wininst` files](https://github.com/python-pillow/Pillow/pull/4029).
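Reduced to its essentials, the per-project escape hatch works like this (a sketch distilled from the linked code, not a verbatim excerpt); completing PEP 527 means deleting the `allow_legacy_files` branch and keeping only the fixed whitelist:

```python
# Sketch of the gate that completing PEP 527 removes.
ALLOWED_FILETYPES = {"sdist", "bdist_wheel", "bdist_egg"}

def filetype_allowed(filetype: str, allow_legacy_files: bool) -> bool:
    # Legacy-flagged projects historically bypassed the whitelist.
    return allow_legacy_files or filetype in ALLOWED_FILETYPES

assert filetype_allowed("bdist_wininst", allow_legacy_files=True)
assert not filetype_allowed("bdist_wininst", allow_legacy_files=False)
```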
IMO we should also push these projects to drop usage of the legacy formats, and if not, at least get a good understanding if the issue is a "the current toolchain doesn't satisfy workflows like the legacy formats" or "oh, we don't need it" or something else entirely. How long is the list? If not too long, let's create issues at these projects, it's likely they're not aware of the deprecation. I'd expect many are already using wheels and can ditch the legacy formats. For example with Pillow, I didn't know they were deprecated until it came up at https://discuss.python.org/t/deprecate-bdist-wininst/1929/12?u=hugovk, and we already distribute wheels so it was "oh, we don't need it". And how about adding a deprecation warning to Twine when uploading them? > How long is the list? IIRC the "list" was any project that had previously uploaded one of these filetypes, essentially we only blocked new projects. A better list would be every project that has this ability that has actually published one of these filetypes in the last `N` months. I'm guessing that this list is short enough that adding a deprecation notice to Twine would be unnecessary, but hard to say until we actually make an audit. There are currently 4,678 projects that have `allow_legacy_files` set: ``` warehouse=> select count(*) from projects where allow_legacy_files; count ------- 4678 (1 row) ``` Recent uploads for individual deprecated filetypes: ### `bdist_dmg`: ``` warehouse=> select filename, upload_time from release_files where packagetype='bdist_dmg' order by upload_time desc limit 10; filename | upload_time ------------------------------------------------+---------------------------- python_igraph-0.7.1.post6-py2.7-macosx10.9.dmg | 2015-06-05 20:58:15.702734 python_igraph-0.7.1.post6-py2.6-macosx10.9.dmg | 2015-06-05 20:56:47.202244 python_igraph-0.7.1_4-py2.7-macosx10.9.dmg | 2015-03-05 21:11:20.376493 python_igraph-0.7.1_4-py2.6-macosx10.9.dmg | 2015-03-05 21:10:22.376507 python_igraph-0.7.1_3-py2.7-macosx10.9.dmg | 2015-03-05 20:30:28.362479 python_igraph-0.7.1_3-py2.6-macosx10.9.dmg | 2015-03-05 20:28:18.772667 python_igraph-0.7.1_2-py2.7-macosx10.9.dmg | 2015-02-10 20:29:57.860577 python_igraph-0.7.1_2-py2.6-macosx10.9.dmg | 2015-02-10 20:12:25.660451 python_igraph-0.7.1_1-py2.7-macosx10.9.dmg | 2015-02-10 07:56:59.793387 python_igraph-0.7.1_1-py2.6-macosx10.9.dmg | 2015-02-09 20:48:42.300196 (10 rows) ``` ### `bdist_dumb`: ``` warehouse=> select filename, upload_time from release_files where packagetype='bdist_dumb' order by upload_time desc limit 10; filename | upload_time -------------------------------------------------+---------------------------- airspeed-0.5.13.macosx-10.14-x86_64.tar.gz | 2019-10-22 00:49:01.646779 py_nifty_cloud-0.9.5.macosx-10.14-x86_64.tar.gz | 2019-09-28 14:32:32.022959 algorithmia-1.2.0.linux-x86_64.tar.gz | 2019-08-02 19:12:17.972642 htrc-0.1.51.macosx-10.7-x86_64.tar.gz | 2019-07-30 14:19:25.724787 htrc-0.1.51b1.macosx-10.7-x86_64.tar.gz | 2019-07-24 17:42:03.053786 htrc-0.1.51b0.macosx-10.7-x86_64.tar.gz | 2019-07-24 15:36:59.304947 airspeed-0.5.12.macosx-10.14-x86_64.tar.gz | 2019-07-24 06:32:12.329112 pysodium-0.7.2.linux-x86_64.tar.gz | 2019-06-25 14:30:59.086794 htrc-0.1.50.macosx-10.7-x86_64.tar.gz | 2019-06-21 14:43:12.68958 htrc-0.1.50b0.macosx-10.7-x86_64.tar.gz | 2019-06-20 17:23:49.411197 (10 rows) ``` ### `bdist_msi`: ``` warehouse=> select filename, upload_time from release_files where packagetype='bdist_msi' order by upload_time desc limit 10; filename | upload_time 
-------------------------------------+----------------------------
 pywincffi-0.5.0.win32-py3.6.msi     | 2017-11-18 18:55:27.694295
 pywincffi-0.5.0.win32-py3.5.msi     | 2017-11-18 18:55:26.221545
 pywincffi-0.5.0.win32-py3.4.msi     | 2017-11-18 18:55:25.067028
 pywincffi-0.5.0.win32-py3.3.msi     | 2017-11-18 18:55:23.828515
 pywincffi-0.5.0.win32-py2.7.msi     | 2017-11-18 18:55:22.319013
 pywincffi-0.5.0.win-amd64-py3.6.msi | 2017-11-18 18:55:21.128075
 pywincffi-0.5.0.win-amd64-py3.5.msi | 2017-11-18 18:55:20.016371
 pywincffi-0.5.0.win-amd64-py3.4.msi | 2017-11-18 18:55:18.597794
 pywincffi-0.5.0.win-amd64-py3.3.msi | 2017-11-18 18:55:17.357411
 pywincffi-0.5.0.win-amd64-py2.7.msi | 2017-11-18 18:55:16.060902
(10 rows)
```

### `bdist_rpm`:
```
warehouse=> select filename, upload_time from release_files where packagetype='bdist_rpm' order by upload_time desc limit 10;
              filename               |        upload_time
-------------------------------------+----------------------------
 Aglyph-3.0.0-1.noarch.rpm           | 2018-03-16 02:39:14.304743
 Aglyph-3.0.0-1.src.rpm              | 2018-03-16 02:39:08.195363
 toughradius-5.0.0.6-1.noarch.rpm    | 2017-11-19 04:45:14.203082
 toughradius-5.0.0.6-1.src.rpm       | 2017-11-19 04:45:09.635322
 python-otopi-mdp-0.2.2-1.noarch.rpm | 2017-10-02 10:23:32.79819
 python-otopi-mdp-0.2.2-1.src.rpm    | 2017-10-02 10:23:23.707941
 toughradius-5.0.0.5-1.noarch.rpm    | 2017-08-20 07:48:30.370971
 toughradius-5.0.0.5-1.src.rpm       | 2017-08-20 07:48:26.664831
 cx_Oracle-6.0rc1-py35-1.x86_64.rpm  | 2017-06-17 00:14:38.838786
 cx_Oracle-6.0rc1-py27-1.x86_64.rpm  | 2017-06-17 00:14:20.067593
(10 rows)
```

### `bdist_wininst`:
```
warehouse=> select filename, upload_time from release_files where packagetype='bdist_wininst' order by upload_time desc limit 10;
                filename                 |        upload_time
-----------------------------------------+----------------------------
 GPy-1.9.9.win-amd64-py3.7.exe           | 2019-10-17 08:37:05.494866
 GPy-1.9.9.win-amd64-py3.6.exe           | 2019-10-17 08:28:54.341581
 GPy-1.9.9.win-amd64-py3.5.exe           | 2019-10-17 08:10:01.336052
 GPy-1.9.9.win-amd64-py2.7.exe           | 2019-10-17 08:01:52.12953
 Trac-1.0.19.win-amd64.exe               | 2019-10-15 00:36:35.161141
 Trac-1.0.19.win32.exe                   | 2019-10-15 00:36:30.520495
 xrayutilities-1.5.3.win-amd64-py3.7.exe | 2019-10-09 10:12:48.375835
 xrayutilities-1.5.3.win-amd64-py3.6.exe | 2019-10-09 10:12:44.698438
 xrayutilities-1.5.3.win-amd64-py3.5.exe | 2019-10-09 10:12:41.362779
 xrayutilities-1.5.3.win-amd64-py2.7.exe | 2019-10-09 10:12:38.179182
(10 rows)
```

Of these it looks like `bdist_dmg`, `bdist_msi`, and `bdist_rpm` can just be shut off. The `bdist_wininst` filetype is still getting a lot of uploads, but PEP 527 says that this is misleading:

> It's quite easy to look at the low usage of `bdist_dmg` and `bdist_msi` and conclude that removing them will be fairly low impact, however `bdist_wininst` has several orders of magnitude more usage. This is somewhat misleading though, because although it has more people uploading those files the actual usage of those uploaded files is fairly low. Taking a look at the previous 30 days, we can see that 90% of all downloads of `bdist_wininst` files from PyPI were generated by the mirroring infrastructure and 7% of them were generated by setuptools (which can currently be better covered by `bdist_egg` files).

Also `bdist_dumb` is still getting the occasional upload, but these projects would probably be better served by uploading wheels if they want platform-specific built distributions.

Thanks.
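For reference, the same per-filetype audit expressed with SQLAlchemy. This is a sketch that assumes warehouse's `File` model (the `release_files` table) and an already-configured session:

```python
# Sketch: SQLAlchemy rendering of the psql audit queries above.
from warehouse.packaging.models import File

def recent_uploads(session, packagetype, limit=10):
    """Most recent uploads of a given (deprecated) package type."""
    return (
        session.query(File.filename, File.upload_time)
        .filter(File.packagetype == packagetype)
        .order_by(File.upload_time.desc())
        .limit(limit)
        .all()
    )
```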
From the PEP's [removal process](https://www.python.org/dev/peps/pep-0527/#removal-process):

> Finally, an email will be generated to the maintainers of all projects still given the legacy flag, which will inform them of the upcoming new restrictions on uploads and tell them that these restrictions will be applied to future uploads to their projects starting in 1 month. Finally, after 1 month all projects will have the legacy file type flag removed, and support for uploading these types of files will cease to exist on PyPI.

Would now be a good time to send out the email? And to just bdist_dmg, bdist_msi, and bdist_rpm users first, or to all legacy format users?

ooooh! Nice find @hugovk! ^>^

I'm in favor of dropping all legacy formats if the PEP has a clear mechanism to do so. I don't really think it's necessary to email all 4,678 projects when only a small fraction have actually used their legacy flag recently. Perhaps we should set a timeframe instead: if they've uploaded a deprecated distribution type in the last year?

OK, next steps would be:

1. querying to get the subset of the 4,678 projects that have `allow_legacy_files` set which have had an upload in the last year
2. querying for email addresses for all their maintainers
3. drafting the email
4. sending a bulk email

Would anyone like to help with #3?

3. Something along the lines of this?

> Hello,
>
> We're emailing because you're listed as the maintainer for a package that has uploaded a legacy file type to PyPI in the past year:
>
> bdist_dmg
> bdist_dumb
> bdist_msi
> bdist_rpm
> bdist_wininst
>
> Following PEP 527, it will soon not be possible to upload legacy file types. Existing uploads will remain on PyPI, but soon new ones cannot be uploaded.
>
> https://www.python.org/dev/peps/pep-0527/
>
> This restriction will apply to new uploads after 2020-04-01 [TODO decide exact date, must be at least 1 month from email date].
>
> See PEP 527 for suggestions of replacement file types, and if you have any questions, please visit https://github.com/pypa/warehouse/issues/6792 [TODO or https://discuss.python.org/somewhere or somewhere else?].
>
> Thank you,
>
> [TODO]

OK, I've sent the notices to everyone that's uploaded one of these packages in the last year. The shutoff date is 30 days from today (2020-04-12).
For posterity, here's the SQL script I used to generate the affected users/projects: ```sql SELECT user_id, projects.name as project_name, packagetype FROM ( SELECT roles.user_id as user_id, roles.project_id as project_id, packagetype FROM ( SELECT project_id, packagetype FROM ( SELECT release_id, packagetype FROM release_files WHERE ( packagetype IN ( 'bdist_dmg', 'bdist_dumb', 'bdist_msi', 'bdist_rpm', 'bdist_wininst' ) AND "upload_time" > ( localtimestamp - interval '365 days' ) ) GROUP BY release_id, packagetype ) f JOIN releases ON releases.id = f.release_id GROUP BY project_id, packagetype ) release JOIN roles ON release.project_id = roles.project_id GROUP BY user_id, roles.project_id, packagetype ) p1 JOIN projects ON p1.project_id = projects.id; ``` Ran that like so: ``` psql service=pypi -t -A -F"," -f pep527.sql > pep527.csv ``` Then used the following script to turn that output into a CSV of mass emails: ```python import csv from collections import defaultdict users = defaultdict(list) subject = "[PyPI] Notice: Deprecation of underused file types/extensions" body_template = """ Hello, We're emailing because you're listed as a maintainer or owner for a package that has uploaded a legacy file type to PyPI in the past year: {project_list} Following PEP 527, it will soon not be possible to upload legacy file types. https://www.python.org/dev/peps/pep-0527/ This restriction will apply to new uploads after 30 days from today (2020-04-12). Existing uploads will remain on PyPI, but soon new ones cannot be uploaded. See PEP 527 for suggestions of replacement file types, and if you have any questions, please comment on the tracking issue for this deprecation: https://github.com/pypa/warehouse/issues/6792 Thank you, The PyPI Administrators """ with open("pep527.csv") as f: reader = csv.DictReader(f) for row in reader: users[row["user_id"]].append((row["project_name"], row["packagetype"])) with open("pep527-complete.csv", "w") as f: writer = csv.DictWriter(f, fieldnames=["user_id", "subject", "body_text"]) writer.writeheader() for user_id, projects in users.items(): project_list = "\n".join( f"* Project: {project_name}, package type: {packagetype}" for project_name, packagetype in projects ) writer.writerow( { "user_id": user_id, "subject": subject, "body_text": body_template.format(project_list=project_list), } ) ```
2020-03-12T18:00:09Z
[]
[]
pypi/warehouse
7,582
pypi__warehouse-7582
[ "4626", "3786", "1244" ]
58d6829071d917413c8aa5a68056938d9d99bdf4
diff --git a/warehouse/admin/routes.py b/warehouse/admin/routes.py --- a/warehouse/admin/routes.py +++ b/warehouse/admin/routes.py @@ -99,15 +99,6 @@ def includeme(config): # Journal related Admin pages config.add_route("admin.journals.list", "/admin/journals/", domain=warehouse) - # Classifier related Admin pages - config.add_route("admin.classifiers", "/admin/classifiers/", domain=warehouse) - config.add_route( - "admin.classifiers.add", "/admin/classifiers/add/", domain=warehouse - ) - config.add_route( - "admin.classifiers.deprecate", "/admin/classifiers/deprecate/", domain=warehouse - ) - # Blacklist related Admin pages config.add_route("admin.blacklist.list", "/admin/blacklist/", domain=warehouse) config.add_route("admin.blacklist.add", "/admin/blacklist/add/", domain=warehouse) diff --git a/warehouse/admin/views/classifiers.py b/warehouse/admin/views/classifiers.py deleted file mode 100644 --- a/warehouse/admin/views/classifiers.py +++ /dev/null @@ -1,106 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pyramid.httpexceptions import HTTPSeeOther -from pyramid.view import view_config, view_defaults - -from warehouse.packaging.models import Classifier - - -@view_config( - route_name="admin.classifiers", - renderer="admin/classifiers/index.html", - permission="moderator", - request_method="GET", - uses_session=True, -) -def get_classifiers(request): - classifiers = request.db.query(Classifier).order_by(Classifier.classifier).all() - - return {"classifiers": classifiers} - - -@view_defaults( - route_name="admin.classifiers.add", - permission="moderator", - request_method="POST", - uses_session=True, - require_methods=False, - require_csrf=True, -) -class AddClassifier: - def __init__(self, request): - self.request = request - - @view_config(request_param=["parent"]) - def add_parent_classifier(self): - classifier = Classifier( - classifier=self.request.params.get("parent"), l3=0, l4=0, l5=0 - ) - - self.request.db.add(classifier) - self.request.db.flush() # To get the ID - - classifier.l2 = classifier.id - - self.request.session.flash( - f"Added classifier {classifier.classifier!r}", queue="success" - ) - - return HTTPSeeOther(self.request.route_path("admin.classifiers")) - - @view_config(request_param=["parent_id", "child"]) - def add_child_classifier(self): - parent = self.request.db.query(Classifier).get( - self.request.params.get("parent_id") - ) - - classifier = Classifier( - l2=parent.l2, - l3=parent.l3, - l4=parent.l4, - l5=parent.l5, - classifier=(parent.classifier + " :: " + self.request.params.get("child")), - ) - self.request.db.add(classifier) - self.request.db.flush() # To get the ID - - for level in ["l3", "l4", "l5"]: - if getattr(classifier, level) == 0: - setattr(classifier, level, classifier.id) - break - - self.request.session.flash( - f"Added classifier {classifier.classifier!r}", queue="success" - ) - - return HTTPSeeOther(self.request.route_path("admin.classifiers")) - - -@view_config( - route_name="admin.classifiers.deprecate", - permission="moderator", - 
request_method="POST", - uses_session=True, - require_methods=False, - require_csrf=True, -) -def deprecate_classifier(request): - classifier = request.db.query(Classifier).get(request.params.get("classifier_id")) - - classifier.deprecated = True - - request.session.flash( - f"Deprecated classifier {classifier.classifier!r}", queue="success" - ) - - return HTTPSeeOther(request.route_path("admin.classifiers")) diff --git a/warehouse/classifiers/models.py b/warehouse/classifiers/models.py --- a/warehouse/classifiers/models.py +++ b/warehouse/classifiers/models.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from sqlalchemy import Boolean, CheckConstraint, Column, Integer, Text, sql +from sqlalchemy import CheckConstraint, Column, Integer, Text from warehouse import db from warehouse.utils.attrs import make_repr @@ -28,8 +28,3 @@ class Classifier(db.ModelBase): id = Column(Integer, primary_key=True, nullable=False) classifier = Column(Text, unique=True) - deprecated = Column(Boolean, nullable=False, server_default=sql.false()) - l2 = Column(Integer) - l3 = Column(Integer) - l4 = Column(Integer) - l5 = Column(Integer) diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -37,6 +37,7 @@ from pyramid.view import view_config from sqlalchemy import exists, func, orm from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound +from trove_classifiers import classifiers, deprecated_classifiers from warehouse import forms from warehouse.admin.flags import AdminFlagValue @@ -340,6 +341,38 @@ def _raise(message): ) +def _validate_no_deprecated_classifiers(form, field): + invalid_classifiers = set(field.data or []) & deprecated_classifiers.keys() + if invalid_classifiers: + first_invalid_classifier_name = sorted(invalid_classifiers)[0] + deprecated_by = deprecated_classifiers[first_invalid_classifier_name] + + if deprecated_by: + raise wtforms.validators.ValidationError( + f"Classifier {first_invalid_classifier_name!r} has been " + "deprecated, use the following classifier(s) instead: " + f"{deprecated_by}" + ) + else: + raise wtforms.validators.ValidationError( + f"Classifier {first_invalid_classifier_name!r} has been deprecated." + ) + + +def _validate_classifiers(form, field): + invalid = sorted(set(field.data or []) - classifiers) + + if invalid: + if len(invalid) == 1: + raise wtforms.validators.ValidationError( + f"Classifier {invalid[0]!r} is not a valid classifier." + ) + else: + raise wtforms.validators.ValidationError( + f"Classifiers {invalid!r} are not valid classifiers." 
+ ) + + def _construct_dependencies(form, types): for name, kind in types.items(): for item in getattr(form, name).data: @@ -437,7 +470,10 @@ class MetadataForm(forms.Form): keywords = wtforms.StringField( description="Keywords", validators=[wtforms.validators.Optional()] ) - classifiers = wtforms.fields.SelectMultipleField(description="Classifier") + classifiers = ListField( + description="Classifier", + validators=[_validate_no_deprecated_classifiers, _validate_classifiers], + ) platform = wtforms.StringField( description="Platform", validators=[wtforms.validators.Optional()] ) @@ -702,32 +738,6 @@ def _is_duplicate_file(db_session, filename, hashes): return None -def _no_deprecated_classifiers(request): - deprecated_classifiers = { - classifier.classifier - for classifier in ( - request.db.query(Classifier.classifier) - .filter(Classifier.deprecated.is_(True)) - .all() - ) - } - - def validate_no_deprecated_classifiers(form, field): - invalid_classifiers = set(field.data or []) & deprecated_classifiers - if invalid_classifiers: - first_invalid_classifier = sorted(invalid_classifiers)[0] - host = request.registry.settings.get("warehouse.domain") - classifiers_url = request.route_url("classifiers", _host=host) - - raise wtforms.validators.ValidationError( - f"Classifier {first_invalid_classifier!r} has been " - f"deprecated, see {classifiers_url} for a list of valid " - "classifiers." - ) - - return validate_no_deprecated_classifiers - - @view_config( route_name="forklift.legacy.file_upload", uses_session=True, @@ -816,16 +826,9 @@ def file_upload(request): if any(isinstance(value, FieldStorage) for value in values): raise _exc_with_message(HTTPBadRequest, f"{field}: Should not be a tuple.") - # Look up all of the valid classifiers - all_classifiers = request.db.query(Classifier).all() - # Validate and process the incoming metadata. 
form = MetadataForm(request.POST) - # Add a validator for deprecated classifiers - form.classifiers.validators.append(_no_deprecated_classifiers(request)) - - form.classifiers.choices = [(c.classifier, c.classifier) for c in all_classifiers] if not form.validate(): for field_name in _error_message_order: if field_name in form.errors: @@ -1054,11 +1057,29 @@ def file_upload(request): .one() ) except NoResultFound: + # Look up all of the valid classifiers + all_classifiers = request.db.query(Classifier).all() + + # Get all the classifiers for this release + release_classifiers = [ + c for c in all_classifiers if c.classifier in form.classifiers.data + ] + + # Determine if we need to add any new classifiers to the database + missing_classifiers = set(form.classifiers.data or []) - set( + c.classifier for c in release_classifiers + ) + + # Add any new classifiers to the database + if missing_classifiers: + for missing_classifier_name in missing_classifiers: + missing_classifier = Classifier(classifier=missing_classifier_name) + request.db.add(missing_classifier) + release_classifiers.append(missing_classifier) + release = Release( project=project, - _classifiers=[ - c for c in all_classifiers if c.classifier in form.classifiers.data - ], + _classifiers=release_classifiers, dependencies=list( _construct_dependencies( form, diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py --- a/warehouse/legacy/api/pypi.py +++ b/warehouse/legacy/api/pypi.py @@ -13,6 +13,7 @@ from pyramid.httpexceptions import HTTPGone, HTTPMovedPermanently, HTTPNotFound from pyramid.response import Response from pyramid.view import forbidden_view_config, view_config +from trove_classifiers import classifiers from warehouse.classifiers.models import Classifier @@ -74,16 +75,8 @@ def forbidden_legacy(exc, request): @view_config(route_name="legacy.api.pypi.list_classifiers") def list_classifiers(request): - classifiers = ( - request.db.query(Classifier.classifier) - .filter(Classifier.deprecated.is_(False)) - .order_by(Classifier.classifier) - .all() - ) - return Response( - text="\n".join(c[0] for c in classifiers), - content_type="text/plain; charset=utf-8", + text="\n".join(sorted(classifiers)), content_type="text/plain; charset=utf-8", ) diff --git a/warehouse/migrations/versions/d15f020ee3df_simplify_classifier_model.py b/warehouse/migrations/versions/d15f020ee3df_simplify_classifier_model.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/d15f020ee3df_simplify_classifier_model.py @@ -0,0 +1,62 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +Simplify classifier model + +Revision ID: d15f020ee3df +Revises: 6af76ffb9612 +Create Date: 2020-02-03 03:28:22.838779 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "d15f020ee3df" +down_revision = "6af76ffb9612" + + +def upgrade(): + op.drop_column("trove_classifiers", "l4") + op.drop_column("trove_classifiers", "l5") + op.drop_column("trove_classifiers", "l3") + op.drop_column("trove_classifiers", "deprecated") + op.drop_column("trove_classifiers", "l2") + + +def downgrade(): + op.add_column( + "trove_classifiers", + sa.Column("l2", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.add_column( + "trove_classifiers", + sa.Column( + "deprecated", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=False, + ), + ) + op.add_column( + "trove_classifiers", + sa.Column("l3", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.add_column( + "trove_classifiers", + sa.Column("l5", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.add_column( + "trove_classifiers", + sa.Column("l4", sa.INTEGER(), autoincrement=False, nullable=True), + ) diff --git a/warehouse/views.py b/warehouse/views.py --- a/warehouse/views.py +++ b/warehouse/views.py @@ -38,6 +38,7 @@ from sqlalchemy import func from sqlalchemy.orm import aliased, joinedload from sqlalchemy.sql import exists +from trove_classifiers import classifiers, deprecated_classifiers from warehouse.accounts import REDIRECT_FIELD_NAME from warehouse.accounts.models import User @@ -54,8 +55,6 @@ from warehouse.utils.paginate import ElasticsearchPage, paginate_url_factory from warehouse.utils.row_counter import RowCount -# 403, 404, 410, 500, - @view_config(context=HTTPException) @notfound_view_config(append_slash=HTTPMovedPermanently) @@ -261,15 +260,8 @@ def locale(request): @view_config( route_name="classifiers", renderer="pages/classifiers.html", has_translations=True ) -def classifiers(request): - classifiers = ( - request.db.query(Classifier.classifier) - .filter(Classifier.deprecated.is_(False)) - .order_by(Classifier.classifier) - .all() - ) - - return {"classifiers": classifiers} +def list_classifiers(request): + return {"classifiers": sorted(classifiers)} @view_config( @@ -313,11 +305,11 @@ def search(request): classifiers_q = ( request.db.query(Classifier) .with_entities(Classifier.classifier) - .filter(Classifier.deprecated.is_(False)) .filter( exists([release_classifiers.c.trove_id]).where( release_classifiers.c.trove_id == Classifier.id - ) + ), + Classifier.classifier.notin_(deprecated_classifiers.keys()), ) .order_by(Classifier.classifier) )
diff --git a/tests/common/db/classifiers.py b/tests/common/db/classifiers.py --- a/tests/common/db/classifiers.py +++ b/tests/common/db/classifiers.py @@ -10,9 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import factory -import factory.fuzzy - from warehouse.classifiers.models import Classifier from .base import WarehouseFactory @@ -21,8 +18,3 @@ class ClassifierFactory(WarehouseFactory): class Meta: model = Classifier - - l2 = factory.fuzzy.FuzzyInteger(0) - l3 = factory.fuzzy.FuzzyInteger(0) - l4 = factory.fuzzy.FuzzyInteger(0) - l5 = factory.fuzzy.FuzzyInteger(0) diff --git a/tests/unit/admin/test_routes.py b/tests/unit/admin/test_routes.py --- a/tests/unit/admin/test_routes.py +++ b/tests/unit/admin/test_routes.py @@ -101,15 +101,6 @@ def test_includeme(): domain=warehouse, ), pretend.call("admin.journals.list", "/admin/journals/", domain=warehouse), - pretend.call("admin.classifiers", "/admin/classifiers/", domain=warehouse), - pretend.call( - "admin.classifiers.add", "/admin/classifiers/add/", domain=warehouse - ), - pretend.call( - "admin.classifiers.deprecate", - "/admin/classifiers/deprecate/", - domain=warehouse, - ), pretend.call("admin.blacklist.list", "/admin/blacklist/", domain=warehouse), pretend.call("admin.blacklist.add", "/admin/blacklist/add/", domain=warehouse), pretend.call( diff --git a/tests/unit/admin/views/test_classifiers.py b/tests/unit/admin/views/test_classifiers.py deleted file mode 100644 --- a/tests/unit/admin/views/test_classifiers.py +++ /dev/null @@ -1,134 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import pretend -import pytest -import sqlalchemy - -from warehouse.admin.views import classifiers as views -from warehouse.classifiers.models import Classifier - -from ....common.db.classifiers import ClassifierFactory - - -class TestGetClassifiers: - def test_get_classifiers(self, db_request): - classifier_a = ClassifierFactory(classifier="I am first") - classifier_b = ClassifierFactory(classifier="I am last") - - assert views.get_classifiers(db_request) == { - "classifiers": [classifier_a, classifier_b] - } - - -class TestAddClassifier: - @pytest.mark.parametrize( - "parent_levels, expected_levels", - [ - ((2, 0, 0, 0), (2, None, 0, 0)), - ((2, 3, 0, 0), (2, 3, None, 0)), - ((2, 3, 4, 0), (2, 3, 4, None)), - # This won't actually happen but it's needed for coverage - ((2, 3, 4, 5), (2, 3, 4, 5)), - ], - ) - def test_add_child_classifier(self, db_request, parent_levels, expected_levels): - l2, l3, l4, l5 = parent_levels - parent = ClassifierFactory(l2=l2, l3=l3, l4=l4, l5=l5, classifier="Parent") - - db_request.params = {"parent_id": parent.id, "child": "Foobar"} - db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) - db_request.route_path = lambda *a: "/the/path" - - views.AddClassifier(db_request).add_child_classifier() - - new = ( - db_request.db.query(Classifier) - .filter(Classifier.classifier == "Parent :: Foobar") - .one() - ) - - new_l2, new_l3, new_l4, new_l5 = expected_levels - assert new.l2 == new_l2 if new_l2 is not None else new.id - assert new.l3 == new_l3 if new_l3 is not None else new.id - assert new.l4 == new_l4 if new_l4 is not None else new.id - assert new.l5 == new_l5 if new_l5 is not None else new.id - - def test_add_parent_classifier(self, db_request): - db_request.params = {"parent": "Foo :: Bar"} - db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) - db_request.route_path = lambda *a: "/the/path" - - views.AddClassifier(db_request).add_parent_classifier() - - new = ( - db_request.db.query(Classifier) - .filter(Classifier.classifier == "Foo :: Bar") - .one() - ) - - assert new.l2 == new.id - assert new.l3 == 0 - assert new.l4 == 0 - assert new.l5 == 0 - - @pytest.mark.parametrize( - "parent_classifier, parent_levels, expected_levels", - [ - ("private", (2, 0, 0, 0), (2, None, 0, 0)), - ("private", (2, 3, 0, 0), (2, 3, None, 0)), - ("private", (2, 3, 4, 0), (2, 3, 4, None)), - ("Private", (2, 0, 0, 0), (2, None, 0, 0)), - ("Private", (2, 3, 0, 0), (2, 3, None, 0)), - ("Private", (2, 3, 4, 0), (2, 3, 4, None)), - ("PrIvAtE", (2, 0, 0, 0), (2, None, 0, 0)), - ("PrIvAtE", (2, 3, 0, 0), (2, 3, None, 0)), - ("PrIvAtE", (2, 3, 4, 0), (2, 3, 4, None)), - ], - ) - def test_add_private_child_classifier( - self, db_request, parent_classifier, parent_levels, expected_levels - ): - l2, l3, l4, l5 = parent_levels - parent = ClassifierFactory( - l2=l2, l3=l3, l4=l4, l5=l5, classifier=parent_classifier - ) - - db_request.params = {"parent_id": parent.id, "child": "Foobar"} - db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) - db_request.route_path = lambda *a: "/the/path" - - with pytest.raises(sqlalchemy.exc.IntegrityError): - views.AddClassifier(db_request).add_child_classifier() - - @pytest.mark.parametrize("parent_classifier", ["private", "Private", "PrIvAtE"]) - def test_add_private_parent_classifier(self, db_request, parent_classifier): - db_request.params = {"parent": f"{parent_classifier} :: Do Not Upload"} - db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) - db_request.route_path = 
lambda *a: "/the/path" - - with pytest.raises(sqlalchemy.exc.IntegrityError): - views.AddClassifier(db_request).add_parent_classifier() - - -class TestDeprecateClassifier: - def test_deprecate_classifier(self, db_request): - classifier = ClassifierFactory(deprecated=False) - - db_request.params = {"classifier_id": classifier.id} - db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) - db_request.route_path = lambda *a: "/the/path" - - views.deprecate_classifier(db_request) - db_request.db.flush() - - assert classifier.deprecated diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -26,7 +26,9 @@ import requests from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import joinedload +from trove_classifiers import classifiers from webob.multidict import MultiDict from wtforms.form import Form from wtforms.validators import ValidationError @@ -328,25 +330,42 @@ def test_validate_description_content_type_invalid(self, data): legacy._validate_description_content_type(form, field) def test_validate_no_deprecated_classifiers_valid(self, db_request): - valid_classifier = ClassifierFactory(deprecated=False) - validator = legacy._no_deprecated_classifiers(db_request) + valid_classifier = ClassifierFactory(classifier="AA :: BB") form = pretend.stub() field = pretend.stub(data=[valid_classifier.classifier]) - validator(form, field) + legacy._validate_no_deprecated_classifiers(form, field) - def test_validate_no_deprecated_classifiers_invalid(self, db_request): - deprecated_classifier = ClassifierFactory(classifier="AA: BB", deprecated=True) - validator = legacy._no_deprecated_classifiers(db_request) - db_request.registry = pretend.stub(settings={"warehouse.domain": "host"}) - db_request.route_url = pretend.call_recorder(lambda *a, **kw: "/url") + @pytest.mark.parametrize( + "deprecated_classifiers", [({"AA :: BB": []}), ({"AA :: BB": ["CC :: DD"]})] + ) + def test_validate_no_deprecated_classifiers_invalid( + self, db_request, deprecated_classifiers, monkeypatch + ): + monkeypatch.setattr(legacy, "deprecated_classifiers", deprecated_classifiers) + + form = pretend.stub() + field = pretend.stub(data=["AA :: BB"]) + + with pytest.raises(ValidationError): + legacy._validate_no_deprecated_classifiers(form, field) + + def test_validate_classifiers_valid(self, db_request, monkeypatch): + monkeypatch.setattr(legacy, "classifiers", {"AA :: BB"}) + + form = pretend.stub() + field = pretend.stub(data=["AA :: BB"]) + legacy._validate_classifiers(form, field) + + @pytest.mark.parametrize("data", [(["AA :: BB"]), (["AA :: BB", "CC :: DD"])]) + def test_validate_classifiers_invalid(self, db_request, data): form = pretend.stub() - field = pretend.stub(data=[deprecated_classifier.classifier]) + field = pretend.stub(data=data) with pytest.raises(ValidationError): - validator(form, field) + legacy._validate_classifiers(form, field) def test_construct_dependencies(): @@ -1639,7 +1658,7 @@ def test_upload_fails_with_invalid_classifier(self, pyramid_config, db_request): ), } ) - db_request.POST.extend([("classifiers", "Environment :: Other Environment")]) + db_request.POST.extend([("classifiers", "Invalid :: Classifier")]) with pytest.raises(HTTPBadRequest) as excinfo: legacy.file_upload(db_request) @@ -1648,12 +1667,29 @@ def test_upload_fails_with_invalid_classifier(self, pyramid_config, db_request): assert 
resp.status_code == 400 assert resp.status == ( - "400 Invalid value for classifiers. " - "Error: 'Environment :: Other Environment' is not a valid choice " - "for this field" + "400 Invalid value for classifiers. Error: Classifier 'Invalid :: " + "Classifier' is not a valid classifier." ) - def test_upload_fails_with_deprecated_classifier(self, pyramid_config, db_request): + @pytest.mark.parametrize( + "deprecated_classifiers, expected", + [ + ( + {"AA :: BB": ["CC :: DD"]}, + "400 Invalid value for classifiers. Error: Classifier 'AA :: " + "BB' has been deprecated, use the following classifier(s) " + "instead: ['CC :: DD']", + ), + ( + {"AA :: BB": []}, + "400 Invalid value for classifiers. Error: Classifier 'AA :: " + "BB' has been deprecated.", + ), + ], + ) + def test_upload_fails_with_deprecated_classifier( + self, pyramid_config, db_request, monkeypatch, deprecated_classifiers, expected + ): pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() @@ -1662,7 +1698,9 @@ def test_upload_fails_with_deprecated_classifier(self, pyramid_config, db_reques project = ProjectFactory.create() release = ReleaseFactory.create(project=project, version="1.0") RoleFactory.create(user=user, project=project) - classifier = ClassifierFactory(classifier="AA :: BB", deprecated=True) + classifier = ClassifierFactory(classifier="AA :: BB") + + monkeypatch.setattr(legacy, "deprecated_classifiers", deprecated_classifiers) filename = "{}-{}.tar.gz".format(project.name, release.version) @@ -1689,11 +1727,7 @@ def test_upload_fails_with_deprecated_classifier(self, pyramid_config, db_reques resp = excinfo.value assert resp.status_code == 400 - assert resp.status == ( - "400 Invalid value for classifiers. " - "Error: Classifier 'AA :: BB' has been deprecated, see /url " - "for a list of valid classifiers." 
- ) + assert resp.status == expected @pytest.mark.parametrize( "digests", @@ -2786,6 +2820,91 @@ def test_upload_succeeds_creates_release(self, pyramid_config, db_request, metri ), ] + def test_upload_succeeds_creates_classifier( + self, pyramid_config, db_request, metrics, monkeypatch + ): + pyramid_config.testing_securitypolicy(userid=1) + + user = UserFactory.create() + EmailFactory.create(user=user) + project = ProjectFactory.create() + RoleFactory.create(user=user, project=project) + + monkeypatch.setattr(legacy, "classifiers", {"AA :: BB", "CC :: DD"}) + + db_request.db.add(Classifier(classifier="AA :: BB")) + + filename = "{}-{}.tar.gz".format(project.name, "1.0") + + db_request.user = user + db_request.remote_addr = "10.10.10.20" + db_request.user_agent = "warehouse-tests/6.6.6" + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": "1.0", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + db_request.POST.extend( + [ + ("classifiers", "AA :: BB"), + ("classifiers", "CC :: DD"), + ("requires_dist", "foo"), + ("requires_dist", "bar (>1.0)"), + ("project_urls", "Test, https://example.com/"), + ("requires_external", "Cheese (>1.0)"), + ("provides", "testing"), + ] + ) + + storage_service = pretend.stub(store=lambda path, filepath, meta: None) + db_request.find_service = lambda svc, name=None, context=None: { + IFileStorage: storage_service, + IMetricsService: metrics, + }.get(svc) + + resp = legacy.file_upload(db_request) + + assert resp.status_code == 200 + + # Ensure that a new Classifier has been created + classifier = ( + db_request.db.query(Classifier) + .filter(Classifier.classifier == "CC :: DD") + .one() + ) + assert classifier.classifier == "CC :: DD" + + # Ensure that the Release has the new classifier + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & (Release.version == "1.0")) + .one() + ) + assert release.classifiers == ["AA :: BB", "CC :: DD"] + + def test_all_valid_classifiers_can_be_created(self, db_request): + for classifier in classifiers: + db_request.db.add(Classifier(classifier=classifier)) + db_request.db.commit() + + @pytest.mark.parametrize( + "parent_classifier", ["private", "Private", "PrIvAtE"], + ) + def test_private_classifiers_cannot_be_created(self, db_request, parent_classifier): + with pytest.raises(IntegrityError): + db_request.db.add(Classifier(classifier=f"{parent_classifier} :: Foo")) + db_request.db.commit() + def test_equivalent_version_one_release(self, pyramid_config, db_request, metrics): """ Test that if a release with a version like '1.0' exists, that a future diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py --- a/tests/unit/legacy/api/test_pypi.py +++ b/tests/unit/legacy/api/test_pypi.py @@ -14,6 +14,7 @@ import pytest from pyramid.httpexceptions import HTTPBadRequest, HTTPMovedPermanently, HTTPNotFound +from trove_classifiers import classifiers from warehouse.legacy.api import pypi @@ -67,14 +68,10 @@ def test_forbidden_legacy(): def test_list_classifiers(db_request): - ClassifierFactory.create(classifier="foo :: bar") - ClassifierFactory.create(classifier="foo :: baz") - ClassifierFactory.create(classifier="fiz :: buz") - resp = pypi.list_classifiers(db_request) assert resp.status_code == 200 - assert resp.text == "fiz :: buz\nfoo :: bar\nfoo :: baz" + 
assert resp.text == "\n".join(sorted(classifiers)) def test_search(): diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py --- a/tests/unit/test_views.py +++ b/tests/unit/test_views.py @@ -22,11 +22,11 @@ HTTPSeeOther, HTTPServiceUnavailable, ) +from trove_classifiers import classifiers from webob.multidict import MultiDict from warehouse import views from warehouse.views import ( - classifiers, current_user_indicator, flash_messages, forbidden, @@ -35,6 +35,7 @@ health, httpexception_view, index, + list_classifiers, locale, opensearchxml, robotstxt, @@ -424,12 +425,7 @@ def raiser(*args, **kwargs): def test_classifiers(db_request): - classifier_a = ClassifierFactory(classifier="I am first") - classifier_b = ClassifierFactory(classifier="I am last") - - assert classifiers(db_request) == { - "classifiers": [(classifier_a.classifier,), (classifier_b.classifier,)] - } + assert list_classifiers(db_request) == {"classifiers": sorted(classifiers)} def test_stats(db_request):
Add "replaced by" field for deprecated classifiers One problem with our current ability to deprecate trove classifiers is that we don't have a great way to indicate to the user which classifier(s) they should be using instead. For example, when I deprecated the classifier `Natural Language :: Ukranian` due to a typo and added `Natural Language :: Ukrainian`, any user that might have had the old classifier would have received an error like: ``` HTTPError: 400 Client Error: Invalid value for classifiers. Error: Classifier 'Natural Language :: Ukranian' has been deprecated, see https://pypi.org/classifiers/ for a list of valid classifiers. for url: http://upload.pypi.org/legacy/ ``` It would be great if we could optionally identify one or more classifiers which replace a deprecated classifier when deprecating it, so we could make this error message something like: ``` HTTPError: 400 Client Error: Invalid value for classifiers. Error: Classifier 'Natural Language :: Ukranian' has been deprecated, and replaced with the following classifier(s): 'Natural Language :: Ukrainian'. for url: http://upload.pypi.org/legacy/ ``` --- **Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev). Derive list of classifiers from a public, version-controlled source Per discussions in #1300, in particular the following comments, quoted below for convenience: [ncoghlan](https://github.com/pypa/warehouse/issues/1300#issuecomment-360965801): > I think @ timofurrer's question does raise an interesting UX question: would a file in the specifications section of the PyPUG and/or some other PyPA repo that anyone can submit a PR to be a better primary data source for this information than the PyPI database? > > Then PyPI would just be a consumer of that file (presenting it via the web URL), rather than the source of the official list. [jonparrott](https://github.com/pypa/warehouse/issues/1300#issuecomment-375944477): > I agree with @ ncoghlan - we should have some canonical location for this that's easy to modify. > > Could we pull these classifiers out of the warehouse database and just store them in a datafile in this repository? > > Alternatively, I could see us establishing a new project that holds the canonical list that `warehouse` and `pypug` depends on. > > Related: #3028 [dstufft](https://github.com/pypa/warehouse/issues/1300#issuecomment-375945558): > Easy to modify isn't exactly ideal here, there are a few different types of modifications you can make: > > - Addition: This is easy to cope with, since it's purely addition, Warehouse can simply add it. > - Deletion: This is less easy to cope with, because there are really two kinds of deletion possible: > - Deletion where we want to expunge the record from all releases. 
This is technically easy, but unlikely to actually be what we want (and it would make the PyPI metadata and the package metadata disagree, which is undesirable) > - Deletion where we simply want to disallow new uploads containing the classifier, but still want to retain it for the historical record. > - Rename: This is hard to deal with, because you don't want to suddenly start rejecting previous versions of the classifier; it would break people's uploads for little reason, but you want to treat the old and the new name as equivalent. > > This also makes a simple text file not really well suited for it, because you can't really differentiate deletion to expunge from deletion to block from renames. In addition, internally in Warehouse (and legacy PyPI) the trove classifiers are represented as rows in a database that we foreign key against, so something that we depend on isn't going to be a workable solution unless we do something janky like try to automatically reconcile our database against that dependency (which then starts to get into all of the problems I listed above with having to figure out what sort of change was made). > > Beyond all of that though, regardless of whether we call the list in some other location or the list inside of the DB the "canonical" location, practically speaking, PyPI is going to be the de facto canonical location in every way that anyone actually cares about (since 99% of the time, what someone cares about when looking at classifiers is whether PyPI will accept them or not). > > Ultimately, I think the canonical location being on PyPI makes things easier to maintain and manage, and it allows us to provide a better user experience for end users as well. It lets us put structured data in the database, while providing a UI to actually manage it that glosses over the details of actually managing that structured data. It also lets us tailor what the list we give people contains, based on the context in which we're giving them that list. For example, in documentation we would almost certainly exclude any legacy aliases for renamed classifiers or deleted classifiers that we still have the record for but are no longer accepting, but for an API endpoint that something like `flit` might call, we'd want to include all of the classifiers we are currently accepting (legacy alias or not) but none of the ones that we are not. I imagine there'd even be an API that reports all classifiers past and present and their current status. (...) [waldyrious](https://github.com/pypa/warehouse/issues/1300#issuecomment-383285416): > @ brainwane, @ di and all: is there a place where the suggestion made by @ ncoghlan [above](https://github.com/pypa/warehouse/issues/1300#issuecomment-360965801): > >> would a file in the specifications section of the PyPUG and/or some other PyPA repo that anyone can submit a PR to be a better primary data source for this information than the PyPI database? > > ...could be tracked, e.g. by opening a new issue? Or has that already been decided against? > > FWIW, I still think that a public and collaborative ("PR-able") data source would be preferable to a private database table. At least the table definitions for recreating the database could be made available in a repo somewhere, similar to https://noc.wikimedia.org, and for [similar reasons](https://blog.wikimedia.org/2011/09/19/ever-wondered-how-the-wikimedia-servers-are-configured/). 
[ncoghlan](https://github.com/pypa/warehouse/issues/1300#issuecomment-383307128): > I withdrew the basic suggestion of a flat text file based on Donald's comments at https://github.com/pypa/warehouse/issues/1300#issuecomment-375945558 > > That doesn't rule out the possibility of a "classifier log" format though, that tracks the possible operations as a series of historical events: > > - addition of new classifiers > - renaming of classifiers > - prohibition of a classifier in new uploads (rare) > - removal of a classifier from all published metadata records (incredibly rare due to the resulting inconsistency with the artifact's internal metadata) > > The way to pursue the idea further would be as a new issue proposing to derive the contents of the classifier table from a source-controlled log of classifier changes, and then, after discussing a suitable design with the Warehouse devs, working on a PR to actually implement that. JSON API for Trove Classifiers Similar to #1241, but provide a JSON API; this will depend on figuring out a solution to #284.
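For illustration, here is a minimal sketch of how a "replaced by" mapping could drive the improved upload-time error message described above; the `deprecated_classifiers` dict and `validate_classifier` helper are hypothetical names for this sketch, not Warehouse's actual code:

```python
# Hypothetical mapping from a deprecated classifier to its replacement(s).
# An empty list means "deprecated with no designated replacement".
deprecated_classifiers = {
    "Natural Language :: Ukranian": ["Natural Language :: Ukrainian"],
}


def validate_classifier(classifier: str) -> None:
    """Reject deprecated classifiers, suggesting replacements when known."""
    if classifier not in deprecated_classifiers:
        return
    replacements = deprecated_classifiers[classifier]
    if replacements:
        raise ValueError(
            f"Classifier {classifier!r} has been deprecated, and replaced "
            "with the following classifier(s): "
            + ", ".join(repr(r) for r in replacements)
        )
    raise ValueError(
        f"Classifier {classifier!r} has been deprecated, see "
        "https://pypi.org/classifiers/ for a list of valid classifiers."
    )
```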
@di I would like to work on this issue. @Man-Jain If you're still interested in this issue, yes, please go ahead! @nlhkabu heads-up in case we should expose this in some way in the UI in the future. @di, I'd like to work on this issue from the Bloomberg London PyPA Sprint. This is the first part, where I'm adding a deprecation alternative to the admin page: https://github.com/pypa/warehouse/pull/4950 I will try to complete the second part (the message for the API) tomorrow. Pinging the authors of the comments above: @ncoghlan, @jonparrott, @dstufft.
2020-03-23T23:48:50Z
[]
[]
pypi/warehouse
7689
pypi__warehouse-7689
[ "2745" ]
32f621a359bfa7ecb84d8b0ff77e675aae2cd9e1
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -13,8 +13,6 @@ import base64 import io -from collections import defaultdict - import pyqrcode from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage @@ -915,44 +913,13 @@ def manage_project_settings(project, request): return {"project": project} -def get_project_contributors(project_name, request): - query_res = ( - request.db.query(Project) - .join(User, Project.users) - .filter(Project.name == project_name) +def get_user_role_in_project(project, user, request): + return ( + request.db.query(Role) + .filter(Role.user == user, Role.project == project) .one() + .role_name ) - return query_res.users - - -def get_user_role_in_project(project_name, username, request): - raw_res = ( - request.db.query(Project) - .join(User, Project.users) - .filter(User.username == username, Project.name == project_name) - .with_entities(Role.role_name) - .distinct(Role.role_name) - .all() - ) - - query_res = [] - for el in raw_res: - if el.role_name is not None: - query_res.append(el) - - user_role = "" - # This check is needed because of - # issue https://github.com/pypa/warehouse/issues/2745 - # which is not yet resolved and a user could be an owner - # and a maintainer at the same time - if len(query_res) == 2 and ( - query_res[0].role_name == "Owner" or query_res[1].role_name == "Owner" - ): - user_role = "Owner" - if len(query_res) == 1: - user_role = query_res[0].role_name - - return user_role @view_config( @@ -978,15 +945,10 @@ def delete_project(project, request): confirm_project(project, request, fail_route="manage.project.settings") - submitter_role = get_user_role_in_project( - project.name, request.user.username, request - ) - contributors = get_project_contributors(project.name, request) + submitter_role = get_user_role_in_project(project, request.user, request) - for contributor in contributors: - contributor_role = get_user_role_in_project( - project.name, contributor.username, request - ) + for contributor in project.users: + contributor_role = get_user_role_in_project(project, contributor, request) send_removed_project_email( request, @@ -1130,9 +1092,8 @@ def delete_project_release(self): ) submitter_role = get_user_role_in_project( - self.release.project.name, self.request.user.username, self.request + self.release.project, self.request.user, self.request ) - contributors = get_project_contributors(self.release.project.name, self.request) self.request.db.add( JournalEntry( @@ -1159,9 +1120,9 @@ def delete_project_release(self): f"Deleted release {self.release.version!r}", queue="success" ) - for contributor in contributors: + for contributor in self.release.project.users: contributor_role = get_user_role_in_project( - self.release.project.name, contributor.username, self.request + self.release.project, contributor, self.request ) send_removed_project_release_email( @@ -1244,13 +1205,12 @@ def _error(message): ) submitter_role = get_user_role_in_project( - project_name, self.request.user.username, self.request + self.release.project, self.request.user, self.request ) - contributors = get_project_contributors(project_name, self.request) - for contributor in contributors: + for contributor in self.release.project.users: contributor_role = get_user_role_in_project( - project_name, contributor.username, self.request + self.release.project, contributor, self.request ) send_removed_project_release_file_email( @@ -1297,15 +1257,17 @@ def 
manage_project_roles(project, request, _form_class=CreateRoleForm): userid = user_service.find_userid(username) user = user_service.get_user(userid) - if request.db.query( + existing_role = ( request.db.query(Role) - .filter( - Role.user == user, Role.project == project, Role.role_name == role_name - ) - .exists() - ).scalar(): + .filter(Role.user == user, Role.project == project) + .first() + ) + if existing_role: request.session.flash( - f"User '{username}' already has {role_name} role for project", + ( + f"User '{username}' already has {existing_role.role_name} " + "role for project" + ), queue="error", ) elif user.primary_email is None or not user.primary_email.verified: @@ -1371,15 +1333,9 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): ) form = _form_class(user_service=user_service) - roles = request.db.query(Role).join(User).filter(Role.project == project).all() - - # TODO: The following lines are a hack to handle multiple roles for a - # single user and should be removed when fixing GH-2745 - roles_by_user = defaultdict(list) - for role in roles: - roles_by_user[role.user.username].append(role) + roles = set(request.db.query(Role).join(User).filter(Role.project == project).all()) - return {"project": project, "roles_by_user": roles_by_user, "form": form} + return {"project": project, "roles": roles, "form": form} @view_config( @@ -1391,95 +1347,43 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): has_translations=True, ) def change_project_role(project, request, _form_class=ChangeRoleForm): - # TODO: This view was modified to handle deleting multiple roles for a - # single user and should be updated when fixing GH-2745 - form = _form_class(request.POST) if form.validate(): - role_ids = request.POST.getall("role_id") - - if len(role_ids) > 1: - # This user has more than one role, so just delete all the ones - # that aren't what we want. - # - # TODO: This branch should be removed when fixing GH-2745. - roles = ( + role_id = request.POST["role_id"] + try: + role = ( request.db.query(Role) .join(User) - .filter( - Role.id.in_(role_ids), - Role.project == project, - Role.role_name != form.role_name.data, - ) - .all() - ) - removing_self = any( - role.role_name == "Owner" and role.user == request.user - for role in roles + .filter(Role.id == role_id, Role.project == project) + .one() ) - if removing_self: + if role.role_name == "Owner" and role.user == request.user: request.session.flash("Cannot remove yourself as Owner", queue="error") else: - for role in roles: - request.db.delete(role) - request.db.add( - JournalEntry( - name=project.name, - action=f"remove {role.role_name} {role.user.username}", - submitted_by=request.user, - submitted_from=request.remote_addr, - ) - ) - project.record_event( - tag="project:role:delete", - ip_address=request.remote_addr, - additional={ - "submitted_by": request.user.username, - "role_name": role.role_name, - "target_user": role.user.username, - }, - ) - request.session.flash("Changed role", queue="success") - else: - # This user only has one role, so get it and change the type. 
- try: - role = ( - request.db.query(Role) - .join(User) - .filter( - Role.id == request.POST.get("role_id"), Role.project == project + request.db.add( + JournalEntry( + name=project.name, + action="change {} {} to {}".format( + role.role_name, role.user.username, form.role_name.data + ), + submitted_by=request.user, + submitted_from=request.remote_addr, ) - .one() ) - if role.role_name == "Owner" and role.user == request.user: - request.session.flash( - "Cannot remove yourself as Owner", queue="error" - ) - else: - request.db.add( - JournalEntry( - name=project.name, - action="change {} {} to {}".format( - role.role_name, role.user.username, form.role_name.data - ), - submitted_by=request.user, - submitted_from=request.remote_addr, - ) - ) - role.role_name = form.role_name.data - project.record_event( - tag="project:role:change", - ip_address=request.remote_addr, - additional={ - "submitted_by": request.user.username, - "role_name": form.role_name.data, - "target_user": role.user.username, - }, - ) - request.session.flash("Changed role", queue="success") - except NoResultFound: - request.session.flash("Could not find role", queue="error") + role.role_name = form.role_name.data + project.record_event( + tag="project:role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": form.role_name.data, + "target_user": role.user.username, + }, + ) + request.session.flash("Changed role", queue="success") + except NoResultFound: + request.session.flash("Could not find role", queue="error") return HTTPSeeOther( request.route_path("manage.project.roles", project_name=project.name) @@ -1495,25 +1399,17 @@ def change_project_role(project, request, _form_class=ChangeRoleForm): has_translations=True, ) def delete_project_role(project, request): - # TODO: This view was modified to handle deleting multiple roles for a - # single user and should be updated when fixing GH-2745 - - roles = ( - request.db.query(Role) - .join(User) - .filter(Role.id.in_(request.POST.getall("role_id")), Role.project == project) - .all() - ) - removing_self = any( - role.role_name == "Owner" and role.user == request.user for role in roles - ) - - if not roles: - request.session.flash("Could not find role", queue="error") - elif removing_self: - request.session.flash("Cannot remove yourself as Owner", queue="error") - else: - for role in roles: + try: + role = ( + request.db.query(Role) + .join(User) + .filter(Role.id == request.POST["role_id"]) + .one() + ) + removing_self = role.role_name == "Owner" and role.user == request.user + if removing_self: + request.session.flash("Cannot remove yourself as Owner", queue="error") + else: request.db.delete(role) request.db.add( JournalEntry( @@ -1532,7 +1428,9 @@ def delete_project_role(project, request): "target_user": role.user.username, }, ) - request.session.flash("Removed role", queue="success") + request.session.flash("Removed role", queue="success") + except NoResultFound: + request.session.flash("Could not find role", queue="error") return HTTPSeeOther( request.route_path("manage.project.roles", project_name=project.name) diff --git a/warehouse/migrations/versions/aaa60e8ea12e_enforce_uniqueness_of_user_id_project_.py b/warehouse/migrations/versions/aaa60e8ea12e_enforce_uniqueness_of_user_id_project_.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/aaa60e8ea12e_enforce_uniqueness_of_user_id_project_.py @@ -0,0 +1,49 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +enforce uniqueness of user_id, project_id on roles + +Revision ID: aaa60e8ea12e +Revises: 5c029d9ef925 +Create Date: 2020-03-04 21:56:32.651065 +""" + +from alembic import op + +revision = "aaa60e8ea12e" +down_revision = "5c029d9ef925" + + +def upgrade(): + op.execute( + """ + DELETE FROM roles + WHERE id IN ( + SELECT id FROM ( + SELECT id, + ROW_NUMBER() OVER ( + PARTITION BY project_id, user_id ORDER BY role_name DESC + ) as row_num + FROM roles + ) t + WHERE t.row_num > 1 + ) + RETURNING * + """ + ) + op.create_unique_constraint( + "_roles_user_project_uc", "roles", ["user_id", "project_id"] + ) + + +def downgrade(): + op.drop_constraint("_roles_user_project_uc", "roles", type_="unique") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -57,9 +57,12 @@ class Role(db.Model): __tablename__ = "roles" - __table_args__ = (Index("roles_user_id_idx", "user_id"),) + __table_args__ = ( + Index("roles_user_id_idx", "user_id"), + UniqueConstraint("user_id", "project_id", name="_roles_user_project_uc"), + ) - __repr__ = make_repr("role_name", "user_name", "package_name") + __repr__ = make_repr("role_name") role_name = Column(Text) user_id = Column( @@ -73,16 +76,6 @@ class Role(db.Model): user = orm.relationship(User, lazy=False) project = orm.relationship("Project", lazy=False) - def __gt__(self, other): - """ - Temporary hack to allow us to only display the 'highest' role when - there are multiple for a given user - - TODO: This should be removed when fixing GH-2745. - """ - order = ["Maintainer", "Owner"] # from lowest to highest - return order.index(self.role_name) > order.index(other.role_name) - class ProjectFactory: def __init__(self, request):
diff --git a/tests/common/db/base.py b/tests/common/db/base.py --- a/tests/common/db/base.py +++ b/tests/common/db/base.py @@ -33,6 +33,7 @@ def _create(cls, *args, **kwargs): r = super()._create(*args, **kwargs) session = cls._meta.sqlalchemy_session session.flush() + session.expire_all() return r diff --git a/tests/unit/admin/views/test_projects.py b/tests/unit/admin/views/test_projects.py --- a/tests/unit/admin/views/test_projects.py +++ b/tests/unit/admin/views/test_projects.py @@ -296,13 +296,13 @@ def test_invalid_key_query(self, db_request): reverse=True, ) db_request.matchdict["project_name"] = project.normalized_name - db_request.GET["q"] = "user:{}".format(journals[3].submitted_by) + db_request.GET["q"] = "user:username" result = views.journals_list(project, db_request) assert result == { "journals": journals[:25], "project": project, - "query": "user:{}".format(journals[3].submitted_by), + "query": "user:username", } def test_basic_query(self, db_request): diff --git a/tests/unit/legacy/api/test_simple.py b/tests/unit/legacy/api/test_simple.py --- a/tests/unit/legacy/api/test_simple.py +++ b/tests/unit/legacy/api/test_simple.py @@ -93,10 +93,6 @@ def test_no_files_with_serial(self, db_request): user = UserFactory.create() je = JournalEntryFactory.create(name=project.name, submitted_by=user) - # Make sure that we get any changes made since the JournalEntry was - # saved. - db_request.db.refresh(project) - assert simple.simple_detail(project, db_request) == { "project": project, "files": [], @@ -118,10 +114,6 @@ def test_with_files_no_serial(self, db_request): user = UserFactory.create() JournalEntryFactory.create(submitted_by=user) - # Make sure that we get any changes made since the JournalEntry was - # saved. - db_request.db.refresh(project) - assert simple.simple_detail(project, db_request) == { "project": project, "files": files, @@ -143,10 +135,6 @@ def test_with_files_with_serial(self, db_request): user = UserFactory.create() je = JournalEntryFactory.create(name=project.name, submitted_by=user) - # Make sure that we get any changes made since the JournalEntry was - # saved. - db_request.db.refresh(project) - assert simple.simple_detail(project, db_request) == { "project": project, "files": files, @@ -201,10 +189,6 @@ def test_with_files_with_version_multi_digit(self, db_request): user = UserFactory.create() je = JournalEntryFactory.create(name=project.name, submitted_by=user) - # Make sure that we get any changes made since the JournalEntry was - # saved. 
- db_request.db.refresh(project) - assert simple.simple_detail(project, db_request) == { "project": project, "files": files, diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2217,20 +2217,41 @@ def test_manage_projects(self, db_request): newer_project_with_no_releases = ProjectFactory( releases=[], created=datetime.datetime(2018, 1, 1) ) - db_request.user = UserFactory( - projects=[ - project_with_older_release, - project_with_newer_release, - newer_project_with_no_releases, - older_project_with_no_releases, - ] + db_request.user = UserFactory() + RoleFactory.create( + user=db_request.user, + project=project_with_older_release, + role_name="Maintainer", + ) + RoleFactory.create( + user=db_request.user, project=project_with_newer_release, role_name="Owner" ) - user_second_owner = UserFactory( - projects=[project_with_older_release, older_project_with_no_releases] + RoleFactory.create( + user=db_request.user, + project=newer_project_with_no_releases, + role_name="Owner", + ) + RoleFactory.create( + user=db_request.user, + project=older_project_with_no_releases, + role_name="Maintainer", + ) + user_second_owner = UserFactory() + RoleFactory.create( + user=user_second_owner, + project=project_with_older_release, + role_name="Owner", + ) + RoleFactory.create( + user=user_second_owner, + project=older_project_with_no_releases, + role_name="Owner", + ) + RoleFactory.create( + user=user_second_owner, + project=project_with_newer_release, + role_name="Owner", ) - RoleFactory.create(user=db_request.user, project=project_with_newer_release) - RoleFactory.create(user=db_request.user, project=newer_project_with_no_releases) - RoleFactory.create(user=user_second_owner, project=project_with_newer_release) assert views.manage_projects(db_request) == { "projects": [ @@ -2329,30 +2350,15 @@ def test_delete_project_disallow_deletion(self): pretend.call("manage.project.settings", project_name="foo") ] - def test_get_project_contributors(self, db_request): - project = ProjectFactory.create(name="foo") - db_request.session = pretend.stub( - flash=pretend.call_recorder(lambda *a, **kw: None), - ) - - db_request.user = UserFactory.create() - project.users = [db_request.user] - - res = views.get_project_contributors(project.name, db_request) - assert res == [db_request.user] - def test_get_user_role_in_project_single_role_owner(self, db_request): project = ProjectFactory.create(name="foo") db_request.session = pretend.stub( flash=pretend.call_recorder(lambda *a, **kw: None), ) db_request.user = UserFactory.create() - project.users = [db_request.user] - RoleFactory(user=db_request.user, project=project) + RoleFactory(user=db_request.user, project=project, role_name="Owner") - res = views.get_user_role_in_project( - project.name, db_request.user.username, db_request - ) + res = views.get_user_role_in_project(project, db_request.user, db_request) assert res == "Owner" def test_get_user_role_in_project_single_role_maintainer(self, db_request): @@ -2361,42 +2367,11 @@ def test_get_user_role_in_project_single_role_maintainer(self, db_request): flash=pretend.call_recorder(lambda *a, **kw: None), ) db_request.user = UserFactory.create() - project.users = [db_request.user] RoleFactory(user=db_request.user, project=project, role_name="Maintainer") - res = views.get_user_role_in_project( - project.name, db_request.user.username, db_request - ) + res = views.get_user_role_in_project(project, db_request.user, 
db_request) assert res == "Maintainer" - def test_get_user_role_in_project_two_roles_owner_and_maintainer(self, db_request): - project = ProjectFactory.create(name="foo") - db_request.session = pretend.stub( - flash=pretend.call_recorder(lambda *a, **kw: None), - ) - db_request.user = UserFactory.create() - project.users = [db_request.user] - RoleFactory(user=db_request.user, project=project, role_name="Owner") - RoleFactory(user=db_request.user, project=project, role_name="Maintainer") - - res = views.get_user_role_in_project( - project.name, db_request.user.username, db_request - ) - assert res == "Owner" - - def test_get_user_role_in_project_no_role(self, db_request): - project = ProjectFactory.create(name="foo") - db_request.session = pretend.stub( - flash=pretend.call_recorder(lambda *a, **kw: None), - ) - db_request.user = UserFactory.create() - project.users = [db_request.user] - - res = views.get_user_role_in_project( - project.name, db_request.user.username, db_request - ) - assert res == "" - def test_delete_project(self, monkeypatch, db_request): project = ProjectFactory.create(name="foo") @@ -2407,16 +2382,13 @@ def test_delete_project(self, monkeypatch, db_request): db_request.POST["confirm_project_name"] = project.normalized_name db_request.user = UserFactory.create() + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + get_user_role_in_project = pretend.call_recorder( - lambda project_name, username, req: "Owner" + lambda project, user, req: "Owner" ) monkeypatch.setattr(views, "get_user_role_in_project", get_user_role_in_project) - get_project_contributors = pretend.call_recorder( - lambda project_name, req: [db_request.user] - ) - monkeypatch.setattr(views, "get_project_contributors", get_project_contributors) - send_removed_project_email = pretend.call_recorder(lambda req, user, **k: None) monkeypatch.setattr( views, "send_removed_project_email", send_removed_project_email @@ -2434,12 +2406,8 @@ def test_delete_project(self, monkeypatch, db_request): assert result.headers["Location"] == "/the-redirect" assert get_user_role_in_project.calls == [ - pretend.call(project.name, db_request.user.username, db_request,), - pretend.call(project.name, db_request.user.username, db_request,), - ] - - assert get_project_contributors.calls == [ - pretend.call(project.name, db_request,) + pretend.call(project, db_request.user, db_request,), + pretend.call(project, db_request.user, db_request,), ] assert send_removed_project_email.calls == [ @@ -2612,11 +2580,14 @@ def test_delete_project_release_disallow_deletion(self, monkeypatch): ] def test_delete_project_release(self, monkeypatch): + user = pretend.stub(username=pretend.stub()) release = pretend.stub( version="1.2.3", canonical_version="1.2.3", project=pretend.stub( - name="foobar", record_event=pretend.call_recorder(lambda *a, **kw: None) + name="foobar", + record_event=pretend.call_recorder(lambda *a, **kw: None), + users=[user], ), created=datetime.datetime(2017, 2, 5, 17, 18, 18, 462_634), ) @@ -2630,20 +2601,16 @@ def test_delete_project_release(self, monkeypatch): flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), - user=pretend.stub(username=pretend.stub()), + user=user, remote_addr=pretend.stub(), ) journal_obj = pretend.stub() journal_cls = pretend.call_recorder(lambda **kw: journal_obj) get_user_role_in_project = pretend.call_recorder( - 
lambda project_name, username, req: "Owner" + lambda project, user, req: "Owner" ) monkeypatch.setattr(views, "get_user_role_in_project", get_user_role_in_project) - get_project_contributors = pretend.call_recorder( - lambda project_name, request: [request.user] - ) - monkeypatch.setattr(views, "get_project_contributors", get_project_contributors) monkeypatch.setattr(views, "JournalEntry", journal_cls) send_removed_project_release_email = pretend.call_recorder( @@ -2663,11 +2630,8 @@ def test_delete_project_release(self, monkeypatch): assert result.headers["Location"] == "/the-redirect" assert get_user_role_in_project.calls == [ - pretend.call(release.project.name, request.user.username, request,), - pretend.call(release.project.name, request.user.username, request,), - ] - assert get_project_contributors.calls == [ - pretend.call(release.project.name, request,) + pretend.call(release.project, request.user, request,), + pretend.call(release.project, request.user, request,), ] assert send_removed_project_release_email.calls == [ @@ -2821,6 +2785,7 @@ def test_delete_project_release_file(self, monkeypatch, db_request): release_file = FileFactory.create( release=release, filename=f"foobar-{release.version}.tar.gz" ) + RoleFactory.create(project=project, user=user) db_request.POST = { "confirm_project_name": release.project.name, @@ -2835,15 +2800,10 @@ def test_delete_project_release_file(self, monkeypatch, db_request): db_request.remote_addr = "1.2.3.4" get_user_role_in_project = pretend.call_recorder( - lambda project_name, username, req: "Owner" + lambda project, user, req: "Owner" ) monkeypatch.setattr(views, "get_user_role_in_project", get_user_role_in_project) - get_project_contributors = pretend.call_recorder( - lambda project_name, req: [db_request.user] - ) - monkeypatch.setattr(views, "get_project_contributors", get_project_contributors) - send_removed_project_release_file_email = pretend.call_recorder( lambda req, user, **k: None ) @@ -2885,12 +2845,8 @@ def test_delete_project_release_file(self, monkeypatch, db_request): ] assert get_user_role_in_project.calls == [ - pretend.call(project.name, db_request.user.username, db_request,), - pretend.call(project.name, db_request.user.username, db_request,), - ] - - assert get_project_contributors.calls == [ - pretend.call(project.name, db_request,) + pretend.call(project, db_request.user, db_request,), + pretend.call(project, db_request.user, db_request,), ] assert send_removed_project_release_file_email.calls == [ @@ -3043,7 +2999,7 @@ def test_get_manage_project_roles(self, db_request): ] assert result == { "project": project, - "roles_by_user": {user.username: [role]}, + "roles": {role}, "form": form_obj, } @@ -3071,7 +3027,7 @@ def test_post_new_role_validation_fails(self, db_request): assert form_obj.validate.calls == [pretend.call()] assert result == { "project": project, - "roles_by_user": {user.username: [role]}, + "roles": {role}, "form": form_obj, } @@ -3158,11 +3114,7 @@ def test_post_new_role(self, monkeypatch, db_request): assert result == { "project": project, - "roles_by_user": { - new_user.username: [role], - owner_1.username: [owner_1_role], - owner_2.username: [owner_2_role], - }, + "roles": {role, owner_1_role, owner_2_role}, "form": form_obj, } @@ -3219,7 +3171,7 @@ def test_post_duplicate_role(self, db_request): assert result == { "project": project, - "roles_by_user": {user.username: [role]}, + "roles": {role}, "form": form_obj, } @@ -3269,7 +3221,7 @@ def test_post_unverified_email(self, db_request, with_email): # 
No additional roles are created assert db_request.db.query(Role).all() == [] - assert result == {"project": project, "roles_by_user": {}, "form": form_obj} + assert result == {"project": project, "roles": set(), "form": form_obj} class TestChangeProjectRoles: @@ -3328,51 +3280,6 @@ def test_change_role_invalid_role_name(self, pyramid_request): assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/the-redirect" - def test_change_role_when_multiple(self, db_request): - project = ProjectFactory.create(name="foobar") - user = UserFactory.create(username="testuser") - owner_role = RoleFactory.create(user=user, project=project, role_name="Owner") - maintainer_role = RoleFactory.create( - user=user, project=project, role_name="Maintainer" - ) - new_role_name = "Maintainer" - - db_request.method = "POST" - db_request.user = UserFactory.create() - db_request.remote_addr = "10.10.10.10" - db_request.POST = MultiDict( - [ - ("role_id", owner_role.id), - ("role_id", maintainer_role.id), - ("role_name", new_role_name), - ] - ) - db_request.session = pretend.stub( - flash=pretend.call_recorder(lambda *a, **kw: None) - ) - db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") - - result = views.change_project_role(project, db_request) - - assert db_request.db.query(Role).all() == [maintainer_role] - assert db_request.route_path.calls == [ - pretend.call("manage.project.roles", project_name=project.name) - ] - assert db_request.session.flash.calls == [ - pretend.call("Changed role", queue="success") - ] - assert isinstance(result, HTTPSeeOther) - assert result.headers["Location"] == "/the-redirect" - - entry = ( - db_request.db.query(JournalEntry).options(joinedload("submitted_by")).one() - ) - - assert entry.name == project.name - assert entry.action == "remove Owner testuser" - assert entry.submitted_by == db_request.user - assert entry.submitted_from == db_request.remote_addr - def test_change_missing_role(self, db_request): project = ProjectFactory.create(name="foobar") missing_role_id = str(uuid.uuid4()) @@ -3414,36 +3321,6 @@ def test_change_own_owner_role(self, db_request): assert isinstance(result, HTTPSeeOther) assert result.headers["Location"] == "/the-redirect" - def test_change_own_owner_role_when_multiple(self, db_request): - project = ProjectFactory.create(name="foobar") - user = UserFactory.create(username="testuser") - owner_role = RoleFactory.create(user=user, project=project, role_name="Owner") - maintainer_role = RoleFactory.create( - user=user, project=project, role_name="Maintainer" - ) - - db_request.method = "POST" - db_request.user = user - db_request.POST = MultiDict( - [ - ("role_id", owner_role.id), - ("role_id", maintainer_role.id), - ("role_name", "Maintainer"), - ] - ) - db_request.session = pretend.stub( - flash=pretend.call_recorder(lambda *a, **kw: None) - ) - db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") - - result = views.change_project_role(project, db_request) - - assert db_request.session.flash.calls == [ - pretend.call("Cannot remove yourself as Owner", queue="error") - ] - assert isinstance(result, HTTPSeeOther) - assert result.headers["Location"] == "/the-redirect" - class TestDeleteProjectRoles: def test_delete_role(self, db_request): diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -28,14 +28,6 @@ ) -class TestRole: - def test_role_ordering(self, 
db_request): - project = DBProjectFactory.create() - owner_role = DBRoleFactory.create(project=project, role_name="Owner") - maintainer_role = DBRoleFactory.create(project=project, role_name="Maintainer") - assert max([maintainer_role, owner_role]) == owner_role - - class TestProjectFactory: @pytest.mark.parametrize(("name", "normalized"), [("foo", "foo"), ("Bar", "bar")]) def test_traversal_finds(self, db_request, name, normalized): diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -174,9 +174,6 @@ def test_detail_rendered(self, db_request): for user in users: RoleFactory.create(user=user, project=project) - # Add an extra role for one user, to ensure deduplication - RoleFactory.create(user=users[0], project=project, role_name="another role") - result = views.release_detail(releases[1], db_request) assert result == { @@ -218,9 +215,6 @@ def test_detail_renders(self, monkeypatch, db_request): for user in users: RoleFactory.create(user=user, project=project) - # Add an extra role for one user, to ensure deduplication - RoleFactory.create(user=users[0], project=project, role_name="another role") - # patch the readme rendering logic. render_description = pretend.call_recorder( lambda raw, content_type: "rendered description"
De-duplicate "Owner" and "Maintainer" roles Currently on pypi-legacy, it's possible for a user to have both the "Owner" and "Maintainer" roles. However, an "Owner" can do everything a "Maintainer" can do, so it doesn't make sense for a user to be able to have both roles. In the short term, we decided to add a hack to hide any "duplicate" roles in #2705, and remove the ability to add multiple roles for the same user in Warehouse. Once pypi-legacy is shut down, we can run a migration which removes any of these "duplicate" roles., and remove this hack.
I think I'm unclear -- what can an Owner do that a Maintainer *can't* do? I am probably going to open a documentation issue to put that info somewhere users can get at. Thanks. @brainwane The following information box is included on the 'Collaboration' page where a user has the ability to change these roles: <img width="825" alt="screen shot 2018-02-09 at 5 00 52 pm" src="https://user-images.githubusercontent.com/294415/36054225-d98ad76c-0dba-11e8-94fd-7dcb72649fd7.png"> I think that should probably suffice in terms of documentation, but it probably wouldn't hurt to duplicate it elsewhere. Actually, this should probably get revised, because Owners can do other things too now (like delete files, releases, and the entire project). This was causing the 503 error experienced in #4728, seen here: https://sentry.io/python-software-foundation/warehouse-production/issues/677429114/
2020-03-27T21:37:11Z
[]
[]
pypi/warehouse
7697
pypi__warehouse-7697
[ "7688", "7688" ]
a5111979313a1cef0388e505a25aebb1ecec9b6c
diff --git a/warehouse/migrations/versions/6af76ffb9612_role_role_name_should_not_be_nullable.py b/warehouse/migrations/versions/6af76ffb9612_role_role_name_should_not_be_nullable.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/6af76ffb9612_role_role_name_should_not_be_nullable.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Role.role_name should not be nullable + +Revision ID: 6af76ffb9612 +Revises: aaa60e8ea12e +Create Date: 2020-03-28 01:20:30.453875 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "6af76ffb9612" +down_revision = "aaa60e8ea12e" + + +def upgrade(): + op.alter_column("roles", "role_name", existing_type=sa.TEXT(), nullable=False) + + +def downgrade(): + op.alter_column("roles", "role_name", existing_type=sa.TEXT(), nullable=True) diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -64,7 +64,7 @@ class Role(db.Model): __repr__ = make_repr("role_name") - role_name = Column(Text) + role_name = Column(Text, nullable=False) user_id = Column( ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False )
diff --git a/tests/unit/malware/checks/package_turnover/test_check.py b/tests/unit/malware/checks/package_turnover/test_check.py --- a/tests/unit/malware/checks/package_turnover/test_check.py +++ b/tests/unit/malware/checks/package_turnover/test_check.py @@ -21,7 +21,7 @@ from .....common.db.accounts import UserFactory from .....common.db.malware import MalwareCheckFactory -from .....common.db.packaging import ProjectFactory, ReleaseFactory +from .....common.db.packaging import ProjectFactory, ReleaseFactory, RoleFactory def test_initializes(db_session): @@ -90,7 +90,8 @@ def test_user_posture_verdicts_has_2fa(db_session): def test_user_turnover_verdicts(db_session): user = UserFactory.create() - project = ProjectFactory.create(users=[user]) + project = ProjectFactory.create() + RoleFactory.create(user=user, project=project, role_name="Owner") project.record_event( tag="project:role:add", @@ -117,7 +118,8 @@ def test_user_turnover_verdicts(db_session): def test_user_turnover_verdicts_no_turnover(db_session): user = UserFactory.create() - project = ProjectFactory.create(users=[user]) + project = ProjectFactory.create() + RoleFactory.create(user=user, project=project, role_name="Owner") MalwareCheckFactory.create( name="PackageTurnoverCheck", state=MalwareCheckState.Enabled, @@ -130,7 +132,8 @@ def test_user_turnover_verdicts_no_turnover(db_session): def test_scan(db_session, monkeypatch): user = UserFactory.create() - project = ProjectFactory.create(users=[user]) + project = ProjectFactory.create() + RoleFactory.create(user=user, project=project, role_name="Owner") for _ in range(3): ReleaseFactory.create(project=project) @@ -157,7 +160,8 @@ def test_scan(db_session, monkeypatch): def test_scan_too_few_releases(db_session, monkeypatch): user = UserFactory.create() - project = ProjectFactory.create(users=[user]) + project = ProjectFactory.create() + RoleFactory.create(user=user, project=project, role_name="Owner") ReleaseFactory.create(project=project) MalwareCheckFactory.create(
Role.role_name can be null Pairing with @ewdurbin on #2745 we noticed that `Role.role_name` can be null: https://github.com/pypa/warehouse/blob/32f621a359bfa7ecb84d8b0ff77e675aae2cd9e1/warehouse/packaging/models.py#L64 Enforcing this to be `nullable=False` causes a lot of our tests to fail due to our `ProjectFactory` and `UserFactory` implicitly creating roles without names, since they bypass the `RoleFactory`. We should update our test fixtures to allow us to set `nullable=False`. Some possible leads: https://factoryboy.readthedocs.io/en/latest/reference.html
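For context, the kind of explicit fixture the issue asks for might look like the following factory_boy sketch. The `Role` model and the `UserFactory`/`ProjectFactory` factories are assumed from the Warehouse test suite, and the default `role_name` here is an illustrative choice rather than the repository's actual fixture:

```python
import factory


class RoleFactory(factory.alchemy.SQLAlchemyModelFactory):
    """Sketch of an explicit role fixture with a non-null role_name."""

    class Meta:
        # Assumes warehouse.packaging.models.Role is importable.
        model = Role

    role_name = "Maintainer"
    user = factory.SubFactory(UserFactory)
    project = factory.SubFactory(ProjectFactory)


# Usage in a test, instead of relying on implicit role creation:
# RoleFactory.create(user=some_user, project=some_project, role_name="Owner")
```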
After #7689, it looks like only 4 tests are left depending on the implicit Role creation here. I'm not sure if supporting `test_project = ProjectFactory(users=[test_user0, test_user1])` or `test_user = UserFactory(projects=[test_project0, test_project1])` is worth it really.
2020-03-30T12:40:11Z
[]
[]
pypi/warehouse
7794
pypi__warehouse-7794
[ "7585" ]
d5e37b744d3bd1cf9307a74151dd48ffe897d6b1
diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py --- a/warehouse/accounts/forms.py +++ b/warehouse/accounts/forms.py @@ -48,7 +48,7 @@ class TOTPValueMixin: validators=[ wtforms.validators.DataRequired(), wtforms.validators.Regexp( - rf"^[0-9]{{{TOTP_LENGTH}}}$", + rf"^ *([0-9] *){{{TOTP_LENGTH}}}$", message=_( "TOTP code must be ${totp_length} digits.", mapping={"totp_length": TOTP_LENGTH}, @@ -296,7 +296,7 @@ def __init__(self, *args, user_id, user_service, **kwargs): class TOTPAuthenticationForm(TOTPValueMixin, _TwoFactorAuthenticationForm): def validate_totp_value(self, field): - totp_value = field.data.encode("utf8") + totp_value = field.data.replace(" ", "").encode("utf8") if not self.user_service.check_totp_value(self.user_id, totp_value): raise wtforms.validators.ValidationError(_("Invalid TOTP code."))
diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py --- a/tests/unit/accounts/test_forms.py +++ b/tests/unit/accounts/test_forms.py @@ -596,6 +596,14 @@ def test_totp_secret_exists(self, pyramid_config): assert not form.validate() assert str(form.totp_value.errors.pop()) == "TOTP code must be 6 digits." + form = forms.TOTPAuthenticationForm( + data={"totp_value": "1 2 3 4 5 6 7"}, + user_id=pretend.stub(), + user_service=pretend.stub(check_totp_value=lambda *a: True), + ) + assert not form.validate() + assert str(form.totp_value.errors.pop()) == "TOTP code must be 6 digits." + form = forms.TOTPAuthenticationForm( data={"totp_value": "123456"}, user_id=pretend.stub(), @@ -611,6 +619,20 @@ def test_totp_secret_exists(self, pyramid_config): ) assert form.validate() + form = forms.TOTPAuthenticationForm( + data={"totp_value": " 1 2 3 4 5 6 "}, + user_id=pretend.stub(), + user_service=pretend.stub(check_totp_value=lambda *a: True), + ) + assert form.validate() + + form = forms.TOTPAuthenticationForm( + data={"totp_value": "123 456"}, + user_id=pretend.stub(), + user_service=pretend.stub(check_totp_value=lambda *a: True), + ) + assert form.validate() + class TestWebAuthnAuthenticationForm: def test_creation(self):
Ignore whitespace for 2FA As reported in https://github.com/pypa/warehouse/issues/7584, trying to use a 2FA code that has whitespace currently fails: > Scenario 1 (I put the 6 digit number with the space) > > I get the validation message: "Please match the requested format." I assume this behavior is expected, BUT I don't see any expected format on the page. We should remove all whitespace from user-submitted 2FA codes. We might also want to improve the error message here to include the expected format. --- **Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).
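The patch above addresses this by tolerating spaces in the validation regex and stripping them before verification. A minimal standalone sketch of that normalization, assuming a six-digit TOTP code (the `normalize_totp` helper is an illustration, not Warehouse's form code):

```python
import re

TOTP_LENGTH = 6
# Accept optional spaces around and between the digits, as in the patch above.
TOTP_RE = re.compile(rf"^ *([0-9] *){{{TOTP_LENGTH}}}$")


def normalize_totp(value: str) -> str:
    """Validate a user-supplied TOTP code and strip whitespace from it."""
    if TOTP_RE.match(value) is None:
        raise ValueError(f"TOTP code must be {TOTP_LENGTH} digits.")
    return value.replace(" ", "")


assert normalize_totp(" 1 2 3 4 5 6 ") == "123456"
assert normalize_totp("123 456") == "123456"
```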
2020-04-13T16:09:01Z
[]
[]
pypi/warehouse
7910
pypi__warehouse-7910
[ "7434" ]
b0daee4f722fe94e134ff3d1c9ab3920a38db8a5
diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -13,6 +13,7 @@ import binascii import datetime import json +import struct import uuid import pymacaroons @@ -67,20 +68,32 @@ def find_macaroon(self, macaroon_id): return dm + def _deserialize_raw_macaroon(self, raw_macaroon): + raw_macaroon = self._extract_raw_macaroon(raw_macaroon) + + if raw_macaroon is None: + raise InvalidMacaroon("malformed or nonexistent macaroon") + + try: + return pymacaroons.Macaroon.deserialize(raw_macaroon) + except ( + IndexError, + TypeError, + ValueError, + binascii.Error, + struct.error, + MacaroonDeserializationException, + ): + raise InvalidMacaroon("malformed macaroon") + def find_userid(self, raw_macaroon): """ Returns the id of the user associated with the given raw (serialized) macaroon. """ - raw_macaroon = self._extract_raw_macaroon(raw_macaroon) - if raw_macaroon is None: - return None - try: - m = pymacaroons.Macaroon.deserialize(raw_macaroon) - except binascii.Error: - return None - except MacaroonDeserializationException: + m = self._deserialize_raw_macaroon(raw_macaroon) + except InvalidMacaroon: return None dm = self.find_macaroon(m.identifier.decode()) @@ -97,15 +110,7 @@ def verify(self, raw_macaroon, context, principals, permission): Raises InvalidMacaroon if the macaroon is not valid. """ - raw_macaroon = self._extract_raw_macaroon(raw_macaroon) - if raw_macaroon is None: - raise InvalidMacaroon("malformed or nonexistent macaroon") - - try: - m = pymacaroons.Macaroon.deserialize(raw_macaroon) - except MacaroonDeserializationException: - raise InvalidMacaroon("malformed macaroon") - + m = self._deserialize_raw_macaroon(raw_macaroon) dm = self.find_macaroon(m.identifier.decode()) if dm is None:
diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -10,6 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import binascii +import struct + from unittest import mock from uuid import uuid4 @@ -17,6 +20,8 @@ import pymacaroons import pytest +from pymacaroons.exceptions import MacaroonDeserializationException + from warehouse.macaroons import services from warehouse.macaroons.models import Macaroon @@ -144,6 +149,40 @@ def test_verify_invalid_macaroon(self, monkeypatch, user_service, macaroon_servi pretend.call(mock.ANY, context, principals, permissions) ] + def test_deserialize_raw_macaroon_when_none(self, macaroon_service): + raw_macaroon = pretend.stub() + macaroon_service._extract_raw_macaroon = pretend.call_recorder(lambda a: None) + + with pytest.raises(services.InvalidMacaroon): + macaroon_service._deserialize_raw_macaroon(raw_macaroon) + + assert macaroon_service._extract_raw_macaroon.calls == [ + pretend.call(raw_macaroon), + ] + + @pytest.mark.parametrize( + "exception", + [ + IndexError, + TypeError, + ValueError, + binascii.Error, + struct.error, + MacaroonDeserializationException, + ], + ) + def test_deserialize_raw_macaroon(self, monkeypatch, macaroon_service, exception): + raw_macaroon = pretend.stub() + macaroon_service._extract_raw_macaroon = pretend.call_recorder( + lambda a: raw_macaroon + ) + monkeypatch.setattr( + pymacaroons.Macaroon, "deserialize", pretend.raiser(exception) + ) + + with pytest.raises(services.InvalidMacaroon): + macaroon_service._deserialize_raw_macaroon(raw_macaroon) + def test_verify_malformed_macaroon(self, macaroon_service): with pytest.raises(services.InvalidMacaroon): macaroon_service.verify(f"pypi-thiswillnotdeserialize", None, None, None)
Catch additional pymacaroons.Macaroon.deserialize exceptions It appears that someone has [enumerated the various exceptions `pymacaroons.Macaroon.deserialize` might raise](https://github.com/ecordell/pymacaroons/issues/50). It'd be great if that were resolved, but we might want to further harden the work from #7424 to handle these other cases? _Originally posted by @ewdurbin in https://github.com/pypa/warehouse/issues/7298#issuecomment-589957864_ --- **Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).
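For reference, a standalone sketch of the hardened deserialization helper along the lines of the patch above; the exception set mirrors the patch and the linked pymacaroons issue, and `InvalidMacaroon` is redefined here only to keep the example self-contained:

```python
import binascii
import struct

import pymacaroons
from pymacaroons.exceptions import MacaroonDeserializationException


class InvalidMacaroon(Exception):
    """Raised when a raw macaroon cannot be parsed or verified."""


def deserialize_raw_macaroon(raw_macaroon: str) -> pymacaroons.Macaroon:
    """Deserialize a raw macaroon, mapping all known parse failures
    to a single application-level exception."""
    try:
        return pymacaroons.Macaroon.deserialize(raw_macaroon)
    except (
        IndexError,
        TypeError,
        ValueError,
        binascii.Error,
        struct.error,
        MacaroonDeserializationException,
    ):
        raise InvalidMacaroon("malformed macaroon")
```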
I've added the ["good first issue"](https://github.com/pypa/warehouse/labels/good%20first%20issue) label to this issue. In #7876 we discovered that `pymacaroons.Macaroon.deserialize` can also raise an `IndexError`, which is causing https://sentry.io/share/issue/8737571d268f41ae9eef2d12279882d2/ I want to work on this PR, but it would be great if I could get some additional context before I start. With that in mind, I will outline below what I have understood about the problem so far. Please feel free to expand on or add to this initial stab if I have missed something. We are currently using `pymacaroons.Macaroon.deserialize` [here](https://github.com/pypa/warehouse/blob/master/warehouse/macaroons/services.py#L80) and [here](https://github.com/pypa/warehouse/blob/master/warehouse/macaroons/services.py#L105), and we are not catching the other exceptions mentioned in https://github.com/ecordell/pymacaroons/issues/50. So in order to work towards fixing this, we will have to add an `except` clause around both call sites in the PR, and then 1. return None https://github.com/pypa/warehouse/blob/2d6364a12372cbf3ff18aeb861f26432486bb88e/warehouse/macaroons/services.py#L79-L82 2. raise an exception again https://github.com/pypa/warehouse/blob/2d6364a12372cbf3ff18aeb861f26432486bb88e/warehouse/macaroons/services.py#L104-L107 3. log an exception via LOGGER.error (the messages in cases 2 and 3 will have to be discussed). And finally, based on how we handle it, add unit tests. We can also add a helpful FAQ suggesting how the user can further debug the issue, on a case-by-case basis.
2020-05-08T17:21:15Z
[]
[]
pypi/warehouse
7916
pypi__warehouse-7916
[ "7856", "7886" ]
178cf9b69c9bdc347417638874e3818efa4a36e5
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -244,6 +244,7 @@ def send_yanked_project_release_email( "submitter": submitter_name, "submitter_role": submitter_role.lower(), "recipient_role_descr": recipient_role_descr, + "yanked_reason": release.yanked_reason, } diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -112,7 +112,11 @@ def json_release(release, request): # Get all of the releases and files for this project. release_files = ( request.db.query(Release, File) - .options(Load(Release).load_only("version", "requires_python", "yanked")) + .options( + Load(Release).load_only( + "version", "requires_python", "yanked", "yanked_reason" + ) + ) .outerjoin(File) .filter(Release.project == project) .order_by(Release._pypi_ordering.desc(), File.filename) @@ -148,6 +152,7 @@ def json_release(release, request): "url": request.route_url("packaging.file", path=f.path), "requires_python": r.requires_python if r.requires_python else None, "yanked": r.yanked, + "yanked_reason": r.yanked_reason or None, } for f in fs ] @@ -185,6 +190,7 @@ def json_release(release, request): "home_page": release.home_page, "download_url": release.download_url, "yanked": release.yanked, + "yanked_reason": r.yanked_reason or None, }, "urls": releases[release.version], "releases": releases, diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -1056,6 +1056,8 @@ def manage_project_release(self): @view_config(request_method="POST", request_param=["confirm_yank_version"]) def yank_project_release(self): version = self.request.POST.get("confirm_yank_version") + yanked_reason = self.request.POST.get("yanked_reason", "") + if not version: self.request.session.flash("Confirm the request", queue="error") return HTTPSeeOther( @@ -1100,10 +1102,12 @@ def yank_project_release(self): additional={ "submitted_by": self.request.user.username, "canonical_version": self.release.canonical_version, + "yanked_reason": yanked_reason, }, ) self.release.yanked = True + self.release.yanked_reason = yanked_reason self.request.session.flash( f"Yanked release {self.release.version!r}", queue="success" @@ -1180,6 +1184,7 @@ def unyank_project_release(self): ) self.release.yanked = False + self.release.yanked_reason = "" self.request.session.flash( f"Un-yanked release {self.release.version!r}", queue="success" diff --git a/warehouse/migrations/versions/30a7791fea33_add_yanked_reason_column_to_release_.py b/warehouse/migrations/versions/30a7791fea33_add_yanked_reason_column_to_release_.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/30a7791fea33_add_yanked_reason_column_to_release_.py @@ -0,0 +1,36 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +Add yanked_reason column to Release table + +Revision ID: 30a7791fea33 +Revises: 43b0e796a40d +Create Date: 2020-05-09 20:25:19.454034 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "30a7791fea33" +down_revision = "43b0e796a40d" + + +def upgrade(): + op.add_column( + "releases", + sa.Column("yanked_reason", sa.Text(), server_default="", nullable=False), + ) + + +def downgrade(): + op.drop_column("releases", "yanked_reason") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -353,6 +353,8 @@ def __table_args__(cls): # noqa yanked = Column(Boolean, nullable=False, server_default=sql.false()) + yanked_reason = Column(Text, nullable=False, server_default="") + _classifiers = orm.relationship( Classifier, backref="project_releases",
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -1322,6 +1322,7 @@ def test_send_yanked_project_release_email_to_maintainer( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="Yanky Doodle went to town", ) result = email.send_yanked_project_release_email( @@ -1340,6 +1341,7 @@ def test_send_yanked_project_release_email_to_maintainer( "submitter": stub_submitter_user.username, "submitter_role": "owner", "recipient_role_descr": "a maintainer", + "yanked_reason": "Yanky Doodle went to town", } subject_renderer.assert_(project="test_project") @@ -1426,6 +1428,7 @@ def test_send_yanked_project_release_email_to_owner( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="Yanky Doodle went to town", ) result = email.send_yanked_project_release_email( @@ -1444,6 +1447,7 @@ def test_send_yanked_project_release_email_to_owner( "submitter": stub_submitter_user.username, "submitter_role": "owner", "recipient_role_descr": "an owner", + "yanked_reason": "Yanky Doodle went to town", } subject_renderer.assert_(project="test_project") @@ -1532,6 +1536,7 @@ def test_send_unyanked_project_release_email_to_maintainer( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="", ) result = email.send_unyanked_project_release_email( @@ -1636,6 +1641,7 @@ def test_send_unyanked_project_release_email_to_owner( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="", ) result = email.send_unyanked_project_release_email( @@ -1742,6 +1748,7 @@ def test_send_removed_project_release_email_to_maintainer( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="", ) result = email.send_removed_project_release_email( @@ -1846,6 +1853,7 @@ def test_send_removed_project_release_email_to_owner( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="", ) result = email.send_removed_project_release_email( @@ -1952,6 +1960,7 @@ def test_send_removed_project_release_file_email_to_owner( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="", ) result = email.send_removed_project_release_file_email( @@ -2057,6 +2066,7 @@ def test_send_removed_project_release_file_email_to_maintainer( version="0.0.0", project=pretend.stub(name="test_project"), created=datetime.datetime(2017, 2, 5, 0, 0, 0, 0), + yanked_reason="", ) result = email.send_removed_project_release_file_email( diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -261,6 +261,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "requires_python": None, "summary": None, "yanked": False, + "yanked_reason": None, "version": "3.0", }, "releases": { @@ -286,6 +287,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "url": "/the/fake/url/", "requires_python": None, "yanked": False, + "yanked_reason": None, } ], "2.0": [ @@ -309,6 +311,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): 
"url": "/the/fake/url/", "requires_python": None, "yanked": False, + "yanked_reason": None, } ], "3.0": [ @@ -332,6 +335,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "url": "/the/fake/url/", "requires_python": None, "yanked": False, + "yanked_reason": None, } ], }, @@ -354,6 +358,7 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): "url": "/the/fake/url/", "requires_python": None, "yanked": False, + "yanked_reason": None, } ], "last_serial": je.id, @@ -416,6 +421,7 @@ def test_minimal_renders(self, pyramid_config, db_request): "requires_python": None, "summary": None, "yanked": False, + "yanked_reason": None, "version": "0.1", }, "releases": { @@ -438,6 +444,7 @@ def test_minimal_renders(self, pyramid_config, db_request): "url": "/the/fake/url/", "requires_python": None, "yanked": False, + "yanked_reason": None, } ] }, @@ -457,6 +464,7 @@ def test_minimal_renders(self, pyramid_config, db_request): "url": "/the/fake/url/", "requires_python": None, "yanked": False, + "yanked_reason": None, } ], "last_serial": je.id, diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2607,9 +2607,13 @@ def test_yank_project_release(self, monkeypatch): ), created=datetime.datetime(2017, 2, 5, 17, 18, 18, 462_634), yanked=False, + yanked_reason="", ) request = pretend.stub( - POST={"confirm_yank_version": release.version}, + POST={ + "confirm_yank_version": release.version, + "yanked_reason": "Yanky Doodle went to town", + }, method="POST", db=pretend.stub(add=pretend.call_recorder(lambda a: None),), flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), @@ -2644,6 +2648,7 @@ def test_yank_project_release(self, monkeypatch): assert result.headers["Location"] == "/the-redirect" assert release.yanked + assert release.yanked_reason == "Yanky Doodle went to town" assert get_user_role_in_project.calls == [ pretend.call(release.project, request.user, request,), @@ -2684,13 +2689,17 @@ def test_yank_project_release(self, monkeypatch): additional={ "submitted_by": request.user.username, "canonical_version": release.canonical_version, + "yanked_reason": "Yanky Doodle went to town", }, ) ] def test_yank_project_release_no_confirm(self): release = pretend.stub( - version="1.2.3", project=pretend.stub(name="foobar"), yanked=False + version="1.2.3", + project=pretend.stub(name="foobar"), + yanked=False, + yanked_reason="", ) request = pretend.stub( POST={"confirm_yank_version": ""}, @@ -2707,6 +2716,7 @@ def test_yank_project_release_no_confirm(self): assert result.headers["Location"] == "/the-redirect" assert not release.yanked + assert not release.yanked_reason assert request.session.flash.calls == [ pretend.call("Confirm the request", queue="error") @@ -2721,7 +2731,10 @@ def test_yank_project_release_no_confirm(self): def test_yank_project_release_bad_confirm(self): release = pretend.stub( - version="1.2.3", project=pretend.stub(name="foobar"), yanked=False + version="1.2.3", + project=pretend.stub(name="foobar"), + yanked=False, + yanked_reason="", ) request = pretend.stub( POST={"confirm_yank_version": "invalid"}, @@ -2738,6 +2751,7 @@ def test_yank_project_release_bad_confirm(self): assert result.headers["Location"] == "/the-redirect" assert not release.yanked + assert not release.yanked_reason assert request.session.flash.calls == [ pretend.call( @@ -2803,6 +2817,7 @@ def test_unyank_project_release(self, monkeypatch): assert 
result.headers["Location"] == "/the-redirect" assert not release.yanked + assert not release.yanked_reason assert get_user_role_in_project.calls == [ pretend.call(release.project, request.user, request), @@ -2849,10 +2864,16 @@ def test_unyank_project_release(self, monkeypatch): def test_unyank_project_release_no_confirm(self): release = pretend.stub( - version="1.2.3", project=pretend.stub(name="foobar"), yanked=True + version="1.2.3", + project=pretend.stub(name="foobar"), + yanked=True, + yanked_reason="", ) request = pretend.stub( - POST={"confirm_unyank_version": ""}, + POST={ + "confirm_unyank_version": "", + "yanked_reason": "Yanky Doodle went to town", + }, method="POST", flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), @@ -2866,6 +2887,7 @@ def test_unyank_project_release_no_confirm(self): assert result.headers["Location"] == "/the-redirect" assert release.yanked + assert not release.yanked_reason assert request.session.flash.calls == [ pretend.call("Confirm the request", queue="error") @@ -2880,10 +2902,13 @@ def test_unyank_project_release_no_confirm(self): def test_unyank_project_release_bad_confirm(self): release = pretend.stub( - version="1.2.3", project=pretend.stub(name="foobar"), yanked=True + version="1.2.3", + project=pretend.stub(name="foobar"), + yanked=True, + yanked_reason="Old reason", ) request = pretend.stub( - POST={"confirm_unyank_version": "invalid"}, + POST={"confirm_unyank_version": "invalid", "yanked_reason": "New reason"}, method="POST", flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), @@ -2897,6 +2922,7 @@ def test_unyank_project_release_bad_confirm(self): assert result.headers["Location"] == "/the-redirect" assert release.yanked + assert release.yanked_reason == "Old reason" assert request.session.flash.calls == [ pretend.call(
Give maintainers the ability to provide a reason for yanking

PEP 592 permits the use of an arbitrary string which gives a reason for yanking:

> The value of the `data-yanked` attribute, if present, is an arbitrary string that represents the reason for why the file has been yanked. Tools that process the simple repository API MAY surface this string to end users.

PyPI does not currently support this. Completing this issue would include a UI to allow the user to set this when yanking, an update to the DB model to change this from a boolean to a text field, and an update to the simple page to surface this value if present.

---

**Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev).

/simple serves HTML that can't be parsed by Python's xml.etree if a package has yanked releases

**Describe the bug**
Parsing the HTML served by the `/simple` endpoint results in `xml.etree.ElementTree.ParseError`.

**Expected behavior**
No parse error, as it was before there were any yanked releases, or with packages that don't have any yanked releases (yet).

**To Reproduce**
* Python script `test.py` that contains:
```python
import requests
from xml.etree import ElementTree

simple_pip = requests.get('https://pypi.python.org/simple/pip')
ElementTree.fromstring(simple_pip.text)
```
* run it with `python test.py`, for example (on macOS):
```
$ python3 test.py
Traceback (most recent call last):
  File "test.py", line 4, in <module>
    ElementTree.fromstring(simple_pip.text)
  File "/usr/local/Cellar/python/3.7.7/Frameworks/Python.framework/Versions/3.7/lib/python3.7/xml/etree/ElementTree.py", line 1315, in XML
    parser.feed(text)
xml.etree.ElementTree.ParseError: not well-formed (invalid token): line 143, column 306
```
The problem is the `data-yanked` part in lines like:
```
<a href="https://files.pythonhosted.org/packages/8c/5c/c18d58ab5c1a702bf670e0bd6a77cd4645e4aeca021c6118ef850895cc96/pip-20.0.tar.gz#sha256=5128e9a9401f1d16c1d15b2ed766a79d7813db1538428d0b0ce74838249e3a41" data-requires-python="&gt;=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" data-yanked>pip-20.0.tar.gz</a><br/>
```

**My Platform**
* macOS 10.15.4 with Python 2.7.16 or 3.7.7 (but the same issue occurs on other platforms too)

**Additional context**
* EasyBuild bug report: https://github.com/easybuilders/easybuild/issues/619
* we've worked around this in the upcoming EasyBuild version by stripping out the `data-yanked` part (see https://github.com/easybuilders/easybuild-framework/pull/3303), but this issue still occurs in EasyBuild releases that worked perfectly fine before package releases started getting yanked
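As an aside on the report above, a minimal stdlib-only demonstration (not from the issue itself) of the mismatch: HTML5 allows valueless ("boolean") attributes, while XML requires every attribute to carry a value, which is exactly why the bare `data-yanked` trips `xml.etree`.

```python
# Minimal sketch: the same anchor parses as XML only when data-yanked
# carries a value, which is why emitting data-yanked='' would sidestep
# the ParseError reported above.
from xml.etree import ElementTree

ElementTree.fromstring('<a href="x" data-yanked="">f.tar.gz</a>')  # parses fine

try:
    ElementTree.fromstring('<a href="x" data-yanked>f.tar.gz</a>')  # bare attribute
except ElementTree.ParseError as exc:
    print("not well-formed:", exc)
```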
FYI: pip prints the reason for yanking if it is provided, and if it is not, it prints "Reason for being yanked: \<none given>". As a result of this, any yanked releases on PyPI today will result in pip's output when installing a yanked release saying "Reason for being yanked: \<none given>". Reported/mentioned in https://github.com/pypa/pip/issues/8160#issuecomment-620772136.

Just to confirm, is this issue also taken up by someone to work on as per https://github.com/pypa/warehouse/issues/7886#issuecomment-626093001 ?

Hi @boegel thanks for the report. From https://docs.python.org/3/library/xml.etree.elementtree.html:

> The `xml.etree.ElementTree` module implements a simple and efficient API for parsing and creating XML data.

[PEP 503](https://www.python.org/dev/peps/pep-0503/), which defines the simple API, says that this page is HTML, not XML:

> Within a repository, the root URL (`/` for this PEP which represents the base URL) MUST be a valid HTML5 page with a single anchor element per project in the repository.

Therefore I wouldn't expect to be able to parse a `/simple` page (HTML) with ElementTree (for XML). As is, this page [is valid HTML5](https://validator.w3.org/nu/?doc=https%3A%2F%2Fpypi.org%2Fsimple%2Fpip%2F), so I don't think there's anything for us to do here. I'd recommend using [`html.parser`](https://docs.python.org/3/library/html.parser.html) instead for parsing HTML.

Thanks for the feedback! Unfortunately `html.parser` is not really an option for us currently, since we still support Python 2... It's a shame that there's no interest in fixing this on the PyPI side, since it seems like it could be an easy fix, for example by indicating yanked releases with `data-yanked='yes'` or something, similar to how `data-requires-python` is specified. I didn't see anything about the `data-yanked` attribute in PEP 503; is how yanked releases are specified in `/simple` documented elsewhere?

Yes, in [PEP 592](https://www.python.org/dev/peps/pep-0592/). Unfortunately adding a value to this attribute has meaning according to the PEP, so we can't just make it "yes" or something similar.

@di How about using `data-yanked=''` if no reason for yanking was provided? It seems like that would still match what is specified in PEP 592 (depending on how "no value" is interpreted, I guess).

Yes, I think that would depend on how `pip` is currently interpreting this value.

`data-yanked` and `data-yanked=''` are equivalent to `pip`, based on the tests in https://github.com/pypa/pip/blob/327a31536ff71b99e5aa454bd325b3daf75b666d/tests/unit/test_collector.py#L336-L341

I'm not super enthused by attempting to maintain compatibility with an XML parser, and I think it's likely going to be error prone since this response is HTML5 and not XML. In this case we can possibly do it, but I'm not sure that holds true moving forward. Maybe `/simple` changes infrequently enough and is weird enough that it isn't a big deal and it's worth doing, I dunno. I just worry that long term it's a futile effort.

We've been relying on `/simple` for quite some time now in EasyBuild (I think after the simple API was strongly recommended to us), and we've never run into problems with it up until recently when packages started yanking releases. The issue I reported is quite annoying for us, since it effectively breaks the auto-download-from-PyPI feature we have in all existing EasyBuild releases. We have a workaround for it for the upcoming release, but it's still annoying to many.

That shouldn't be the big motivation here though, of course, but I suspect other people may be running into this too (and it's not trivial to pinpoint the exact issue either if you see the error popping up; it looks like a fluke in PyPI at first, to be honest). Maybe it's sufficient to have a test somewhere that checks whether a page like https://pypi.python.org/simple/pip can be parsed by `xml.etree.ElementTree`, with a pointer to this issue? If some breakage then pops up again in the future, and it's too much effort to avoid it, then so be it. If I can help with that in any way, I'd love to hear it.

I'll try and make sure we don't rely on `ElementTree` anymore in EasyBuild for parsing what is served by `/simple`, since that clearly wasn't the best solution, to be more robust against changes in PyPI in the future, but that leaves the issue in existing EasyBuild releases, which can also be resolved by emitting `data-yanked=''` rather than `data-yanked`.

To add on:

> data-yanked and data-yanked='' are equivalent to pip

Those are not only equivalent to pip; they are in fact defined to be equivalent in HTML5: https://stackoverflow.com/a/23752239/1930508

So I see no downside in adding that. Question however: pip for Python 2 is still (kinda) supported (at least up to some version). Does that access the `/simple` endpoint too? How does it parse the page?

> To add on:
>
> > data-yanked and data-yanked='' are equivalent to pip
>
> Those are not only equivalent to pip; they are in fact defined to be equivalent in HTML5: https://stackoverflow.com/a/23752239/1930508
>
> So I see no downside in adding that.

Oh, even better. Hopefully that makes it less of an issue to change to `data-yanked=''`.

> Question however: pip for Python 2 is still (kinda) supported (at least up to some version). Does that access the `/simple` endpoint too? How does it parse the page?

Latest `pip` still supports Python 2, so no issue there?

> Question however: pip for Python 2 is still (kinda) supported (at least up to some version). Does that access the `/simple` endpoint too? How does it parse the page?

Pip uses https://pypi.org/project/html5lib/ to parse `/simple`.

@di and I discussed the details of #7856 today so that I could pick it up, and it feels like the natural outcome of that work will be `data-yanked="<optional reason for yank>"`.
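Since the thread above settles on HTML parsing rather than XML, here is a hedged, stdlib-only sketch (not EasyBuild's or pip's actual code) of reading a PEP 503 page with `html.parser`, which accepts `data-yanked`, `data-yanked=''`, and a `data-yanked="<reason>"` alike:

```python
# Illustrative only: html.parser reports a valueless attribute as
# (name, None), so bare data-yanked, data-yanked='', and a future
# data-yanked="<reason>" all parse without error.
from html.parser import HTMLParser


class SimpleIndexParser(HTMLParser):
    """Collects the attributes of each anchor on a /simple project page."""

    def __init__(self):
        super().__init__()
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            self.links.append(dict(attrs))


parser = SimpleIndexParser()
parser.feed('<a href="pip-20.0.tar.gz#sha256=abc" data-yanked>pip-20.0.tar.gz</a>')
parser.feed('<a href="pip-20.1.tar.gz#sha256=def">pip-20.1.tar.gz</a>')

for link in parser.links:
    # Membership of "data-yanked" marks a yanked file; its value (None,
    # "", or a reason string) can be surfaced to users per PEP 592.
    print(link["href"], "yanked" if "data-yanked" in link else "ok")
```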
2020-05-09T23:21:57Z
[]
[]
pypi/warehouse
7,927
pypi__warehouse-7927
[ "6683" ]
5641afcf67856f959e03b62ff9df5d7f060c64ea
diff --git a/warehouse/i18n/__init__.py b/warehouse/i18n/__init__.py --- a/warehouse/i18n/__init__.py +++ b/warehouse/i18n/__init__.py @@ -19,19 +19,21 @@ from warehouse.cache.http import add_vary -# Taken from: -# https://github.com/django/django/blob/master/django/conf/locale/__init__.py KNOWN_LOCALES = { - "en": "English", # English - "es": "español", # Spanish - "fr": "français", # French - "ja": "日本語", # Japanese - "pt_BR": "Português Brasileiro", # Brazilian Portugeuse - "uk": "Українська", # Ukrainian - "el": "Ελληνικά", # Greek - "de": "Deutsch", # German - "zh_Hans": "简体中文", # Simplified Chinese - "ru": "Русский", # Russian + identifier: Locale.parse(identifier, sep="_") + for identifier in [ + "en", # English + "es", # Spanish + "fr", # French + "ja", # Japanese + "pt_BR", # Brazilian Portugeuse + "uk", # Ukranian + "el", # Greek + "de", # German + "zh_Hans", # Simplified Chinese + "ru", # Russian + "he", # Hebrew + ] } LOCALE_ATTR = "_LOCALE_" @@ -60,9 +62,9 @@ def __str__(self): def _locale(request): """ - Computes a babel.core:Locale() object for this request. + Gets a babel.core:Locale() object for this request. """ - return Locale.parse(request.locale_name, sep="_") + return KNOWN_LOCALES.get(request.locale_name, "en") def _negotiate_locale(request):
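A hedged, self-contained illustration (not part of the patch or its tests) of what eagerly parsing the identifiers buys: each `KNOWN_LOCALES` value is a Babel `Locale` whose metadata, such as display name and text direction, is what RTL support ultimately needs. The loop and output are assumptions for demonstration only.

```python
# Illustrative sketch: Babel Locale objects expose the per-locale
# metadata needed for an HTML dir="ltr"/"rtl" attribute.
from babel.core import Locale

KNOWN_LOCALES = {ident: Locale.parse(ident, sep="_") for ident in ["en", "he"]}

for ident, locale in KNOWN_LOCALES.items():
    # character_order is e.g. "right-to-left"; text_direction is the
    # CSS shorthand ("ltr" or "rtl"), handy in templates.
    print(ident, locale.display_name, locale.text_direction)
# en English ltr
# he עברית rtl
```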
diff --git a/tests/unit/i18n/test_init.py b/tests/unit/i18n/test_init.py --- a/tests/unit/i18n/test_init.py +++ b/tests/unit/i18n/test_init.py @@ -87,13 +87,12 @@ def view(context, request): def test_sets_locale(monkeypatch): + locale_name = pretend.stub() locale_obj = pretend.stub() - locale_cls = pretend.stub(parse=pretend.call_recorder(lambda l, **kw: locale_obj)) - monkeypatch.setattr(i18n, "Locale", locale_cls) - request = pretend.stub(locale_name=pretend.stub()) + monkeypatch.setattr(i18n, "KNOWN_LOCALES", {locale_name: locale_obj}) + request = pretend.stub(locale_name=locale_name) assert i18n._locale(request) is locale_obj - assert locale_cls.parse.calls == [pretend.call(request.locale_name, sep="_")] def test_negotiate_locale(monkeypatch):
Support for properly displaying the site translated to RTL languages

**What's the problem this feature will solve?**
Display PyPI.org properly when translated to RTL languages such as Arabic, Farsi and Hebrew.

**Describe the solution you'd like**
I can't suggest a specific technical solution, since I'm not familiar with the website's design and implementation.

**Additional context**
The context is the recent work on translation into additional languages.
Options to investigate:

- https://www.npmjs.com/package/postcss-rtl
- https://github.com/MohammadYounes/rtlcss

I wanted to toss in another possibility; GoDaddy just recently developed this WebPack plugin during our efforts to build better BiDi support: https://github.com/godaddy/webpack-rtlcss-plugin

It uses rtlcss under the covers (linked above by @nlhkabu), but might make it a bit easier to use.

At the time of writing, our Arabic translation is at 29%, Hebrew is at ~0%, and Farsi does not yet exist, so this is low- to medium-priority for now.

> our Arabic translation is at 29%, Hebrew is at ~0%, and Farsi does not yet exist

This is a chicken-and-egg problem: there's little point in translating to a language that the site doesn't support at a technical level. It's also more difficult since one can't properly see the result of the translations on the site itself.

If it would help, I'd be willing to help with the Hebrew translation (I'm a native speaker).

To be clear, translations are done through https://hosted.weblate.org/projects/pypa/warehouse/ and can be completed independently of whether PyPI supports them or not. All the currently completed translations were done through that interface, and none of the translators were able to see the result of the translations on the site until they reached ~100% completion and we explicitly added support for their given locale.

I'm also not convinced that our lack of support for RTL languages is what's causing the low completion percentages for those locales. I would assume that most contributors are unaware that we'd have extra work to do to support their locale once it reaches ~100% complete.

> If it would help, I'd be willing to help with the Hebrew translation (I'm a native speaker).

Absolutely! If you're able to significantly increase the completion percentage for just one of these languages, we can prioritize this work to ensure that we're ready to support it when it's fully translated. Let us know if you have any questions!

> If you're able to significantly increase the completion percentage for just one of these languages, we can prioritize this work to ensure that we're ready to support it when it's fully translated.

That's great to hear! This is why I hadn't started helping with the translation earlier: I wasn't sure whether RTL language support would actually be worked on. In that case I'll start working on the translation when I can.

I've translated 50% of the strings for the Hebrew language so far. BTW,

> I would assume that most contributors are unaware that we'd have extra work to do to support their locale once it reaches ~100% complete.

IMO most of those using RTL languages would likely be acutely aware of this, since we see so many issues caused by our languages being RTL in this digital age...

I've completed 100% translation of PyPI.org to Hebrew. Ready for the next step!

@di, with the Hebrew translation done, can this be given higher priority?

Thank you for this @taleinat! @di - maybe we can book some pairing time together to look at this?

@taleinat Yes! @nlhkabu Yes! Will DM you.

@taleinat FYI, @di and I started on this today. Once we have made some more progress, we'll probably be looking for feedback from you as to whether or not everything is displayed correctly.
2020-05-11T20:55:38Z
[]
[]
pypi/warehouse
8,050
pypi__warehouse-8050
[ "7548" ]
418c7511dc367fb410c71be139545d0134ccb0df
diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py --- a/warehouse/accounts/forms.py +++ b/warehouse/accounts/forms.py @@ -341,7 +341,7 @@ def validate_recovery_code_value(self, field): recovery_code_value = field.data.encode("utf-8") if not self.user_service.check_recovery_code(self.user_id, recovery_code_value): - raise wtforms.validators.ValidationError(_("Invalid Recovery Code.")) + raise wtforms.validators.ValidationError(_("Invalid recovery code.")) class RequestPasswordResetForm(forms.Form):
diff --git a/tests/unit/accounts/test_forms.py b/tests/unit/accounts/test_forms.py --- a/tests/unit/accounts/test_forms.py +++ b/tests/unit/accounts/test_forms.py @@ -730,7 +730,7 @@ def test_invalid_recovery_code(self, pyramid_config): ) assert not form.validate() - assert str(form.recovery_code_value.errors.pop()) == "Invalid Recovery Code." + assert str(form.recovery_code_value.errors.pop()) == "Invalid recovery code." def test_valid_recovery_code(self): form = forms.RecoveryCodeAuthenticationForm(
Audit 'recovery code' copy against copywriting guidelines I noticed that some of the text introduced in the new 2fa recovery codes interfaces do not meet the [guidelines](https://warehouse.readthedocs.io/ui-principles/#write-clearly-with-consistent-style-and-terminology), mostly by using `Title Case` instead of `Sentence case`, as recommended. We need to audit and update strings in: - Any emails sent about 2FA recovery codes - The new UIs for generating recovery codes - The new [help text](https://pypi.org/help/#recoverycodes) Don't forget to run `make translations` to update the translation files :) --- **Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [pypa-dev mailing list](https://groups.google.com/forum/#!forum/pypa-dev). **Screenshot Required**: *If your pull request makes a visual change*, include a screenshot of your update. This helps our team give you feedback faster.
2020-06-03T17:59:11Z
[]
[]
pypi/warehouse
8,093
pypi__warehouse-8093
[ "7403" ]
670c3d452b5813f07ddbdc81c265d7739f303ad8
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -159,6 +159,9 @@ def configure(settings=None): maybe_set(settings, "aws.region", "AWS_REGION") maybe_set(settings, "gcloud.credentials", "GCLOUD_CREDENTIALS") maybe_set(settings, "gcloud.project", "GCLOUD_PROJECT") + maybe_set( + settings, "warehouse.release_files_table", "WAREHOUSE_RELEASE_FILES_TABLE" + ) maybe_set(settings, "warehouse.trending_table", "WAREHOUSE_TRENDING_TABLE") maybe_set(settings, "celery.broker_url", "BROKER_URL") maybe_set(settings, "celery.result_url", "REDIS_URL") diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -57,6 +57,7 @@ Release, Role, ) +from warehouse.packaging.tasks import update_bigquery_release_files from warehouse.utils import http, readme ONE_MB = 1 * 1024 * 1024 @@ -1191,6 +1192,7 @@ def file_upload(request): file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit])) project_size_limit = max(filter(None, [MAX_PROJECT_SIZE, project.total_size_limit])) + file_data = None with tempfile.TemporaryDirectory() as tmpdir: temporary_filename = os.path.join(tmpdir, filename) @@ -1366,6 +1368,7 @@ def file_upload(request): ), uploaded_via=request.user_agent, ) + file_data = file_ request.db.add(file_) # TODO: This should be handled by some sort of database trigger or a @@ -1410,6 +1413,15 @@ def file_upload(request): }, ) + # We are flushing the database requests so that we + # can access the server default values when initiating celery + # tasks. + request.db.flush() + + # Push updates to BigQuery + if not request.registry.settings.get("warehouse.release_files_table") is None: + request.task(update_bigquery_release_files).delay(file_data, form) + # Log a successful upload metrics.increment("warehouse.upload.ok", tags=[f"filetype:{form.filetype.data}"]) diff --git a/warehouse/packaging/__init__.py b/warehouse/packaging/__init__.py --- a/warehouse/packaging/__init__.py +++ b/warehouse/packaging/__init__.py @@ -18,7 +18,11 @@ from warehouse.cache.origin import key_factory, receive_set from warehouse.packaging.interfaces import IDocsStorage, IFileStorage from warehouse.packaging.models import File, Project, Release, Role -from warehouse.packaging.tasks import compute_trending, update_description_html +from warehouse.packaging.tasks import ( + compute_trending, + sync_bigquery_release_files, + update_description_html, +) @db.listens_for(User.name, "set") @@ -95,3 +99,6 @@ def includeme(config): # been configured to be able to access BigQuery. if config.get_settings().get("warehouse.trending_table"): config.add_periodic_task(crontab(minute=0, hour=3), compute_trending) + + if config.get_settings().get("warehouse.release_files_table"): + config.add_periodic_task(crontab(minute="*/60"), sync_bigquery_release_files) diff --git a/warehouse/packaging/tasks.py b/warehouse/packaging/tasks.py --- a/warehouse/packaging/tasks.py +++ b/warehouse/packaging/tasks.py @@ -10,9 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime + from warehouse import tasks from warehouse.cache.origin import IOriginCache -from warehouse.packaging.models import Description, Project +from warehouse.packaging.models import Description, File, Project, Release from warehouse.utils import readme @@ -111,3 +113,114 @@ def update_description_html(request): for description in descriptions: description.html = readme.render(description.raw, description.content_type) description.rendered_by = renderer_version + + [email protected]( + bind=True, + ignore_result=True, + acks_late=True, + autoretry_for=(Exception,), + retry_backoff=15, + retry_jitter=False, + max_retries=5, +) +def update_bigquery_release_files(task, request, file, form): + """ + Adds release file metadata to public BigQuery database + """ + bq = request.find_service(name="gcloud.bigquery") + + table_name = request.registry.settings["warehouse.release_files_table"] + table_schema = bq.get_table(table_name).schema + + # Using the schema to populate the data allows us to automatically + # set the values to their respective fields rather than assigning + # values individually + json_rows = dict() + for sch in table_schema: + # The order of data extraction below is determined based on the + # classes that are most recently updated + if hasattr(file, sch.name): + field_data = getattr(file, sch.name) + else: + field_data = form[sch.name].data + + if isinstance(field_data, datetime.datetime): + field_data = field_data.isoformat() + + # Replace all empty objects to None will ensure + # proper checks if a field is nullable or not + if not isinstance(field_data, bool) and not field_data: + field_data = None + + if field_data is None and sch.mode == "REPEATED": + json_rows[sch.name] = [] + else: + json_rows[sch.name] = field_data + json_rows = [json_rows] + + bq.insert_rows_json(table=table_name, json_rows=json_rows) + + [email protected](ignore_result=True, acks_late=True) +def sync_bigquery_release_files(request): + bq = request.find_service(name="gcloud.bigquery") + table_name = request.registry.settings["warehouse.release_files_table"] + table_schema = bq.get_table(table_name).schema + + db_release_files = request.db.query(File).all() + db_file_digests = [file.md5_digest for file in db_release_files] + + bq_file_digests = bq.query(f"SELECT md5_digest FROM {table_name}").result() + bq_file_digests = [row.get("md5_digest") for row in bq_file_digests] + + md5_diff_list = list(set(db_file_digests) - set(bq_file_digests))[:1000] + + release_files = ( + request.db.query(File) + .join(Release, Release.id == File.release_id) + .filter(File.md5_digest.in_(md5_diff_list)) + .all() + ) + + # Using the schema to populate the data allows us to automatically + # set the values to their respective fields rather than assigning + # values individually + def populate_data_using_schema(file): + release = file.release + project = release.project + + row_data = dict() + for sch in table_schema: + # The order of data extraction below is determined based on the + # classes that are most recently updated + if hasattr(file, sch.name): + field_data = getattr(file, sch.name) + elif hasattr(release, sch.name) and sch.name == "description": + field_data = getattr(release, sch.name).raw + elif sch.name == "description_content_type": + field_data = getattr(release, "description").content_type + elif hasattr(release, sch.name): + field_data = getattr(release, sch.name) + elif hasattr(project, sch.name): + field_data = getattr(project, sch.name) + else: + field_data = None + + if 
isinstance(field_data, datetime.datetime): + field_data = field_data.isoformat() + + # Replace all empty objects to None will ensure + # proper checks if a field is nullable or not + if not isinstance(field_data, bool) and not field_data: + field_data = None + + if field_data is None and sch.mode == "REPEATED": + row_data[sch.name] = [] + else: + row_data[sch.name] = field_data + return row_data + + json_rows = [populate_data_using_schema(file) for file in release_files] + + bq.insert_rows_json(table=table_name, json_rows=json_rows) diff --git a/warehouse/tasks.py b/warehouse/tasks.py --- a/warehouse/tasks.py +++ b/warehouse/tasks.py @@ -11,6 +11,7 @@ # limitations under the License. import functools +import logging import urllib.parse import celery @@ -37,6 +38,8 @@ # We need to register that the sqs:// url scheme uses a netloc urllib.parse.uses_netloc.append("sqs") +logger = logging.getLogger(__name__) + class TLSRedisBackend(celery.backends.redis.RedisBackend): def _params_from_url(self, url, defaults): @@ -119,6 +122,9 @@ def _after_commit_hook(self, success, *args, **kwargs): if success: super().apply_async(*args, **kwargs) + def on_failure(self, exc, task_id, args, kwargs, einfo): + logger.error("Task id {id} failed.".format(id=task_id), exc_info=einfo) + def task(**kwargs): kwargs.setdefault("shared", False)
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -49,6 +49,7 @@ Release, Role, ) +from warehouse.packaging.tasks import update_bigquery_release_files from ...common.db.accounts import EmailFactory, UserFactory from ...common.db.classifiers import ClassifierFactory @@ -1359,6 +1360,14 @@ def storage_service_store(path, file_path, *, meta): IMetricsService: metrics, }.get(svc) ) + db_request.registry.settings = { + "warehouse.release_files_table": "example.pypi.distributions" + } + + update_bigquery = pretend.stub( + delay=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.task = pretend.call_recorder(lambda *a, **kw: update_bigquery) resp = legacy.file_upload(db_request) @@ -1437,6 +1446,8 @@ def storage_service_store(path, file_path, *, meta): ) ] + assert db_request.task.calls == [pretend.call(update_bigquery_release_files)] + assert metrics.increment.calls == [ pretend.call("warehouse.upload.attempt"), pretend.call("warehouse.upload.ok", tags=["filetype:sdist"]), diff --git a/tests/unit/packaging/test_init.py b/tests/unit/packaging/test_init.py --- a/tests/unit/packaging/test_init.py +++ b/tests/unit/packaging/test_init.py @@ -19,11 +19,18 @@ from warehouse.accounts.models import Email, User from warehouse.packaging.interfaces import IDocsStorage, IFileStorage from warehouse.packaging.models import File, Project, Release, Role -from warehouse.packaging.tasks import compute_trending, update_description_html +from warehouse.packaging.tasks import ( + compute_trending, + sync_bigquery_release_files, + update_description_html, +) [email protected]("with_trending", [True, False]) -def test_includeme(monkeypatch, with_trending): [email protected]( + ("with_trending", "with_bq_sync"), + ([True, True], [True, False], [False, True], [False, False]), +) +def test_includeme(monkeypatch, with_trending, with_bq_sync): storage_class = pretend.stub( create_service=pretend.call_recorder(lambda *a, **kw: pretend.stub()) ) @@ -32,6 +39,11 @@ def key_factory(keystring, iterate_on=None): return pretend.call(keystring, iterate_on=iterate_on) monkeypatch.setattr(packaging, "key_factory", key_factory) + settings = dict() + if with_trending: + settings["warehouse.trending_table"] = "foobar" + if with_bq_sync: + settings["warehouse.release_files_table"] = "fizzbuzz" config = pretend.stub( maybe_dotted=lambda dotted: storage_class, @@ -42,9 +54,7 @@ def key_factory(keystring, iterate_on=None): settings={"files.backend": "foo.bar", "docs.backend": "wu.tang"} ), register_origin_cache_keys=pretend.call_recorder(lambda c, **kw: None), - get_settings=lambda: ( - {"warehouse.trending_table": "foobar"} if with_trending else {} - ), + get_settings=lambda: settings, add_periodic_task=pretend.call_recorder(lambda *a, **kw: None), ) @@ -104,7 +114,18 @@ def key_factory(keystring, iterate_on=None): ), ] - if with_trending: + if with_trending and with_bq_sync: + assert config.add_periodic_task.calls == [ + pretend.call(crontab(minute="*/5"), update_description_html), + pretend.call(crontab(minute=0, hour=3), compute_trending), + pretend.call(crontab(minute="*/60"), sync_bigquery_release_files), + ] + elif with_bq_sync: + assert config.add_periodic_task.calls == [ + pretend.call(crontab(minute="*/5"), update_description_html), + pretend.call(crontab(minute="*/60"), sync_bigquery_release_files), + ] + elif with_trending: assert config.add_periodic_task.calls == [ pretend.call(crontab(minute="*/5"), 
update_description_html), pretend.call(crontab(minute=0, hour=3), compute_trending), diff --git a/tests/unit/packaging/test_tasks.py b/tests/unit/packaging/test_tasks.py --- a/tests/unit/packaging/test_tasks.py +++ b/tests/unit/packaging/test_tasks.py @@ -13,14 +13,27 @@ import pretend import pytest -from google.cloud.bigquery import Row +from google.cloud.bigquery import Row, SchemaField +from wtforms import Field, Form, StringField from warehouse.cache.origin import IOriginCache from warehouse.packaging.models import Description, Project -from warehouse.packaging.tasks import compute_trending, update_description_html +from warehouse.packaging.tasks import ( + compute_trending, + sync_bigquery_release_files, + update_bigquery_release_files, + update_description_html, +) from warehouse.utils import readme -from ...common.db.packaging import DescriptionFactory, ProjectFactory +from ...common.db.classifiers import ClassifierFactory +from ...common.db.packaging import ( + DependencyFactory, + DescriptionFactory, + FileFactory, + ProjectFactory, + ReleaseFactory, +) class TestComputeTrending: @@ -134,3 +147,293 @@ def test_update_description_html(monkeypatch, db_request): (descriptions[1].raw, readme.render(descriptions[1].raw), current_version), (descriptions[2].raw, readme.render(descriptions[2].raw), current_version), } + + +bq_schema = [ + SchemaField("metadata_version", "STRING", "NULLABLE"), + SchemaField("name", "STRING", "REQUIRED"), + SchemaField("version", "STRING", "REQUIRED"), + SchemaField("summary", "STRING", "NULLABLE"), + SchemaField("description", "STRING", "NULLABLE"), + SchemaField("description_content_type", "STRING", "NULLABLE"), + SchemaField("author", "STRING", "NULLABLE"), + SchemaField("author_email", "STRING", "NULLABLE"), + SchemaField("maintainer", "STRING", "NULLABLE"), + SchemaField("maintainer_email", "STRING", "NULLABLE"), + SchemaField("license", "STRING", "NULLABLE"), + SchemaField("keywords", "STRING", "NULLABLE"), + SchemaField("classifiers", "STRING", "REPEATED"), + SchemaField("platform", "STRING", "REPEATED"), + SchemaField("home_page", "STRING", "NULLABLE"), + SchemaField("download_url", "STRING", "NULLABLE"), + SchemaField("requires_python", "STRING", "NULLABLE"), + SchemaField("requires", "STRING", "REPEATED"), + SchemaField("provides", "STRING", "REPEATED"), + SchemaField("obsoletes", "STRING", "REPEATED"), + SchemaField("requires_dist", "STRING", "REPEATED"), + SchemaField("provides_dist", "STRING", "REPEATED"), + SchemaField("obsoletes_dist", "STRING", "REPEATED"), + SchemaField("requires_external", "STRING", "REPEATED"), + SchemaField("project_urls", "STRING", "REPEATED"), + SchemaField("uploaded_via", "STRING", "NULLABLE"), + SchemaField("upload_time", "TIMESTAMP", "REQUIRED"), + SchemaField("filename", "STRING", "REQUIRED"), + SchemaField("size", "INTEGER", "REQUIRED"), + SchemaField("path", "STRING", "REQUIRED"), + SchemaField("python_version", "STRING", "REQUIRED"), + SchemaField("packagetype", "STRING", "REQUIRED"), + SchemaField("comment_text", "STRING", "NULLABLE"), + SchemaField("has_signature", "BOOLEAN", "REQUIRED"), + SchemaField("md5_digest", "STRING", "REQUIRED"), + SchemaField("sha256_digest", "STRING", "REQUIRED"), + SchemaField("blake2_256_digest", "STRING", "REQUIRED"), +] + + +class TestUpdateBigQueryMetadata: + class ListField(Field): + def process_formdata(self, valuelist): + self.data = [v.strip() for v in valuelist if v.strip()] + + input_parameters = [ + ( + { + "metadata_version": StringField(default="1.2").bind(Form(), 
"test"), + "name": StringField(default="OfDABTihRTmE").bind(Form(), "test"), + "version": StringField(default="1.0").bind(Form(), "test"), + "summary": StringField(default="").bind(Form(), "test"), + "description": StringField(default="an example description").bind( + Form(), "test" + ), + "author": StringField(default="").bind(Form(), "test"), + "description_content_type": StringField(default="").bind(Form(), "a"), + "author_email": StringField(default="").bind(Form(), "test"), + "maintainer": StringField(default="").bind(Form(), "test"), + "maintainer_email": StringField(default="").bind(Form(), "test"), + "license": StringField(default="").bind(Form(), "test"), + "keywords": StringField(default="").bind(Form(), "test"), + "classifiers": ListField( + default=["Environment :: Other Environment"] + ).bind(Form(), "test"), + "platform": StringField(default="").bind(Form(), "test"), + "home_page": StringField(default="").bind(Form(), "test"), + "download_url": StringField(default="").bind(Form(), "test"), + "requires_python": StringField(default="").bind(Form(), "test"), + "pyversion": StringField(default="source").bind(Form(), "test"), + "filetype": StringField(default="sdist").bind(Form(), "test"), + "comment": StringField(default="").bind(Form(), "test"), + "md5_digest": StringField( + default="7fcdcb15530ea82d2a5daf98a4997c57" + ).bind(Form(), "test"), + "sha256_digest": StringField( + default=( + "a983cbea389641f78541e25c14ab1a488ede2641119a5be807e" + "94645c4f3d25d" + ) + ).bind(Form(), "test"), + "blake2_256_digest": StringField(default="").bind(Form(), "test"), + "requires": ListField(default=[]).bind(Form(), "test"), + "provides": ListField(default=[]).bind(Form(), "test"), + "obsoletes": ListField(default=[]).bind(Form(), "test"), + "requires_dist": ListField(default=[]).bind(Form(), "test"), + "provides_dist": ListField(default=[]).bind(Form(), "test"), + "obsoletes_dist": ListField(default=[]).bind(Form(), "test"), + "requires_external": ListField(default=[]).bind(Form(), "test"), + "project_urls": ListField(default=[]).bind(Form(), "test"), + }, + bq_schema, + ) + ] + + @pytest.mark.parametrize(("form_factory", "bq_schema"), input_parameters) + def test_insert_new_row(self, db_request, form_factory, bq_schema): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + release_file = FileFactory.create( + release=release, filename=f"foobar-{release.version}.tar.gz" + ) + + # Process the mocked wtform fields + for key, value in form_factory.items(): + if isinstance(value, StringField) or isinstance(value, self.ListField): + value.process(None) + + @pretend.call_recorder + def insert_rows(table, json_rows): + if table != "example.pypi.distributions": + raise Exception("Incorrect table name") + return [] + + get_table = pretend.stub(schema=bq_schema) + bigquery = pretend.stub( + get_table=pretend.call_recorder(lambda t: get_table), + insert_rows_json=insert_rows, + ) + + @pretend.call_recorder + def find_service(name=None): + if name == "gcloud.bigquery": + return bigquery + raise LookupError + + db_request.find_service = find_service + db_request.registry.settings = { + "warehouse.release_files_table": "example.pypi.distributions" + } + + task = pretend.stub() + update_bigquery_release_files(task, db_request, release_file, form_factory) + + assert db_request.find_service.calls == [pretend.call(name="gcloud.bigquery")] + assert bigquery.get_table.calls == [pretend.call("example.pypi.distributions")] + assert 
bigquery.insert_rows_json.calls == [ + pretend.call( + table="example.pypi.distributions", + json_rows=[ + { + "metadata_version": form_factory["metadata_version"].data, + "name": form_factory["name"].data, + "version": release_file.release.version, + "summary": form_factory["summary"].data or None, + "description": form_factory["description"].data or None, + "description_content_type": form_factory[ + "description_content_type" + ].data + or None, + "author": form_factory["author"].data or None, + "author_email": form_factory["author_email"].data or None, + "maintainer": form_factory["maintainer"].data or None, + "maintainer_email": form_factory["maintainer_email"].data + or None, + "license": form_factory["license"].data or None, + "keywords": form_factory["description_content_type"].data + or None, + "classifiers": form_factory["classifiers"].data or [], + "platform": form_factory["platform"].data or [], + "home_page": form_factory["home_page"].data or None, + "download_url": form_factory["download_url"].data or None, + "requires_python": form_factory["requires_python"].data or None, + "requires": form_factory["requires"].data or [], + "provides": form_factory["provides"].data or [], + "obsoletes": form_factory["obsoletes"].data or [], + "requires_dist": form_factory["requires_dist"].data or [], + "provides_dist": form_factory["provides_dist"].data or [], + "obsoletes_dist": form_factory["obsoletes_dist"].data or [], + "requires_external": form_factory["requires_external"].data + or [], + "project_urls": form_factory["project_urls"].data or [], + "uploaded_via": release_file.uploaded_via, + "upload_time": release_file.upload_time.isoformat(), + "filename": release_file.filename, + "size": release_file.size, + "path": release_file.path, + "python_version": release_file.python_version, + "packagetype": release_file.packagetype, + "comment_text": release_file.comment_text or None, + "has_signature": release_file.has_signature, + "md5_digest": release_file.md5_digest, + "sha256_digest": release_file.sha256_digest, + "blake2_256_digest": release_file.blake2_256_digest, + }, + ], + ) + ] + + +class TestSyncBigQueryMetadata: + @pytest.mark.parametrize("bq_schema", [bq_schema]) + def test_sync_rows(self, db_request, bq_schema): + project = ProjectFactory.create() + description = DescriptionFactory.create() + release = ReleaseFactory.create(project=project, description=description) + release_file = FileFactory.create( + release=release, filename=f"foobar-{release.version}.tar.gz" + ) + release_file2 = FileFactory.create( + release=release, filename=f"fizzbuzz-{release.version}.tar.gz" + ) + release._classifiers.append(ClassifierFactory.create(classifier="foo :: bar")) + release._classifiers.append(ClassifierFactory.create(classifier="foo :: baz")) + release._classifiers.append(ClassifierFactory.create(classifier="fiz :: buz")) + DependencyFactory.create(release=release, kind=1) + DependencyFactory.create(release=release, kind=1) + DependencyFactory.create(release=release, kind=2) + DependencyFactory.create(release=release, kind=3) + DependencyFactory.create(release=release, kind=4) + + query = pretend.stub( + result=pretend.call_recorder( + lambda *a, **kw: [{"md5_digest": release_file2.md5_digest}] + ) + ) + get_table = pretend.stub(schema=bq_schema) + bigquery = pretend.stub( + get_table=pretend.call_recorder(lambda t: get_table), + insert_rows_json=pretend.call_recorder(lambda *a, **kw: []), + query=pretend.call_recorder(lambda q: query), + ) + + @pretend.call_recorder + def 
find_service(name=None): + if name == "gcloud.bigquery": + return bigquery + raise LookupError + + db_request.find_service = find_service + db_request.registry.settings = { + "warehouse.release_files_table": "example.pypi.distributions" + } + + sync_bigquery_release_files(db_request) + + assert db_request.find_service.calls == [pretend.call(name="gcloud.bigquery")] + assert bigquery.get_table.calls == [pretend.call("example.pypi.distributions")] + assert bigquery.query.calls == [ + pretend.call("SELECT md5_digest FROM example.pypi.distributions") + ] + assert bigquery.insert_rows_json.calls == [ + pretend.call( + table="example.pypi.distributions", + json_rows=[ + { + "metadata_version": None, + "name": project.name, + "version": release.version, + "summary": release.summary, + "description": description.raw, + "description_content_type": description.content_type or None, + "author": release.author or None, + "author_email": release.author_email or None, + "maintainer": release.maintainer or None, + "maintainer_email": release.maintainer_email or None, + "license": release.license or None, + "keywords": release.keywords or None, + "classifiers": release.classifiers or [], + "platform": release.platform or [], + "home_page": release.home_page or None, + "download_url": release.download_url or None, + "requires_python": release.requires_python or None, + "requires": release.requires or [], + "provides": release.provides or [], + "obsoletes": release.obsoletes or [], + "requires_dist": release.requires_dist or [], + "provides_dist": release.provides_dist or [], + "obsoletes_dist": release.obsoletes_dist or [], + "requires_external": release.requires_external or [], + "project_urls": release.project_urls or [], + "uploaded_via": release_file.uploaded_via, + "upload_time": release_file.upload_time.isoformat(), + "filename": release_file.filename, + "size": release_file.size, + "path": release_file.path, + "python_version": release_file.python_version, + "packagetype": release_file.packagetype, + "comment_text": release_file.comment_text or None, + "has_signature": release_file.has_signature, + "md5_digest": release_file.md5_digest, + "sha256_digest": release_file.sha256_digest, + "blake2_256_digest": release_file.blake2_256_digest, + }, + ], + ) + ] diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -17,6 +17,7 @@ import transaction from celery import Celery, Task +from celery.exceptions import MaxRetriesExceededError from kombu import Queue from pyramid import scripting from pyramid_retry import RetryableException @@ -153,6 +154,24 @@ def test_after_commit_hook(self, monkeypatch, success): else: assert apply_async.calls == [] + def test_on_failure(self, monkeypatch): + task = tasks.WarehouseTask() + task.app = Celery() + + logger = pretend.stub(error=pretend.call_recorder(lambda *a, **kw: None)) + monkeypatch.setattr(tasks, "logger", logger) + + with pytest.raises(MaxRetriesExceededError) as exc_info: + raise (MaxRetriesExceededError) + + task.on_failure( + MaxRetriesExceededError, "1234", pretend.stub(), pretend.stub(), exc_info + ) + + assert logger.error.calls == [ + pretend.call("Task id 1234 failed.", exc_info=exc_info) + ] + def test_creates_request(self, monkeypatch): registry = pretend.stub() pyramid_env = {"request": pretend.stub()}
Public Dataset for distribution metadata

As requested in https://github.com/pypa/packaging-problems/issues/323, we should explore publishing the metadata for each released distribution in a public dataset via BigQuery.

I'm imagining that each row would contain all the core metadata fields included in each release, as well as filename, digests, file size, upload time, URL to the distribution, etc. Essentially everything in the ["Release" JSON API](https://warehouse.readthedocs.io/api-reference/json/#release), with the per-release `info` field included for every individual distribution.

Once we're publishing to the dataset on upload, we'd also need to backfill prior distributions as well.

Not entirely sure what we'd name it, does `the-psf:pypi.distributions` make sense?
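For a sense of how such a dataset would be consumed, here is a hypothetical sketch using the `google-cloud-bigquery` client. The table name `the-psf.pypi.distributions` is only the naming suggestion floated above, not a confirmed resource; the columns mirror fields in the patch's schema (`name`, `version`, `filename`, `upload_time`), and ambient GCP credentials are assumed.

```python
# Hypothetical query against the proposed public table.
from google.cloud import bigquery

client = bigquery.Client()  # assumes default credentials and project

query = """
    SELECT name, version, filename, upload_time
    FROM `the-psf.pypi.distributions`
    WHERE name = 'pip'
    ORDER BY upload_time DESC
    LIMIT 10
"""

for row in client.query(query).result():
    print(row.name, row.version, row.filename, row.upload_time)
```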
One problem with distributing via BigQuery is that it adds additional barriers to accessing the data, although it is sometimes useful to run a quick query on the data without setting anything up. What will be the size of the metadata dump? I think it should only be a few GBs. Can't it be distributed via alternate channels?

@ChillarAnand you bring up a great point about barrier to entry, but as far as I'm aware there aren't really any good "requestor pays" options for online queryable datasets aside from BigQuery. We could publish it as a single file, but I'm not sure how much less of a barrier that is. In addition, when combined with the existing data we already have in BigQuery, this metadata would provide all kinds of interesting options for analyzing downloads.

Another thought is how we handle when releases and such are deleted: should they be removed from the public dataset? If the public dataset matches PyPI's db 1:1, it would really be a headache for people doing retrospective analysis.

> Another thought is how we handle when releases and such are deleted, should they be removed from the public dataset?

IMO, they should not.

@ewdurbin I agree. I was wondering whether, if the dump size is small, we could also distribute it via Google Drive, Dropbox, or other channels. This makes it easy to play with the data offline.

@ChillarAnand Ultimately I'm not sure if the limited volunteer admin time can be spent maintaining two sources, but the dataset is permissibly licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/), so redistributions of dumps of the metadata discussed here would be 100% OK.

For the time being I'm going to work under the assumption that we'll be outputting to BigQuery.

Actually, this brings up a possible concern with licensing. We'd need to be careful to ensure that what is published in these tables _can_ be licensed under Creative Commons. This may exclude us from some fields like description/description_html.
2020-06-11T17:03:20Z
[]
[]
pypi/warehouse
8,111
pypi__warehouse-8111
[ "7778" ]
696162baf8a0efc140b487ac888c8458fa8a9b9b
diff --git a/warehouse/search/tasks.py b/warehouse/search/tasks.py --- a/warehouse/search/tasks.py +++ b/warehouse/search/tasks.py @@ -41,7 +41,7 @@ def _project_docs(db, project_name=None): releases_list = ( db.query(Release.id) - .filter(Release.yanked.is_(False)) + .filter(Release.yanked.is_(False), Release.files) .order_by( Release.project_id, Release.is_prerelease.nullslast(),
diff --git a/tests/unit/search/test_tasks.py b/tests/unit/search/test_tasks.py --- a/tests/unit/search/test_tasks.py +++ b/tests/unit/search/test_tasks.py @@ -31,7 +31,7 @@ unindex_project, ) -from ...common.db.packaging import ProjectFactory, ReleaseFactory +from ...common.db.packaging import FileFactory, ProjectFactory, ReleaseFactory def test_project_docs(db_session): @@ -45,6 +45,16 @@ def test_project_docs(db_session): for p in projects } + for p in projects: + for r in releases[p]: + r.files = [ + FileFactory.create( + release=r, + filename="{}-{}.tar.gz".format(p.name, r.version), + python_version="source", + ) + ] + assert list(_project_docs(db_session)) == [ { "_id": p.normalized_name, @@ -75,6 +85,16 @@ def test_single_project_doc(db_session): for p in projects } + for p in projects: + for r in releases[p]: + r.files = [ + FileFactory.create( + release=r, + filename="{}-{}.tar.gz".format(p.name, r.version), + python_version="source", + ) + ] + assert list(_project_docs(db_session, project_name=projects[1].name)) == [ { "_id": p.normalized_name, @@ -95,6 +115,47 @@ def test_single_project_doc(db_session): ] +def test_project_docs_empty(db_session): + projects = [ProjectFactory.create() for _ in range(2)] + releases = { + p: sorted( + [ReleaseFactory.create(project=p) for _ in range(3)], + key=lambda r: packaging.version.parse(r.version), + reverse=True, + ) + for p in projects + } + + project_with_files = projects[0] + for r in releases[project_with_files]: + r.files = [ + FileFactory.create( + release=r, + filename="{}-{}.tar.gz".format(project_with_files.name, r.version), + python_version="source", + ) + ] + + assert list(_project_docs(db_session)) == [ + { + "_id": p.normalized_name, + "_type": "doc", + "_source": { + "created": p.created, + "name": p.name, + "normalized_name": p.normalized_name, + "version": [r.version for r in prs], + "latest_version": first(prs, key=lambda r: not r.is_prerelease).version, + "description": first( + prs, key=lambda r: not r.is_prerelease + ).description.raw, + }, + } + for p, prs in sorted(releases.items(), key=lambda x: x[0].id) + if p == project_with_files + ] + + class FakeESIndices: def __init__(self): self.indices = {}
Search default or option: exclude projects with zero downloadable distributions

**What's the problem this feature will solve?**

Some PyPI projects have zero past releases, or have released distributions in the past but then deleted the distributions. When users search for projects, they are probably searching for packages they can download, and may be frustrated if the results include projects without any downloadable files.

**Describe the solution you'd like**

Either by default or as a faceted search option, we could filter out of the search results any project that has no downloadable distributions available.

**Additional context**

Requested in https://github.com/pypa/warehouse/issues/194#issuecomment-41197783 by @agronholm and seconded in the subsequent comment by @dstufft.
This can be done by adjusting the query used for reindexing to exclude releases without files: https://github.com/pypa/warehouse/blob/dc120b9c0f8fba9dc98809e68e1b56a1d1bda5e6/warehouse/search/tasks.py#L33-L95
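As an illustration, a minimal sketch of that exclusion, assuming warehouse's SQLAlchemy models in which `Release` has a one-to-many `files` relationship. `Release.files.any()` is the explicit spelling of "has at least one file" and renders as an EXISTS subquery; the merged patch above passes the relationship to `filter()` directly, which has the same excluding effect.

```python
from warehouse.packaging.models import Release


def releases_with_files(db):
    # Only consider releases that are not yanked and have at least one
    # uploaded file; releases whose files were all deleted are skipped,
    # so they never make it into the search index.
    return db.query(Release.id).filter(
        Release.yanked.is_(False), Release.files.any()
    )
```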
2020-06-16T04:56:06Z
[]
[]
pypi/warehouse
8,129
pypi__warehouse-8129
[ "7446" ]
f641e1f057414a08a84444cb87d423123a13c407
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -59,9 +59,13 @@ ) from warehouse.utils import http, readme -MAX_FILESIZE = 60 * 1024 * 1024 # 60M -MAX_SIGSIZE = 8 * 1024 # 8K -MAX_PROJECT_SIZE = 10 * 1024 * 1024 * 1024 # 10GB +ONE_MB = 1 * 1024 * 1024 +ONE_GB = 1 * 1024 * 1024 * 1024 + +MAX_FILESIZE = 60 * ONE_MB +MAX_SIGSIZE = 8 * 1024 +MAX_PROJECT_SIZE = 10 * ONE_GB + PATH_HASHER = "blake2_256" @@ -1185,6 +1189,7 @@ def file_upload(request): # it does then it may or may not be smaller or larger than our global file # size limits. file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit])) + project_size_limit = max(filter(None, [MAX_PROJECT_SIZE, project.total_size_limit])) with tempfile.TemporaryDirectory() as tmpdir: temporary_filename = os.path.join(tmpdir, filename) @@ -1205,12 +1210,22 @@ def file_upload(request): HTTPBadRequest, "File too large. " + "Limit for project {name!r} is {limit} MB. ".format( - name=project.name, limit=file_size_limit // (1024 * 1024) + name=project.name, limit=file_size_limit // ONE_MB ) + "See " + request.help_url(_anchor="file-size-limit") + " for more information.", ) + if file_size + project.total_size > project_size_limit: + raise _exc_with_message( + HTTPBadRequest, + "Project size too large. Limit for " + + "project {name!r} total size is {limit} GB. ".format( + name=project.name, limit=project_size_limit // ONE_GB, + ) + + "See " + + request.help_url(_anchor="project-size-limit"), + ) fp.write(chunk) for hasher in file_hashes.values(): hasher.update(chunk)
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -1887,6 +1887,208 @@ def test_upload_fails_with_too_large_file(self, pyramid_config, db_request): "See /the/help/url/ for more information." ) + def test_upload_fails_with_too_large_project_size_default_limit( + self, pyramid_config, db_request + ): + pyramid_config.testing_securitypolicy(userid=1) + + user = UserFactory.create() + db_request.user = user + EmailFactory.create(user=user) + project = ProjectFactory.create( + name="foobar", + upload_limit=legacy.MAX_FILESIZE, + total_size=legacy.MAX_PROJECT_SIZE - 1, + ) + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * 2), + type="application/tar", + ), + } + ) + db_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/") + + with pytest.raises(HTTPBadRequest) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + + assert db_request.help_url.calls == [pretend.call(_anchor="project-size-limit")] + assert resp.status_code == 400 + assert resp.status == ( + "400 Project size too large." + + " Limit for project 'foobar' total size is 10 GB. " + "See /the/help/url/" + ) + + def test_upload_fails_with_too_large_project_size_custom_limit( + self, pyramid_config, db_request + ): + pyramid_config.testing_securitypolicy(userid=1) + + user = UserFactory.create() + db_request.user = user + EmailFactory.create(user=user) + one_megabyte = 1 * 1024 * 1024 + project = ProjectFactory.create( + name="foobar", + upload_limit=legacy.MAX_FILESIZE, + total_size=legacy.MAX_PROJECT_SIZE, + total_size_limit=legacy.MAX_PROJECT_SIZE + + one_megabyte, # Custom Limit for the project + ) + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format(project.name, release.version) + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (one_megabyte + 1)), + type="application/tar", + ), + } + ) + db_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/") + + with pytest.raises(HTTPBadRequest) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + + assert db_request.help_url.calls == [pretend.call(_anchor="project-size-limit")] + assert resp.status_code == 400 + assert resp.status == ( + "400 Project size too large." + + " Limit for project 'foobar' total size is 10 GB. 
" + "See /the/help/url/" + ) + + def test_upload_succeeds_custom_project_size_limit( + self, pyramid_config, db_request, metrics + ): + pyramid_config.testing_securitypolicy(userid=1) + + user = UserFactory.create() + db_request.user = user + EmailFactory.create(user=user) + one_megabyte = 1 * 1024 * 1024 + project = ProjectFactory.create( + name="foobar", + upload_limit=legacy.MAX_FILESIZE, + total_size=legacy.MAX_PROJECT_SIZE, + total_size_limit=legacy.MAX_PROJECT_SIZE + + (one_megabyte * 60), # Custom Limit for the project + ) + release = ReleaseFactory.create(project=project, version="1.0") + RoleFactory.create(user=user, project=project) + + filename = "{}-{}.tar.gz".format("example", "1.0") + + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + + storage_service = pretend.stub(store=lambda path, filepath, meta: None) + db_request.find_service = lambda svc, name=None, context=None: { + IFileStorage: storage_service, + IMetricsService: metrics, + }.get(svc) + db_request.remote_addr = "10.10.10.10" + db_request.user_agent = "warehouse-tests/6.6.6" + + resp = legacy.file_upload(db_request) + + assert resp.status_code == 200 + + # Ensure that a Project object has been created. + project = db_request.db.query(Project).filter(Project.name == "example").one() + + # Ensure that a Role with the user as owner has been created. + role = ( + db_request.db.query(Role) + .filter((Role.user == user) & (Role.project == project)) + .one() + ) + assert role.role_name == "Owner" + + # Ensure that a Release object has been created. + release = ( + db_request.db.query(Release) + .filter((Release.project == project) & (Release.version == "1.0")) + .one() + ) + + assert release.uploaded_via == "warehouse-tests/6.6.6" + + # Ensure that a File object has been created. + db_request.db.query(File).filter( + (File.release == release) & (File.filename == filename) + ).one() + + # Ensure that a Filename object has been created. + db_request.db.query(Filename).filter(Filename.filename == filename).one() + + # Ensure that all of our journal entries have been created + journals = ( + db_request.db.query(JournalEntry) + .options(joinedload("submitted_by")) + .order_by("submitted_date", "id") + .all() + ) + assert [ + (j.name, j.version, j.action, j.submitted_by, j.submitted_from) + for j in journals + ] == [ + ("example", None, "create", user, "10.10.10.10"), + ( + "example", + None, + "add Owner {}".format(user.username), + user, + "10.10.10.10", + ), + ("example", "1.0", "new release", user, "10.10.10.10"), + ( + "example", + "1.0", + "add source file example-1.0.tar.gz", + user, + "10.10.10.10", + ), + ] + def test_upload_fails_with_too_large_signature(self, pyramid_config, db_request): pyramid_config.testing_securitypolicy(userid=1)
Implement limit on Project.total_size

Now that we have the `Project.total_size` attribute, we should implement a limit on this value across all projects, similar to the per-file limit. This would help catch projects that are a significant burden on our overall size on disk but remain under the 60MB per-file limit. It would also encourage projects that publish large nightlies to clean up older nightlies. I think this limit should probably be something like 100GB, but I'm happy to hear justification for different values. This should also be easy to adjust if need be.
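Reduced to its core, the check the patch above adds to the upload view looks roughly like this sketch: `total_size_limit` is the optional per-project override (None when unset), and the effective limit is whichever of the two is larger. Note the merged patch settled on a 10 GB default rather than the 100 GB floated here.

```python
from pyramid.httpexceptions import HTTPBadRequest

ONE_GB = 1024 * 1024 * 1024
MAX_PROJECT_SIZE = 10 * ONE_GB  # global default; admins may raise it per project


def check_project_size(project, incoming_file_size):
    # The effective limit is the larger of the global default and any
    # per-project override; filter(None, ...) drops an unset override.
    limit = max(filter(None, [MAX_PROJECT_SIZE, project.total_size_limit]))
    if incoming_file_size + project.total_size > limit:
        raise HTTPBadRequest(
            f"Project size too large. Limit for project {project.name!r} "
            f"total size is {limit // ONE_GB} GB."
        )
```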
2020-06-18T01:42:36Z
[]
[]
pypi/warehouse
8,170
pypi__warehouse-8170
[ "7992" ]
b01919396ba22e85d95fb0677abfb168c381836b
diff --git a/warehouse/admin/views/emails.py b/warehouse/admin/views/emails.py --- a/warehouse/admin/views/emails.py +++ b/warehouse/admin/views/emails.py @@ -99,6 +99,17 @@ def email_mass(request): "body_text": row["body_text"], "body_html": row.get("body_html"), }, + { + "tag": "account:email:sent", + "user_id": user.id, + "ip_address": request.remote_addr, + "additional": { + "from_": request.registry.settings.get("mail.sender"), + "to": email.email, + "subject": row["subject"], + "redact_ip": True, + }, + }, ) request.session.flash("Mass emails sent", queue="success") else: diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -20,7 +20,8 @@ from first import first from warehouse import tasks -from warehouse.accounts.interfaces import ITokenService +from warehouse.accounts.interfaces import ITokenService, IUserService +from warehouse.accounts.models import Email from warehouse.email.interfaces import IEmailSender from warehouse.email.services import EmailMessage from warehouse.email.ses.tasks import cleanup as ses_cleanup @@ -33,12 +34,15 @@ def _compute_recipient(user, email): @tasks.task(bind=True, ignore_result=True, acks_late=True) -def send_email(task, request, recipient, msg): +def send_email(task, request, recipient, msg, success_event): msg = EmailMessage(**msg) sender = request.find_service(IEmailSender) try: sender.send(recipient, msg) + + user_service = request.find_service(IUserService, context=None) + user_service.record_event(**success_event) except Exception as exc: task.retry(exc=exc) @@ -56,8 +60,27 @@ def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=Fals if email is None or not (email.verified or allow_unverified): return + # We should only store/display IP address of an 'email sent' event if the user + # who triggered the email event is the one who receives the email. Else display + # 'Redacted' to prevent user privacy concerns. If we don't know the user who + # triggered the action, default to showing the IP of the source. + user_email = request.db.query(Email).filter(Email.email == email.email).one() + redact_ip = user_email.user_id != request.user.id if request.user else False + request.task(send_email).delay( - _compute_recipient(user, email.email), attr.asdict(msg) + _compute_recipient(user, email.email), + attr.asdict(msg), + { + "tag": "account:email:sent", + "user_id": user.id, + "ip_address": request.remote_addr, + "additional": { + "from_": request.registry.settings.get("mail.sender"), + "to": email.email, + "subject": msg.subject, + "redact_ip": redact_ip, + }, + }, )
diff --git a/tests/unit/admin/views/test_emails.py b/tests/unit/admin/views/test_emails.py --- a/tests/unit/admin/views/test_emails.py +++ b/tests/unit/admin/views/test_emails.py @@ -145,9 +145,11 @@ def test_sends_emails(self, db_request): db_request.params = {"csvfile": pretend.stub(file=input_file)} db_request.task = lambda a: pretend.stub(delay=delay) db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.remote_addr = "0.0.0.0" db_request.session = pretend.stub( flash=pretend.call_recorder(lambda *a, **kw: None) ) + db_request.registry.settings = {"mail.sender": "[email protected]"} result = views.email_mass(db_request) @@ -165,6 +167,17 @@ def test_sends_emails(self, db_request): "body_text": "Test Body 1", "body_html": None, }, + { + "tag": "account:email:sent", + "user_id": user1.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": email1.email, + "subject": "Test Subject 1", + "redact_ip": True, + }, + }, ), pretend.call( email2.email, @@ -173,6 +186,17 @@ def test_sends_emails(self, db_request): "body_text": "Test Body 2", "body_html": None, }, + { + "tag": "account:email:sent", + "user_id": user2.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": email2.email, + "subject": "Test Subject 2", + "redact_ip": True, + }, + }, ), ] diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -18,7 +18,7 @@ import pytest from warehouse import email -from warehouse.accounts.interfaces import ITokenService +from warehouse.accounts.interfaces import ITokenService, IUserService from warehouse.email.interfaces import IEmailSender from warehouse.email.services import EmailMessage @@ -82,13 +82,26 @@ class TestSendEmailToUser: def test_sends_to_user_with_verified( self, name, username, primary_email, address, expected ): - task = pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) - request = pretend.stub(task=pretend.call_recorder(lambda x: task)) - user = pretend.stub( name=name, username=username, primary_email=pretend.stub(email=primary_email, verified=True), + id="id", + ) + + task = pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + task=pretend.call_recorder(lambda x: task), + db=pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=user.id) + ) + ), + ), + remote_addr="0.0.0.0", + user=user, + registry=pretend.stub(settings={"mail.sender": "[email protected]"}), ) if address is not None: @@ -103,6 +116,17 @@ def test_sends_to_user_with_verified( pretend.call( expected, {"subject": "My Subject", "body_text": "My Body", "body_html": None}, + { + "tag": "account:email:sent", + "user_id": user.id, + "ip_address": request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": address.email if address else primary_email, + "subject": "My Subject", + "redact_ip": False, + }, + }, ) ] @@ -120,7 +144,7 @@ def test_doesnt_send_with_unverified(self, primary_email, address): user = pretend.stub( primary_email=pretend.stub( email=primary_email, verified=True if address is not None else False - ) + ), ) if address is not None: @@ -148,15 +172,28 @@ def test_doesnt_send_with_unverified(self, primary_email, address): def test_sends_unverified_with_override( self, username, primary_email, address, expected ): - task = 
pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) - request = pretend.stub(task=pretend.call_recorder(lambda x: task)) - user = pretend.stub( username=username, name="", primary_email=pretend.stub( email=primary_email, verified=True if address is not None else False ), + id="id", + ) + + task = pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + task=pretend.call_recorder(lambda x: task), + db=pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=user.id) + ) + ), + ), + remote_addr="0.0.0.0", + user=user, + registry=pretend.stub(settings={"mail.sender": "[email protected]"}), ) if address is not None: @@ -173,12 +210,23 @@ def test_sends_unverified_with_override( pretend.call( expected, {"subject": "My Subject", "body_text": "My Body", "body_html": None}, + { + "tag": "account:email:sent", + "user_id": user.id, + "ip_address": request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": address.email if address else primary_email, + "subject": "My Subject", + "redact_ip": False, + }, + }, ) ] class TestSendEmail: - def test_send_email_success(self, monkeypatch): + def test_send_email_success(self, db_session, monkeypatch): class FakeMailSender: def __init__(self): self.emails = [] @@ -193,17 +241,57 @@ def send(self, recipient, msg): } ) + class FakeUserEventService: + def __init__(self): + self.events = [] + + def record_event(self, user_id, tag, ip_address, additional): + self.events.append( + { + "user_id": user_id, + "tag": tag, + "ip_address": ip_address, + "additional": additional, + } + ) + + user_service = FakeUserEventService() sender = FakeMailSender() task = pretend.stub() request = pretend.stub( - find_service=pretend.call_recorder(lambda *a, **kw: sender) + find_service=pretend.call_recorder( + lambda svc, context=None: { + IUserService: user_service, + IEmailSender: sender, + }.get(svc) + ), ) + user_id = pretend.stub() msg = EmailMessage(subject="subject", body_text="body") - email.send_email(task, request, "recipient", attr.asdict(msg)) - - assert request.find_service.calls == [pretend.call(IEmailSender)] + email.send_email( + task, + request, + "recipient", + attr.asdict(msg), + { + "tag": "account:email:sent", + "user_id": user_id, + "ip_address": "0.0.0.0", + "additional": { + "from_": "[email protected]", + "to": "recipient", + "subject": msg.subject, + "redact_ip": False, + }, + }, + ) + + assert request.find_service.calls == [ + pretend.call(IEmailSender), + pretend.call(IUserService, context=None), + ] assert sender.emails == [ { "subject": "subject", @@ -212,6 +300,19 @@ def send(self, recipient, msg): "recipient": "recipient", } ] + assert user_service.events == [ + { + "user_id": user_id, + "tag": "account:email:sent", + "ip_address": "0.0.0.0", + "additional": { + "from_": "[email protected]", + "to": "recipient", + "subject": msg.subject, + "redact_ip": False, + }, + } + ] def test_send_email_failure(self, monkeypatch): exc = Exception() @@ -228,10 +329,27 @@ def retry(exc): sender, task = FakeMailSender(), Task() request = pretend.stub(find_service=lambda *a, **kw: sender) + user_id = pretend.stub() msg = EmailMessage(subject="subject", body_text="body") with pytest.raises(celery.exceptions.Retry): - email.send_email(task, request, "recipient", attr.asdict(msg)) + email.send_email( + task, + request, + "recipient", + attr.asdict(msg), + { + "tag": "account:email:sent", + "user_id": user_id, + "ip_address": "0.0.0.0", + 
"additional": { + "from_": "[email protected]", + "to": "recipient", + "subject": msg.subject, + "redact_ip": False, + }, + }, + ) assert task.retry.calls == [pretend.call(exc=exc)] @@ -294,6 +412,17 @@ def test_send_password_reset_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_password_reset_email( pyramid_request, (stub_user, stub_email) ) @@ -335,6 +464,19 @@ def test_send_password_reset_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]" + if stub_email + else "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -344,6 +486,9 @@ def test_email_verification_email( self, pyramid_request, pyramid_config, token_service, monkeypatch ): + stub_user = pretend.stub( + id="id", username=None, name=None, email="[email protected]", + ) stub_email = pretend.stub(id="id", email="[email protected]", verified=False) pyramid_request.method = "POST" token_service.dumps = pretend.call_recorder(lambda a: "TOKEN") @@ -370,13 +515,20 @@ def test_email_verification_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) - result = email.send_email_verification_email( - pyramid_request, - ( - pretend.stub(username=None, name=None, email="[email protected]"), - stub_email, + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) ), ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_email_verification_email( + pyramid_request, (stub_user, stub_email,), + ) assert result == { "token": "TOKEN", @@ -406,6 +558,17 @@ def test_email_verification_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": stub_email.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -413,6 +576,7 @@ def test_email_verification_email( class TestPasswordChangeEmail: def test_password_change_email(self, pyramid_request, pyramid_config, monkeypatch): stub_user = pretend.stub( + id="id", username="username", name="", email="[email protected]", @@ -437,6 +601,17 @@ def test_password_change_email(self, pyramid_request, pyramid_config, monkeypatc pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_password_change_email(pyramid_request, stub_user) assert result == 
{"username": stub_user.username} @@ -457,6 +632,17 @@ def test_password_change_email(self, pyramid_request, pyramid_config, monkeypatc ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": stub_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -488,6 +674,17 @@ def test_password_change_email_unverified( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_password_change_email(pyramid_request, stub_user) assert result == {"username": stub_user.username} @@ -504,6 +701,7 @@ def test_password_compromised_email_hibp( self, pyramid_request, pyramid_config, monkeypatch, verified ): stub_user = pretend.stub( + id="id", username="username", name="", email="[email protected]", @@ -528,6 +726,17 @@ def test_password_compromised_email_hibp( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_password_compromised_email_hibp(pyramid_request, stub_user) assert result == {} @@ -545,6 +754,17 @@ def test_password_compromised_email_hibp( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": stub_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -555,6 +775,7 @@ def test_password_compromised_email( self, pyramid_request, pyramid_config, monkeypatch, verified ): stub_user = pretend.stub( + id="id", username="username", name="", email="[email protected]", @@ -579,6 +800,17 @@ def test_password_compromised_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_password_compromised_email(pyramid_request, stub_user) assert result == {} @@ -596,6 +828,17 @@ def test_password_compromised_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": stub_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -604,6 +847,7 @@ class TestAccountDeletionEmail: def test_account_deletion_email(self, pyramid_request, pyramid_config, monkeypatch): stub_user = pretend.stub( + id="id", username="username", name="", email="[email protected]", @@ 
-628,6 +872,17 @@ def test_account_deletion_email(self, pyramid_request, pyramid_config, monkeypat pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_account_deletion_email(pyramid_request, stub_user) assert result == {"username": stub_user.username} @@ -648,6 +903,17 @@ def test_account_deletion_email(self, pyramid_request, pyramid_config, monkeypat ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": stub_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -656,6 +922,7 @@ def test_account_deletion_email_unverified( ): stub_user = pretend.stub( + id="id", username="username", name="", email="[email protected]", @@ -680,6 +947,17 @@ def test_account_deletion_email_unverified( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_account_deletion_email(pyramid_request, stub_user) assert result == {"username": stub_user.username} @@ -696,7 +974,7 @@ def test_primary_email_change_email( ): stub_user = pretend.stub( - email="[email protected]", username="username", name="" + id="id", email="[email protected]", username="username", name="" ) subject_renderer = pyramid_config.testing_add_renderer( "email/primary-email-change/subject.txt" @@ -717,6 +995,17 @@ def test_primary_email_change_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_primary_email_change_email( pyramid_request, (stub_user, pretend.stub(email="[email protected]", verified=True)), @@ -744,6 +1033,17 @@ def test_primary_email_change_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ] @@ -752,7 +1052,7 @@ def test_primary_email_change_email_unverified( ): stub_user = pretend.stub( - email="[email protected]", username="username", name="" + id="id", email="[email protected]", username="username", name="" ) subject_renderer = pyramid_config.testing_add_renderer( "email/primary-email-change/subject.txt" @@ -773,6 +1073,17 @@ def test_primary_email_change_email_unverified( pyramid_request.task = pretend.call_recorder(lambda 
*args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_primary_email_change_email( pyramid_request, (stub_user, pretend.stub(email="[email protected]", verified=False)), @@ -796,12 +1107,14 @@ def test_collaborator_added_email( ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -828,6 +1141,18 @@ def test_collaborator_added_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_collaborator_added_email( pyramid_request, [stub_user, stub_submitter_user], @@ -870,6 +1195,17 @@ def test_collaborator_added_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -883,6 +1219,17 @@ def test_collaborator_added_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -891,12 +1238,14 @@ def test_collaborator_added_email_unverified( ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=False), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -923,6 +1272,17 @@ def test_collaborator_added_email_unverified( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_submitter_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_collaborator_added_email( pyramid_request, [stub_user, stub_submitter_user], @@ -962,6 +1322,17 @@ def test_collaborator_added_email_unverified( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": 
False, + }, + }, ) ] @@ -972,13 +1343,14 @@ def test_added_as_collaborator_email( ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( - username="submitterusername", email="submiteremail" + id="id_2", username="submitterusername", email="submiteremail", ) subject_renderer = pyramid_config.testing_add_renderer( "email/added-as-collaborator/subject.txt" @@ -999,6 +1371,17 @@ def test_added_as_collaborator_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_added_as_collaborator_email( pyramid_request, stub_user, @@ -1034,6 +1417,17 @@ def test_added_as_collaborator_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ) ] @@ -1042,13 +1436,14 @@ def test_added_as_collaborator_email_unverified( ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=False), ) stub_submitter_user = pretend.stub( - username="submitterusername", email="submiteremail" + id="id_2", username="submitterusername", email="submiteremail", ) subject_renderer = pyramid_config.testing_add_renderer( "email/added-as-collaborator/subject.txt" @@ -1069,6 +1464,17 @@ def test_added_as_collaborator_email_unverified( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_added_as_collaborator_email( pyramid_request, stub_user, @@ -1099,12 +1505,14 @@ def test_removed_project_email_to_maintainer( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1131,6 +1539,18 @@ def test_removed_project_email_to_maintainer( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = 
email.send_removed_project_email( pyramid_request, [stub_user, stub_submitter_user], @@ -1171,6 +1591,17 @@ def test_removed_project_email_to_maintainer( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1184,6 +1615,17 @@ def test_removed_project_email_to_maintainer( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1191,12 +1633,14 @@ def test_removed_project_email_to_owner( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1223,6 +1667,18 @@ def test_removed_project_email_to_owner( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + result = email.send_removed_project_email( pyramid_request, [stub_user, stub_submitter_user], @@ -1263,6 +1719,17 @@ def test_removed_project_email_to_owner( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1276,6 +1743,17 @@ def test_removed_project_email_to_owner( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1285,12 +1763,14 @@ def test_send_yanked_project_release_email_to_maintainer( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1318,6 +1798,18 @@ def test_send_yanked_project_release_email_to_maintainer( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings 
= {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -1371,6 +1863,17 @@ def test_send_yanked_project_release_email_to_maintainer( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1384,6 +1887,17 @@ def test_send_yanked_project_release_email_to_maintainer( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1391,12 +1905,14 @@ def test_send_yanked_project_release_email_to_owner( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1424,6 +1940,18 @@ def test_send_yanked_project_release_email_to_owner( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -1477,6 +2005,17 @@ def test_send_yanked_project_release_email_to_owner( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1490,6 +2029,17 @@ def test_send_yanked_project_release_email_to_owner( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1499,12 +2049,14 @@ def test_send_unyanked_project_release_email_to_maintainer( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1532,6 +2084,18 @@ def test_send_unyanked_project_release_email_to_maintainer( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + 
pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -1584,6 +2148,17 @@ def test_send_unyanked_project_release_email_to_maintainer( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1597,6 +2172,17 @@ def test_send_unyanked_project_release_email_to_maintainer( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1604,12 +2190,14 @@ def test_send_unyanked_project_release_email_to_owner( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1637,6 +2225,18 @@ def test_send_unyanked_project_release_email_to_owner( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -1689,6 +2289,17 @@ def test_send_unyanked_project_release_email_to_owner( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1702,6 +2313,17 @@ def test_send_unyanked_project_release_email_to_owner( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1711,12 +2333,14 @@ def test_send_removed_project_release_email_to_maintainer( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1744,6 +2368,18 @@ def test_send_removed_project_release_email_to_maintainer( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + 
query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -1796,6 +2432,17 @@ def test_send_removed_project_release_email_to_maintainer( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1809,6 +2456,17 @@ def test_send_removed_project_release_email_to_maintainer( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1816,12 +2474,14 @@ def test_send_removed_project_release_email_to_owner( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1849,6 +2509,18 @@ def test_send_removed_project_release_email_to_owner( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -1901,6 +2573,17 @@ def test_send_removed_project_release_email_to_owner( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -1914,6 +2597,17 @@ def test_send_removed_project_release_email_to_owner( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -1923,12 +2617,14 @@ def test_send_removed_project_release_file_email_to_owner( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -1956,6 +2652,18 @@ def test_send_removed_project_release_file_email_to_owner( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, 
"send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -2009,6 +2717,17 @@ def test_send_removed_project_release_file_email_to_owner( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -2022,6 +2741,17 @@ def test_send_removed_project_release_file_email_to_owner( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -2029,12 +2759,14 @@ def test_send_removed_project_release_file_email_to_maintainer( self, pyramid_request, pyramid_config, monkeypatch ): stub_user = pretend.stub( + id="id_1", username="username", name="", email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) stub_submitter_user = pretend.stub( + id="id_2", username="submitterusername", name="", email="[email protected]", @@ -2062,6 +2794,18 @@ def test_send_removed_project_release_file_email_to_maintainer( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + ids = [stub_submitter_user.id, stub_user.id] + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=ids.pop()) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_submitter_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + release = pretend.stub( version="0.0.0", project=pretend.stub(name="test_project"), @@ -2115,6 +2859,17 @@ def test_send_removed_project_release_file_email_to_maintainer( ), ), ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": True, + }, + }, ), pretend.call( "submitterusername <[email protected]>", @@ -2128,6 +2883,17 @@ def test_send_removed_project_release_file_email_to_maintainer( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_submitter_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, ), ] @@ -2152,6 +2918,7 @@ def test_two_factor_email( pretty_method, ): stub_user = pretend.stub( + id="id", username="username", name="", email="[email protected]", @@ -2176,6 +2943,17 @@ def test_two_factor_email( pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) monkeypatch.setattr(email, "send_email", send_email) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + 
one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.remote_addr = "0.0.0.0" + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + send_method = getattr(email, f"send_two_factor_{action}_email") result = send_method(pyramid_request, stub_user, method=method) @@ -2197,5 +2975,16 @@ def test_two_factor_email( ), ) ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": pyramid_request.remote_addr, + "additional": { + "from_": "[email protected]", + "to": stub_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, ) ]
list recent sent emails in security/account settings

**What's the problem this feature will solve?**

Sometimes administrators want to send our users email notifications to ask that they do something, but we don't want to inure them to clicking on links in emails, and we want a way to credibly demonstrate that the email is legitimate.

**Describe the solution you'd like**

Via <a href="https://twitter.com/runasand/status/1248385736677576706">this tweet</a> I saw that Facebook archives recent automated emails sent to users so that each user can check in their settings to find those notifications:

> To confirm whether an email claiming to be from Facebook is authentic, review recent emails we've sent in your Security and Login Settings

Could we do this?

**Additional context**

In the next 12-18 months, PyPI will probably add more features or increase the strictness of authentication, etc. for project maintainers and owners. When we do those things, we'll want to notify users in a credible way.
> Could we do this?

Seems reasonable. We could potentially add sent emails to the existing "Security History" feed. Does the Facebook implementation of this feature include the entire email, or just the subject/timestamp?

Good question! I don't know. (I don't have a Facebook account.)

Me neither 🙂

It's a table, with:

- Date (e.g. "July 20, 20:26")
- Sent to (i.e. the email)
- Email subject

---

I've made a lot of questionable choices in life. :)
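A minimal sketch of what the patch above records per sent email; the event payload (field names taken from the diff) is what a security-history table like the one described could be rendered from. The comparison here is a simplified stand-in for the patch's `Email` row lookup: the source IP is only shown to the user who actually triggered the send.

```python
def record_email_sent(request, user_service, recipient, email, msg):
    # Redact the IP whenever the recipient is not the acting user; if the
    # action was not user-initiated, fall back to showing the source IP.
    redact_ip = (
        recipient.id != request.user.id if request.user else False
    )
    user_service.record_event(
        user_id=recipient.id,
        tag="account:email:sent",
        ip_address=request.remote_addr,
        additional={
            "from_": request.registry.settings.get("mail.sender"),
            "to": email.email,
            "subject": msg.subject,
            "redact_ip": redact_ip,
        },
    )
```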
2020-06-26T03:51:59Z
[]
[]
pypi/warehouse
8,182
pypi__warehouse-8182
[ "5790", "6054" ]
42cca6639172b7b2dea7c67639e97f8490bb0be9
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -51,11 +51,19 @@ from warehouse.admin.flags import AdminFlagValue from warehouse.cache.origin import origin_cache from warehouse.email import ( + send_added_as_collaborator_email, + send_collaborator_added_email, send_email_verification_email, send_password_change_email, send_password_reset_email, ) -from warehouse.packaging.models import Project, Release +from warehouse.packaging.models import ( + JournalEntry, + Project, + Release, + Role, + RoleInvitation, +) from warehouse.rate_limiting.interfaces import IRateLimiter from warehouse.utils.http import is_safe_url @@ -737,6 +745,147 @@ def _get_two_factor_data(request, _redirect_to="/"): return two_factor_data +@view_config( + route_name="accounts.verify-project-role", + renderer="accounts/invite-confirmation.html", + require_methods=False, + uses_session=True, + permission="manage:user", + has_translations=True, +) +def verify_project_role(request): + token_service = request.find_service(ITokenService, name="email") + user_service = request.find_service(IUserService, context=None) + + def _error(message): + request.session.flash(message, queue="error") + return HTTPSeeOther(request.route_path("manage.projects")) + + try: + token = request.params.get("token") + data = token_service.loads(token) + except TokenExpired: + return _error(request._("Expired token: request a new project role invite")) + except TokenInvalid: + return _error(request._("Invalid token: request a new project role invite")) + except TokenMissing: + return _error(request._("Invalid token: no token supplied")) + + # Check whether this token is being used correctly + if data.get("action") != "email-project-role-verify": + return _error(request._("Invalid token: not a collaboration invitation token")) + + user = user_service.get_user(data.get("user_id")) + if user != request.user: + return _error(request._("Role invitation is not valid.")) + + project = ( + request.db.query(Project).filter(Project.id == data.get("project_id")).one() + ) + desired_role = data.get("desired_role") + + role_invite = ( + request.db.query(RoleInvitation) + .filter(RoleInvitation.project == project) + .filter(RoleInvitation.user == user) + .one_or_none() + ) + + if not role_invite: + return _error(request._("Role invitation no longer exists.")) + + # Use the renderer to bring up a confirmation page + # before adding as contributor + if request.method == "GET": + return { + "project_name": project.name, + "desired_role": desired_role, + } + elif request.method == "POST" and "decline" in request.POST: + request.db.delete(role_invite) + request.session.flash( + request._( + "Invitation for '${project_name}' is declined.", + mapping={"project_name": project.name}, + ), + queue="success", + ) + return HTTPSeeOther(request.route_path("manage.projects")) + + request.db.add(Role(user=user, project=project, role_name=desired_role)) + request.db.delete(role_invite) + request.db.add( + JournalEntry( + name=project.name, + action=f"accepted {desired_role} {user.username}", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + project.record_event( + tag="project:role:accepted", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": desired_role, + "target_user": user.username, + }, + ) + user.record_event( + tag="account:role:accepted", + ip_address=request.remote_addr, + additional={ + 
"submitted_by": request.user.username, + "project_name": project.name, + "role_name": desired_role, + }, + ) + + owner_roles = ( + request.db.query(Role) + .filter(Role.project == project) + .filter(Role.role_name == "Owner") + .all() + ) + owner_users = {owner.user for owner in owner_roles} + + # Don't send email to new user if they are now an owner + owner_users.discard(user) + + submitter_user = user_service.get_user(data.get("submitter_id")) + send_collaborator_added_email( + request, + owner_users, + user=user, + submitter=submitter_user, + project_name=project.name, + role=desired_role, + ) + + send_added_as_collaborator_email( + request, + user, + submitter=submitter_user, + project_name=project.name, + role=desired_role, + ) + + request.session.flash( + request._( + "You are now ${role} of the '${project_name}' project.", + mapping={"project_name": project.name, "role": desired_role}, + ), + queue="success", + ) + + if desired_role == "Owner": + return HTTPSeeOther( + request.route_path("manage.project.roles", project_name=project.name) + ) + else: + return HTTPSeeOther(request.route_path("packaging.project", name=project.name)) + + def _login_user(request, userid, two_factor_method=None): # We have a session factory associated with this request, so in order # to protect against session fixation attacks we're going to make sure diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -317,6 +317,11 @@ def configure(settings=None): jglobals.setdefault("gravatar_profile", "warehouse.utils.gravatar:profile") jglobals.setdefault("now", "warehouse.utils:now") + # And some enums to reuse in the templates + jglobals.setdefault( + "RoleInvitationStatus", "warehouse.packaging.models:RoleInvitationStatus" + ) + # We'll store all of our templates in one location, warehouse/templates # so we'll go ahead and add that to the Jinja2 search path. config.add_jinja2_search_path("warehouse:templates", name=".html") diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -219,6 +219,26 @@ def send_collaborator_added_email( } +@_email("verify-project-role", allow_unverified=True) +def send_project_role_verification_email( + request, + user, + desired_role, + initiator_username, + project_name, + email_token, + token_age, +): + return { + "desired_role": desired_role, + "email_address": user.email, + "initiator_username": initiator_username, + "n_hours": token_age // 60 // 60, + "project_name": project_name, + "token": email_token, + } + + @_email("added-as-collaborator") def send_added_as_collaborator_email(request, user, *, submitter, project_name, role): return {"project": project_name, "submitter": submitter.username, "role": role} diff --git a/warehouse/legacy/api/xmlrpc/views.py b/warehouse/legacy/api/xmlrpc/views.py --- a/warehouse/legacy/api/xmlrpc/views.py +++ b/warehouse/legacy/api/xmlrpc/views.py @@ -215,7 +215,6 @@ def mapply(self, fn, args, kwargs): memo = typeguard._CallMemo(fn, args=args, kwargs=kwargs) typeguard.check_argument_types(memo) except TypeError as exc: - print(exc) raise XMLRPCInvalidParamTypes(exc) return super().mapply(fn, args, kwargs) diff --git a/warehouse/manage/tasks.py b/warehouse/manage/tasks.py new file mode 100644 --- /dev/null +++ b/warehouse/manage/tasks.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse import tasks +from warehouse.accounts.interfaces import ITokenService, TokenExpired +from warehouse.packaging.models import RoleInvitation, RoleInvitationStatus + + [email protected](ignore_result=True, acks_late=True) +def update_role_invitation_status(request): + invites = ( + request.db.query(RoleInvitation) + .filter(RoleInvitation.invite_status == RoleInvitationStatus.Pending) + .all() + ) + token_service = request.find_service(ITokenService, name="email") + + for invite in invites: + try: + token_service.loads(invite.token) + except TokenExpired: + invite.invite_status = RoleInvitationStatus.Expired diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -25,19 +25,23 @@ import warehouse.utils.otp as otp -from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService +from warehouse.accounts.interfaces import ( + IPasswordBreachedService, + ITokenService, + IUserService, + TokenExpired, +) from warehouse.accounts.models import Email, User from warehouse.accounts.views import logout from warehouse.admin.flags import AdminFlagValue from warehouse.email import ( send_account_deletion_email, - send_added_as_collaborator_email, - send_collaborator_added_email, send_collaborator_removed_email, send_collaborator_role_changed_email, send_email_verification_email, send_password_change_email, send_primary_email_change_email, + send_project_role_verification_email, send_removed_as_collaborator_email, send_removed_project_email, send_removed_project_release_email, @@ -71,6 +75,8 @@ ProjectEvent, Release, Role, + RoleInvitation, + RoleInvitationStatus, ) from warehouse.utils.http import is_safe_url from warehouse.utils.paginate import paginate_url_factory @@ -919,11 +925,20 @@ def _key(project): projects_sole_owned = set( project.name for project in all_user_projects["projects_sole_owned"] ) - + project_invites = ( + request.db.query(RoleInvitation) + .filter(RoleInvitation.invite_status == RoleInvitationStatus.Pending) + .filter(RoleInvitation.user == request.user) + .all() + ) + project_invites = [ + (role_invite.project, role_invite.token) for role_invite in project_invites + ] return { "projects": sorted(request.user.projects, key=_key, reverse=True), "projects_owned": projects_owned, "projects_sole_owned": projects_sole_owned, + "project_invites": project_invites, } @@ -1463,40 +1478,101 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): role_name = form.role_name.data userid = user_service.find_userid(username) user = user_service.get_user(userid) + token_service = request.find_service(ITokenService, name="email") existing_role = ( request.db.query(Role) .filter(Role.user == user, Role.project == project) .first() ) + user_invite = ( + request.db.query(RoleInvitation) + .filter(RoleInvitation.user == user) + .filter(RoleInvitation.project == project) + .one_or_none() + ) + # Cover edge case where invite is invalid but task + # has not updated invite status + try: + invite_token = token_service.loads(user_invite.token) + except (TokenExpired, 
AttributeError): + invite_token = None + if existing_role: request.session.flash( - ( - f"User '{username}' already has {existing_role.role_name} " - "role for project" + request._( + "User '${username}' already has ${role_name} role for project", + mapping={ + "username": username, + "role_name": existing_role.role_name, + }, ), queue="error", ) elif user.primary_email is None or not user.primary_email.verified: request.session.flash( - f"User '{username}' does not have a verified primary email " - f"address and cannot be added as a {role_name} for project", + request._( + "User '${username}' does not have a verified primary email " + "address and cannot be added as a ${role_name} for project", + mapping={"username": username, "role_name": role_name}, + ), queue="error", ) - else: - request.db.add( - Role(user=user, project=project, role_name=form.role_name.data) + elif ( + user_invite + and user_invite.invite_status == RoleInvitationStatus.Pending + and invite_token + ): + request.session.flash( + request._( + "User '${username}' already has an active invite. " + "Please try again later.", + mapping={"username": username}, + ), + queue="error", ) + else: + invite_token = token_service.dumps( + { + "action": "email-project-role-verify", + "desired_role": role_name, + "user_id": user.id, + "project_id": project.id, + "submitter_id": request.user.id, + } + ) + if user_invite: + user_invite.invite_status = RoleInvitationStatus.Pending + user_invite.token = invite_token + else: + request.db.add( + RoleInvitation( + user=user, + project=project, + invite_status=RoleInvitationStatus.Pending, + token=invite_token, + ) + ) + request.db.add( JournalEntry( name=project.name, - action=f"add {role_name} {username}", + action=f"invite {role_name} {username}", submitted_by=request.user, submitted_from=request.remote_addr, ) ) + send_project_role_verification_email( + request, + user, + desired_role=role_name, + initiator_username=request.user.username, + project_name=project.name, + email_token=invite_token, + token_age=token_service.max_age, + ) project.record_event( - tag="project:role:add", + tag="project:role:invite", ip_address=request.remote_addr, additional={ "submitted_by": request.user.username, @@ -1504,45 +1580,100 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): "target_user": username, }, ) - - owner_roles = ( - request.db.query(Role) - .join(Role.user) - .filter(Role.role_name == "Owner", Role.project == project) + request.db.flush() # in order to get id + request.session.flash( + request._( + "Invitation sent to '${username}'", + mapping={"username": username}, + ), + queue="success", ) - owner_users = {owner.user for owner in owner_roles} - # Don't send to the owner that added the new role - owner_users.discard(request.user) + form = _form_class(user_service=user_service) - # Don't send owners email to new user if they are now an owner - owner_users.discard(user) + roles = set(request.db.query(Role).join(User).filter(Role.project == project).all()) + invitations = set( + request.db.query(RoleInvitation) + .join(User) + .filter(RoleInvitation.project == project) + .all() + ) - send_collaborator_added_email( - request, - owner_users, - user=user, - submitter=request.user, - project_name=project.name, - role=form.role_name.data, - ) + return { + "project": project, + "roles": roles, + "invitations": invitations, + "form": form, + } - send_added_as_collaborator_email( - request, - user, - submitter=request.user, - project_name=project.name, - 
role=form.role_name.data, - ) - request.session.flash( - f"Added collaborator '{form.username.data}'", queue="success" - ) - form = _form_class(user_service=user_service) +@view_config( + route_name="manage.project.revoke_invite", + context=Project, + uses_session=True, + require_methods=["POST"], + permission="manage:project", + has_translations=True, +) +def revoke_project_role_invitation(project, request, _form_class=ChangeRoleForm): + user_service = request.find_service(IUserService, context=None) + token_service = request.find_service(ITokenService, name="email") + user = user_service.get_user(request.POST["user_id"]) - roles = set(request.db.query(Role).join(User).filter(Role.project == project).all()) + try: + user_invite = ( + request.db.query(RoleInvitation) + .filter(RoleInvitation.project == project) + .filter(RoleInvitation.user == user) + .one() + ) + except NoResultFound: + request.session.flash( + request._("Could not find role invitation."), queue="error" + ) + return HTTPSeeOther( + request.route_path("manage.project.roles", project_name=project.name) + ) + + request.db.delete(user_invite) + + try: + token_data = token_service.loads(user_invite.token) + except TokenExpired: + request.session.flash(request._("Invitation already expired."), queue="success") + return HTTPSeeOther( + request.route_path("manage.project.roles", project_name=project.name) + ) + role_name = token_data.get("desired_role") + + request.db.add( + JournalEntry( + name=project.name, + action=f"revoke_invite {role_name} {user.username}", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + project.record_event( + tag="project:role:revoke_invite", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": role_name, + "target_user": user.username, + }, + ) + request.session.flash( + request._( + "Invitation revoked from '${username}'.", + mapping={"username": user.username}, + ), + queue="success", + ) - return {"project": project, "roles": roles, "form": form} + return HTTPSeeOther( + request.route_path("manage.project.roles", project_name=project.name) + ) @view_config( diff --git a/warehouse/migrations/versions/80018e46c5a4_create_role_invitation_table.py b/warehouse/migrations/versions/80018e46c5a4_create_role_invitation_table.py new file mode 100755 --- /dev/null +++ b/warehouse/migrations/versions/80018e46c5a4_create_role_invitation_table.py @@ -0,0 +1,59 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +create role invitation table + +Revision ID: 80018e46c5a4 +Revises: 87509f4ae027 +Create Date: 2020-06-28 14:53:07.803972 +""" +import sqlalchemy as sa + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "80018e46c5a4" +down_revision = "87509f4ae027" + + +def upgrade(): + op.create_table( + "role_invitations", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("invite_status", sa.Text(), nullable=False), + sa.Column("token", sa.Text(), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["project_id"], ["projects.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["user_id"], ["users.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "user_id", "project_id", name="_role_invitations_user_project_uc" + ), + ) + op.create_index( + "role_invitations_user_id_idx", "role_invitations", ["user_id"], unique=False + ) + + +def downgrade(): + op.drop_index("role_invitations_user_id_idx", table_name="role_invitations") + op.drop_table("role_invitations") diff --git a/warehouse/packaging/__init__.py b/warehouse/packaging/__init__.py --- a/warehouse/packaging/__init__.py +++ b/warehouse/packaging/__init__.py @@ -16,6 +16,7 @@ from warehouse import db from warehouse.accounts.models import Email, User from warehouse.cache.origin import key_factory, receive_set +from warehouse.manage.tasks import update_role_invitation_status from warehouse.packaging.interfaces import IDocsStorage, IFileStorage from warehouse.packaging.models import File, Project, Release, Role from warehouse.packaging.tasks import ( @@ -94,6 +95,7 @@ def includeme(config): ) config.add_periodic_task(crontab(minute="*/5"), update_description_html) + config.add_periodic_task(crontab(minute="*/5"), update_role_invitation_status) # Add a periodic task to compute trending once a day, assuming we have # been configured to be able to access BigQuery. 
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -77,6 +77,41 @@ class Role(db.Model): project = orm.relationship("Project", lazy=False) +class RoleInvitationStatus(enum.Enum): + + Pending = "pending" + Expired = "expired" + + +class RoleInvitation(db.Model): + + __tablename__ = "role_invitations" + __table_args__ = ( + Index("role_invitations_user_id_idx", "user_id"), + UniqueConstraint( + "user_id", "project_id", name="_role_invitations_user_project_uc" + ), + ) + + __repr__ = make_repr("invite_status", "user", "project") + + invite_status = Column( + Enum(RoleInvitationStatus, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + token = Column(Text, nullable=False) + user_id = Column( + ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False + ) + project_id = Column( + ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + + user = orm.relationship(User, lazy=False) + project = orm.relationship("Project", lazy=False) + + class ProjectFactory: def __init__(self, request): self.request = request diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -158,6 +158,11 @@ def includeme(config): config.add_route( "accounts.verify-email", "/account/verify-email/", domain=warehouse ) + config.add_route( + "accounts.verify-project-role", + "/account/verify-project-role/", + domain=warehouse, + ) # Management (views for logged-in users) config.add_route("manage.account", "/manage/account/", domain=warehouse) @@ -245,6 +250,13 @@ def includeme(config): traverse="/{project_name}", domain=warehouse, ) + config.add_route( + "manage.project.revoke_invite", + "/manage/project/{project_name}/collaboration/revoke_invite/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) config.add_route( "manage.project.change_role", "/manage/project/{project_name}/collaboration/change/",
diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -29,6 +29,7 @@ ProjectEvent, Release, Role, + RoleInvitation, ) from warehouse.utils import readme @@ -115,6 +116,16 @@ class Meta: project = factory.SubFactory(ProjectFactory) +class RoleInvitationFactory(WarehouseFactory): + class Meta: + model = RoleInvitation + + invite_status = "pending" + token = "test_token" + user = factory.SubFactory(UserFactory) + project = factory.SubFactory(ProjectFactory) + + class DependencyFactory(WarehouseFactory): class Meta: model = Dependency diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -34,9 +34,11 @@ TooManyFailedLogins, ) from warehouse.admin.flags import AdminFlag, AdminFlagValue +from warehouse.packaging.models import Role, RoleInvitation from warehouse.rate_limiting.interfaces import IRateLimiter from ...common.db.accounts import EmailFactory, UserFactory +from ...common.db.packaging import ProjectFactory, RoleFactory, RoleInvitationFactory class TestFailedLoginView: @@ -1899,6 +1901,310 @@ def test_verify_email_already_verified(self, db_request): ] +class TestVerifyProjectRole: + @pytest.mark.parametrize("desired_role", ["Maintainer", "Owner"]) + def test_verify_project_role( + self, db_request, user_service, token_service, monkeypatch, desired_role + ): + project = ProjectFactory.create() + user = UserFactory.create() + RoleInvitationFactory.create(user=user, project=project) + owner_user = UserFactory.create() + RoleFactory(user=owner_user, project=project, role_name="Owner") + + db_request.user = user + db_request.method = "POST" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/") + db_request.remote_addr = "192.168.1.1" + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": desired_role, + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + user_service.get_user = pretend.call_recorder(lambda user_id: user) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + collaborator_added_email = pretend.call_recorder(lambda *args, **kwargs: None) + monkeypatch.setattr( + views, "send_collaborator_added_email", collaborator_added_email + ) + added_as_collaborator_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, "send_added_as_collaborator_email", added_as_collaborator_email + ) + + result = views.verify_project_role(db_request) + + db_request.db.flush() + + assert db_request.find_service.calls == [ + pretend.call(ITokenService, name="email"), + pretend.call(IUserService, context=None), + ] + + assert token_service.loads.calls == [pretend.call("RANDOM_KEY")] + assert user_service.get_user.calls == [ + pretend.call(user.id), + pretend.call(db_request.user.id), + ] + + assert not ( + db_request.db.query(RoleInvitation) + .filter(RoleInvitation.user == user) + .filter(RoleInvitation.project == project) + .one_or_none() + ) + assert ( + db_request.db.query(Role) + .filter(Role.project == project, Role.user == user) + .one() + ) + + assert db_request.session.flash.calls == [ + 
pretend.call( + f"You are now {desired_role} of the '{project.name}' project.", + queue="success", + ) + ] + + assert collaborator_added_email.calls == [ + pretend.call( + db_request, + {owner_user}, + user=user, + submitter=db_request.user, + project_name=project.name, + role=desired_role, + ) + ] + assert added_as_collaborator_email.calls == [ + pretend.call( + db_request, + user, + submitter=db_request.user, + project_name=project.name, + role=desired_role, + ) + ] + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/" + assert db_request.route_path.calls == [ + pretend.call("manage.project.roles", project_name=project.name) + if desired_role == "Owner" + else pretend.call("packaging.project", name=project.name) + ] + + @pytest.mark.parametrize( + ("exception", "message"), + [ + (TokenInvalid, "Invalid token: request a new project role invite"), + (TokenExpired, "Expired token: request a new project role invite"), + (TokenMissing, "Invalid token: no token supplied"), + ], + ) + def test_verify_project_role_loads_failure( + self, pyramid_request, exception, message + ): + def loads(token): + raise exception + + pyramid_request.find_service = lambda *a, **kw: pretend.stub(loads=loads) + pyramid_request.params = {"token": "RANDOM_KEY"} + pyramid_request.route_path = pretend.call_recorder(lambda name: "/") + pyramid_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + views.verify_project_role(pyramid_request) + + assert pyramid_request.route_path.calls == [pretend.call("manage.projects")] + assert pyramid_request.session.flash.calls == [ + pretend.call(message, queue="error") + ] + + def test_verify_email_invalid_action(self, pyramid_request): + data = {"action": "invalid-action"} + pyramid_request.find_service = lambda *a, **kw: pretend.stub( + loads=lambda a: data + ) + pyramid_request.params = {"token": "RANDOM_KEY"} + pyramid_request.route_path = pretend.call_recorder(lambda name: "/") + pyramid_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + views.verify_project_role(pyramid_request) + + assert pyramid_request.route_path.calls == [pretend.call("manage.projects")] + assert pyramid_request.session.flash.calls == [ + pretend.call( + "Invalid token: not a collaboration invitation token", queue="error" + ) + ] + + def test_verify_project_role_revoked(self, db_request, user_service, token_service): + project = ProjectFactory.create() + user = UserFactory.create() + + db_request.user = user + db_request.method = "POST" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": "Maintainer", + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + user_service.get_user = pretend.call_recorder(lambda user_id: user) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + views.verify_project_role(db_request) + + assert db_request.session.flash.calls == [ + pretend.call( + "Role invitation no longer exists.", + queue="error", + ) + ] + assert db_request.route_path.calls == [pretend.call("manage.projects")] + + def test_verify_project_role_declined( + self, db_request, 
user_service, token_service + ): + project = ProjectFactory.create() + user = UserFactory.create() + RoleInvitationFactory.create(user=user, project=project) + + db_request.user = user + db_request.method = "POST" + db_request.POST.update({"token": "RANDOM_KEY", "decline": "Decline"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": "Maintainer", + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + user_service.get_user = pretend.call_recorder(lambda user_id: user) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + result = views.verify_project_role(db_request) + + assert not ( + db_request.db.query(RoleInvitation) + .filter(RoleInvitation.user == user) + .filter(RoleInvitation.project == project) + .one_or_none() + ) + assert isinstance(result, HTTPSeeOther) + assert db_request.route_path.calls == [pretend.call("manage.projects")] + + def test_verify_fails_with_different_user( + self, db_request, user_service, token_service + ): + project = ProjectFactory.create() + user = UserFactory.create() + user_2 = UserFactory.create() + + db_request.user = user_2 + db_request.method = "POST" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": "Maintainer", + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + user_service.get_user = pretend.call_recorder(lambda user_id: user) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + views.verify_project_role(db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Role invitation is not valid.", queue="error") + ] + assert db_request.route_path.calls == [pretend.call("manage.projects")] + + def test_verify_role_get_confirmation( + self, db_request, user_service, token_service + ): + project = ProjectFactory.create() + user = UserFactory.create() + RoleInvitationFactory.create(user=user, project=project) + + db_request.user = user + db_request.method = "GET" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": "Maintainer", + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + user_service.get_user = pretend.call_recorder(lambda user_id: user) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + + roles = views.verify_project_role(db_request) + + assert roles == { + "project_name": 
project.name, + "desired_role": "Maintainer", + } + + class TestProfileCallout: def test_profile_callout_returns_user(self): user = pretend.stub() diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -1327,6 +1327,91 @@ def test_collaborator_added_email_unverified( ] +class TestProjectRoleVerificationEmail: + def test_project_role_verification_email( + self, db_request, pyramid_config, token_service, monkeypatch + ): + stub_user = UserFactory.create() + EmailFactory.create( + email="[email protected]", + primary=True, + verified=True, + public=True, + user=stub_user, + ) + + subject_renderer = pyramid_config.testing_add_renderer( + "email/verify-project-role/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/verify-project-role/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/verify-project-role/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + db_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + db_request.user = stub_user + db_request.registry.settings = {"mail.sender": "[email protected]"} + db_request.remote_addr = "0.0.0.0" + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_project_role_verification_email( + db_request, + stub_user, + desired_role="Maintainer", + initiator_username="initiating_user", + project_name="project_name", + email_token="TOKEN", + token_age=token_service.max_age, + ) + + assert result == { + "desired_role": "Maintainer", + "email_address": stub_user.email, + "initiator_username": "initiating_user", + "n_hours": token_service.max_age // 60 // 60, + "project_name": "project_name", + "token": "TOKEN", + } + subject_renderer.assert_() + body_renderer.assert_(token="TOKEN", email_address=stub_user.email) + html_renderer.assert_(token="TOKEN", email_address=stub_user.email) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{stub_user.name} <{stub_user.email}>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "ip_address": "0.0.0.0", + "additional": { + "from_": "[email protected]", + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] + + class TestAddedAsCollaboratorEmail: def test_added_as_collaborator_email( self, pyramid_request, pyramid_config, monkeypatch diff --git a/tests/unit/manage/test_tasks.py b/tests/unit/manage/test_tasks.py new file mode 100644 --- /dev/null +++ b/tests/unit/manage/test_tasks.py @@ -0,0 +1,51 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.accounts.interfaces import ITokenService, TokenExpired +from warehouse.manage.tasks import update_role_invitation_status +from warehouse.packaging.models import RoleInvitationStatus + +from ...common.db.packaging import ProjectFactory, RoleInvitationFactory, UserFactory + + +class TestUpdateInvitationStatus: + def test_update_invitation_status(self, db_request): + project = ProjectFactory.create() + user = UserFactory.create() + invite = RoleInvitationFactory(user=user, project=project) + + token_service = pretend.stub(loads=pretend.raiser(TokenExpired)) + db_request.find_service = pretend.call_recorder(lambda *a, **kw: token_service) + + update_role_invitation_status(db_request) + + assert db_request.find_service.calls == [ + pretend.call(ITokenService, name="email") + ] + assert invite.invite_status == RoleInvitationStatus.Expired + + def test_no_updates(self, db_request): + project = ProjectFactory.create() + user = UserFactory.create() + invite = RoleInvitationFactory(user=user, project=project) + + token_service = pretend.stub(loads=lambda token: {}) + db_request.find_service = pretend.call_recorder(lambda *a, **kw: token_service) + + update_role_invitation_status(db_request) + + assert db_request.find_service.calls == [ + pretend.call(ITokenService, name="email") + ] + assert invite.invite_status == RoleInvitationStatus.Pending diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -26,7 +26,12 @@ import warehouse.utils.otp as otp -from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService +from warehouse.accounts.interfaces import ( + IPasswordBreachedService, + ITokenService, + IUserService, + TokenExpired, +) from warehouse.admin.flags import AdminFlagValue from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE from warehouse.macaroons.interfaces import IMacaroonService @@ -37,6 +42,7 @@ Project, ProjectEvent, Role, + RoleInvitation, User, ) from warehouse.utils.paginate import paginate_url_factory @@ -50,6 +56,7 @@ ProjectFactory, ReleaseFactory, RoleFactory, + RoleInvitationFactory, UserFactory, ) @@ -2276,6 +2283,7 @@ def test_manage_projects(self, db_request): newer_project_with_no_releases.name, }, "projects_sole_owned": {newer_project_with_no_releases.name}, + "project_invites": [], } @@ -3344,7 +3352,9 @@ def test_get_manage_project_roles(self, db_request): project = ProjectFactory.create(name="foobar") user = UserFactory.create() + user_2 = UserFactory.create() role = RoleFactory.create(user=user, project=project) + role_invitation = RoleInvitationFactory.create(user=user_2, project=project) result = views.manage_project_roles(project, db_request, _form_class=form_class) @@ -3357,13 +3367,16 @@ def test_get_manage_project_roles(self, db_request): assert result == { "project": project, "roles": {role}, + "invitations": {role_invitation}, "form": form_obj, } def test_post_new_role_validation_fails(self, db_request): project = ProjectFactory.create(name="foobar") user = UserFactory.create(username="testuser") + user_2 = UserFactory.create(username="newuser") role = RoleFactory.create(user=user, project=project) + role_invitation = RoleInvitationFactory.create(user=user_2, project=project) user_service = pretend.stub() db_request.find_service = pretend.call_recorder( @@ -3385,6 +3398,7 @@ def 
test_post_new_role_validation_fails(self, db_request): assert result == { "project": project, "roles": {role}, + "invitations": {role_invitation}, "form": form_obj, } @@ -3404,8 +3418,14 @@ def test_post_new_role(self, monkeypatch, db_request): user_service = pretend.stub( find_userid=lambda username: new_user.id, get_user=lambda userid: new_user ) + token_service = pretend.stub( + dumps=lambda data: "TOKEN", max_age=6 * 60 * 60, loads=lambda data: None + ) db_request.find_service = pretend.call_recorder( - lambda iface, context: user_service + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) ) db_request.method = "POST" db_request.POST = pretend.stub() @@ -3420,20 +3440,20 @@ def test_post_new_role(self, monkeypatch, db_request): flash=pretend.call_recorder(lambda *a, **kw: None) ) - send_collaborator_added_email = pretend.call_recorder(lambda r, u, **k: None) - monkeypatch.setattr( - views, "send_collaborator_added_email", send_collaborator_added_email + send_project_role_verification_email = pretend.call_recorder( + lambda r, u, **k: None ) - - send_added_as_collaborator_email = pretend.call_recorder(lambda r, u, **k: None) monkeypatch.setattr( - views, "send_added_as_collaborator_email", send_added_as_collaborator_email + views, + "send_project_role_verification_email", + send_project_role_verification_email, ) result = views.manage_project_roles(project, db_request, _form_class=form_class) assert db_request.find_service.calls == [ - pretend.call(IUserService, context=None) + pretend.call(IUserService, context=None), + pretend.call(ITokenService, name="email"), ] assert form_obj.validate.calls == [pretend.call()] assert form_class.calls == [ @@ -3441,48 +3461,43 @@ def test_post_new_role(self, monkeypatch, db_request): pretend.call(user_service=user_service), ] assert db_request.session.flash.calls == [ - pretend.call("Added collaborator 'new_user'", queue="success") + pretend.call(f"Invitation sent to '{new_user.username}'", queue="success") ] - assert send_collaborator_added_email.calls == [ - pretend.call( - db_request, - {owner_2}, - user=new_user, - submitter=db_request.user, - project_name=project.name, - role=form_obj.role_name.data, - ) - ] + # Only one role invitation is created + role_invitation = ( + db_request.db.query(RoleInvitation) + .filter(RoleInvitation.user == new_user) + .filter(RoleInvitation.project == project) + .one() + ) - assert send_added_as_collaborator_email.calls == [ + assert result == { + "project": project, + "roles": {owner_1_role, owner_2_role}, + "invitations": {role_invitation}, + "form": form_obj, + } + + assert send_project_role_verification_email.calls == [ pretend.call( db_request, new_user, - submitter=db_request.user, + desired_role=form_obj.role_name.data, + initiator_username=db_request.user.username, project_name=project.name, - role=form_obj.role_name.data, + email_token=token_service.dumps( + { + "action": "email-project-role-verify", + "desired_role": form_obj.role_name.data, + "user_id": new_user.id, + "project_id": project.id, + } + ), + token_age=token_service.max_age, ) ] - # Only one role is created - role = db_request.db.query(Role).filter(Role.user == new_user).one() - - assert result == { - "project": project, - "roles": {role, owner_1_role, owner_2_role}, - "form": form_obj, - } - - entry = ( - db_request.db.query(JournalEntry).options(joinedload("submitted_by")).one() - ) - - assert entry.name == project.name - assert entry.action == "add Owner new_user" - 
assert entry.submitted_by == db_request.user - assert entry.submitted_from == db_request.remote_addr - def test_post_duplicate_role(self, db_request): project = ProjectFactory.create(name="foobar") user = UserFactory.create(username="testuser") @@ -3491,8 +3506,14 @@ def test_post_duplicate_role(self, db_request): user_service = pretend.stub( find_userid=lambda username: user.id, get_user=lambda userid: user ) + token_service = pretend.stub( + dumps=lambda data: "TOKEN", max_age=6 * 60 * 60, loads=lambda data: None + ) db_request.find_service = pretend.call_recorder( - lambda iface, context: user_service + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) ) db_request.method = "POST" db_request.POST = pretend.stub() @@ -3509,7 +3530,8 @@ def test_post_duplicate_role(self, db_request): result = views.manage_project_roles(project, db_request, _form_class=form_class) assert db_request.find_service.calls == [ - pretend.call(IUserService, context=None) + pretend.call(IUserService, context=None), + pretend.call(ITokenService, name="email"), ] assert form_obj.validate.calls == [pretend.call()] assert form_class.calls == [ @@ -3528,9 +3550,112 @@ def test_post_duplicate_role(self, db_request): assert result == { "project": project, "roles": {role}, + "invitations": set(), + "form": form_obj, + } + + def test_reinvite_role_after_expiration(self, monkeypatch, db_request): + project = ProjectFactory.create(name="foobar") + new_user = UserFactory.create(username="new_user") + EmailFactory.create(user=new_user, verified=True, primary=True) + owner_1 = UserFactory.create(username="owner_1") + owner_2 = UserFactory.create(username="owner_2") + owner_1_role = RoleFactory.create( + user=owner_1, project=project, role_name="Owner" + ) + owner_2_role = RoleFactory.create( + user=owner_2, project=project, role_name="Owner" + ) + new_user_role_invitation = RoleInvitationFactory.create( + user=new_user, project=project, invite_status="expired" + ) + + user_service = pretend.stub( + find_userid=lambda username: new_user.id, get_user=lambda userid: new_user + ) + token_service = pretend.stub( + dumps=lambda data: "TOKEN", max_age=6 * 60 * 60, loads=lambda data: None + ) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.method = "POST" + db_request.POST = pretend.stub() + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_1 + form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + username=pretend.stub(data=new_user.username), + role_name=pretend.stub(data="Owner"), + ) + form_class = pretend.call_recorder(lambda *a, **kw: form_obj) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + send_project_role_verification_email = pretend.call_recorder( + lambda r, u, **k: None + ) + monkeypatch.setattr( + views, + "send_project_role_verification_email", + send_project_role_verification_email, + ) + + result = views.manage_project_roles(project, db_request, _form_class=form_class) + + assert db_request.find_service.calls == [ + pretend.call(IUserService, context=None), + pretend.call(ITokenService, name="email"), + ] + assert form_obj.validate.calls == [pretend.call()] + assert form_class.calls == [ + pretend.call(db_request.POST, user_service=user_service), + pretend.call(user_service=user_service), + ] + assert db_request.session.flash.calls 
== [ + pretend.call(f"Invitation sent to '{new_user.username}'", queue="success") + ] + + # Only one role invitation is created + role_invitation = ( + db_request.db.query(RoleInvitation) + .filter(RoleInvitation.user == new_user) + .filter(RoleInvitation.project == project) + .one() + ) + + assert result["invitations"] == {new_user_role_invitation} + + assert result == { + "project": project, + "roles": {owner_1_role, owner_2_role}, + "invitations": {role_invitation}, "form": form_obj, } + assert send_project_role_verification_email.calls == [ + pretend.call( + db_request, + new_user, + desired_role=form_obj.role_name.data, + initiator_username=db_request.user.username, + project_name=project.name, + email_token=token_service.dumps( + { + "action": "email-project-role-verify", + "desired_role": form_obj.role_name.data, + "user_id": new_user.id, + "project_id": project.id, + } + ), + token_age=token_service.max_age, + ) + ] + @pytest.mark.parametrize("with_email", [True, False]) def test_post_unverified_email(self, db_request, with_email): project = ProjectFactory.create(name="foobar") @@ -3541,8 +3666,16 @@ def test_post_unverified_email(self, db_request, with_email): user_service = pretend.stub( find_userid=lambda username: user.id, get_user=lambda userid: user ) + token_service = pretend.stub( + dumps=lambda data: "TOKEN", + max_age=6 * 60 * 60, + loads=lambda data: None, + ) db_request.find_service = pretend.call_recorder( - lambda iface, context: user_service + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) ) db_request.method = "POST" db_request.POST = pretend.stub() @@ -3559,7 +3692,8 @@ def test_post_unverified_email(self, db_request, with_email): result = views.manage_project_roles(project, db_request, _form_class=form_class) assert db_request.find_service.calls == [ - pretend.call(IUserService, context=None) + pretend.call(IUserService, context=None), + pretend.call(ITokenService, name="email"), ] assert form_obj.validate.calls == [pretend.call()] assert form_class.calls == [ @@ -3577,7 +3711,224 @@ def test_post_unverified_email(self, db_request, with_email): # No additional roles are created assert db_request.db.query(Role).all() == [] - assert result == {"project": project, "roles": set(), "form": form_obj} + assert result == { + "project": project, + "roles": set(), + "invitations": set(), + "form": form_obj, + } + + def test_cannot_reinvite_role(self, db_request): + project = ProjectFactory.create(name="foobar") + new_user = UserFactory.create(username="new_user") + EmailFactory.create(user=new_user, verified=True, primary=True) + owner_1 = UserFactory.create(username="owner_1") + owner_2 = UserFactory.create(username="owner_2") + owner_1_role = RoleFactory.create( + user=owner_1, project=project, role_name="Owner" + ) + owner_2_role = RoleFactory.create( + user=owner_2, project=project, role_name="Owner" + ) + new_user_invitation = RoleInvitationFactory.create( + user=new_user, project=project, invite_status="pending" + ) + + user_service = pretend.stub( + find_userid=lambda username: new_user.id, get_user=lambda userid: new_user + ) + token_service = pretend.stub( + dumps=lambda data: "TOKEN", + max_age=6 * 60 * 60, + loads=lambda data: {"desired_role": "Maintainer"}, + ) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.method = "POST" + db_request.POST = 
pretend.stub() + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_1 + form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + username=pretend.stub(data=new_user.username), + role_name=pretend.stub(data="Owner"), + ) + form_class = pretend.call_recorder(lambda *a, **kw: form_obj) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + result = views.manage_project_roles(project, db_request, _form_class=form_class) + + assert db_request.find_service.calls == [ + pretend.call(IUserService, context=None), + pretend.call(ITokenService, name="email"), + ] + assert form_obj.validate.calls == [pretend.call()] + assert form_class.calls == [ + pretend.call(db_request.POST, user_service=user_service), + pretend.call(user_service=user_service), + ] + assert db_request.session.flash.calls == [ + pretend.call( + "User 'new_user' already has an active invite. Please try again later.", + queue="error", + ) + ] + + assert result == { + "project": project, + "roles": {owner_1_role, owner_2_role}, + "invitations": {new_user_invitation}, + "form": form_obj, + } + + +class TestRevokeRoleInvitation: + def test_revoke_invitation(self, db_request, token_service): + project = ProjectFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + RoleInvitationFactory.create(user=user, project=project) + owner_user = UserFactory.create() + RoleFactory(user=owner_user, project=project, role_name="Owner") + + user_service = pretend.stub(get_user=lambda userid: user) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": "Maintainer", + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.method = "POST" + db_request.POST = MultiDict({"user_id": user.id, "token": "TOKEN"}) + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_user + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/projects" + ) + form_class = pretend.call_recorder(lambda *a, **kw: None) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + result = views.revoke_project_role_invitation( + project, db_request, _form_class=form_class + ) + db_request.db.flush() + + assert not ( + db_request.db.query(RoleInvitation) + .filter(RoleInvitation.user == user) + .filter(RoleInvitation.project == project) + .one_or_none() + ) + assert db_request.session.flash.calls == [ + pretend.call(f"Invitation revoked from '{user.username}'.", queue="success") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/projects" + + def test_invitation_does_not_exist(self, db_request, token_service): + project = ProjectFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + owner_user = UserFactory.create() + RoleFactory(user=owner_user, project=project, role_name="Owner") + + user_service = pretend.stub(get_user=lambda userid: user) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-project-role-verify", + "desired_role": "Maintainer", + "user_id": user.id, + "project_id": project.id, + "submitter_id": db_request.user.id, + } + ) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, 
name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.method = "POST" + db_request.POST = MultiDict({"user_id": user.id, "token": "TOKEN"}) + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_user + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/projects" + ) + form_class = pretend.call_recorder(lambda *a, **kw: None) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + result = views.revoke_project_role_invitation( + project, db_request, _form_class=form_class + ) + db_request.db.flush() + + assert db_request.session.flash.calls == [ + pretend.call("Could not find role invitation.", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/projects" + + def test_token_expired(self, db_request, token_service): + project = ProjectFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + RoleInvitationFactory.create(user=user, project=project) + owner_user = UserFactory.create() + RoleFactory(user=owner_user, project=project, role_name="Owner") + + user_service = pretend.stub(get_user=lambda userid: user) + token_service.loads = pretend.call_recorder(pretend.raiser(TokenExpired)) + db_request.find_service = pretend.call_recorder( + lambda iface, context=None, name=None: { + ITokenService: token_service, + IUserService: user_service, + }.get(iface) + ) + db_request.method = "POST" + db_request.POST = MultiDict({"user_id": user.id, "token": "TOKEN"}) + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_user + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/projects/roles" + ) + form_class = pretend.call_recorder(lambda *a, **kw: None) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + result = views.revoke_project_role_invitation( + project, db_request, _form_class=form_class + ) + db_request.db.flush() + + assert not ( + db_request.db.query(RoleInvitation) + .filter(RoleInvitation.user == user) + .filter(RoleInvitation.project == project) + .one_or_none() + ) + assert db_request.session.flash.calls == [ + pretend.call("Invitation already expired.", queue="success") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/projects/roles" class TestChangeProjectRoles: diff --git a/tests/unit/packaging/test_init.py b/tests/unit/packaging/test_init.py --- a/tests/unit/packaging/test_init.py +++ b/tests/unit/packaging/test_init.py @@ -17,6 +17,7 @@ from warehouse import packaging from warehouse.accounts.models import Email, User +from warehouse.manage.tasks import update_role_invitation_status from warehouse.packaging.interfaces import IDocsStorage, IFileStorage from warehouse.packaging.models import File, Project, Release, Role from warehouse.packaging.tasks import ( @@ -117,20 +118,24 @@ def key_factory(keystring, iterate_on=None): if with_trending and with_bq_sync: assert config.add_periodic_task.calls == [ pretend.call(crontab(minute="*/5"), update_description_html), + pretend.call(crontab(minute="*/5"), update_role_invitation_status), pretend.call(crontab(minute=0, hour=3), compute_trending), pretend.call(crontab(minute=0), sync_bigquery_release_files), ] elif with_bq_sync: assert config.add_periodic_task.calls == [ pretend.call(crontab(minute="*/5"), update_description_html), + pretend.call(crontab(minute="*/5"), update_role_invitation_status), 
pretend.call(crontab(minute=0), sync_bigquery_release_files), ] elif with_trending: assert config.add_periodic_task.calls == [ pretend.call(crontab(minute="*/5"), update_description_html), + pretend.call(crontab(minute="*/5"), update_role_invitation_status), pretend.call(crontab(minute=0, hour=3), compute_trending), ] else: assert config.add_periodic_task.calls == [ - pretend.call(crontab(minute="*/5"), update_description_html) + pretend.call(crontab(minute="*/5"), update_description_html), + pretend.call(crontab(minute="*/5"), update_role_invitation_status), ] diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -166,6 +166,11 @@ def add_policy(name, filename): pretend.call( "accounts.verify-email", "/account/verify-email/", domain=warehouse ), + pretend.call( + "accounts.verify-project-role", + "/account/verify-project-role/", + domain=warehouse, + ), pretend.call("manage.account", "/manage/account/", domain=warehouse), pretend.call( "manage.account.totp-provision", @@ -253,6 +258,13 @@ def add_policy(name, filename): traverse="/{project_name}", domain=warehouse, ), + pretend.call( + "manage.project.revoke_invite", + "/manage/project/{project_name}/collaboration/revoke_invite/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), pretend.call( "manage.project.change_role", "/manage/project/{project_name}/collaboration/change/",
Send invitations when adding owner/maintainer roles

**What's the problem this feature will solve?**

Currently, as a PyPI package owner, it's possible to give any other user an owner/maintainer role on your package without their consent.

**Describe the solution you'd like**

We should change the way role assignment works: instead of letting owners directly assign roles to other users on a given project, owners should _request_ that those users accept a role on the project. We should do this by sending the prospective owner/maintainer an email with a validation link, and letting them accept or reject the request before the new role is added.

**Additional context**

Originally described in #4729.

Add Collaborators by invitation resolves #5790, carrying on from work by @DavidBord
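A minimal sketch of the signed, time-limited invitation token this flow relies on, using itsdangerous (the library behind warehouse's email token service). The `SECRET` and `MAX_AGE` values are illustrative assumptions, not warehouse configuration, and `make_invite_token` / `load_invite_token` are hypothetical helper names:

```python
from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer

SECRET = "change-me"   # assumption: a per-deployment signing secret
MAX_AGE = 6 * 60 * 60  # assumption: a six-hour invite lifetime

serializer = URLSafeTimedSerializer(SECRET, salt="email")


def make_invite_token(user_id, project_id, desired_role, submitter_id):
    """Serialize the invitation payload into a URL-safe signed token."""
    return serializer.dumps(
        {
            "action": "email-project-role-verify",
            "desired_role": desired_role,
            "user_id": str(user_id),
            "project_id": str(project_id),
            "submitter_id": str(submitter_id),
        }
    )


def load_invite_token(token):
    """Verify the signature and age; return the payload, or None if invalid."""
    try:
        return serializer.loads(token, max_age=MAX_AGE)
    except (SignatureExpired, BadSignature):
        return None
```

Because the expiry lives in the token's signed timestamp rather than in the database, a stale `RoleInvitation` row can still read "pending" after its token has lapsed, which is why the patch also adds a periodic task to reconcile the two.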
@di as discussed, we should implement a draft role so that the request initiator / admin is able to revoke the invitation.

@DavidBord How's it going here? It seemed like you were close to finishing this; do you need any assistance?
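A rough sketch of the revocable pending state this comment describes, mirroring the `RoleInvitationStatus` enum and the periodic expiry task in the patch; `sweep_expired`, `can_revoke`, and `token_is_valid` are illustrative names, not warehouse APIs:

```python
import enum


class RoleInvitationStatus(enum.Enum):
    Pending = "pending"
    Expired = "expired"


def sweep_expired(invites, token_is_valid):
    """Mark pending invites whose signed token no longer verifies as expired."""
    for invite in invites:
        if (
            invite.invite_status is RoleInvitationStatus.Pending
            and not token_is_valid(invite.token)
        ):
            invite.invite_status = RoleInvitationStatus.Expired


def can_revoke(invite):
    # Only a still-pending invite shows a "revoke" action to the initiator.
    return invite.invite_status is RoleInvitationStatus.Pending
```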
2020-06-30T15:32:30Z
[]
[]
pypi/warehouse
8227
pypi__warehouse-8227
[ "8187" ]
6e83a279b59d291a5101ab2750ac3869457f1d67
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -44,6 +44,7 @@ send_unyanked_project_release_email, send_yanked_project_release_email, ) +from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage.forms import ( AddEmailForm, @@ -915,7 +916,11 @@ def _key(project): has_translations=True, ) def manage_project_settings(project, request): - return {"project": project} + return { + "project": project, + "MAX_FILESIZE": MAX_FILESIZE, + "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, + } def get_user_role_in_project(project, user, request):
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -28,6 +28,7 @@ from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService from warehouse.admin.flags import AdminFlagValue +from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage import views from warehouse.packaging.models import ( @@ -2287,7 +2288,11 @@ def test_manage_project_settings(self): request = pretend.stub() project = pretend.stub() - assert views.manage_project_settings(project, request) == {"project": project} + assert views.manage_project_settings(project, request) == { + "project": project, + "MAX_FILESIZE": MAX_FILESIZE, + "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, + } def test_delete_project_no_confirm(self): project = pretend.stub(normalized_name="foo")
Display file and total project size limits on project settings page We should display the current file size limit (`Project.upload_limit`) and total size limit (`Project.total_size_limit`) on the project settings page for all projects, with external links to https://pypi.org/help/#file-size-limit and https://pypi.org/help/#project-size-limit, respectively. --- **Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, in the [`#pypa-dev` chat channel on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or on the [distutils-sig.python.org mailing list](https://mail.python.org/mailman3/lists/distutils-sig.python.org/). **Screenshot Required**: *If your pull request makes a visual change*, include a screenshot of your update. This helps our team give you feedback faster.
Hey, @di I have updated the template for this. Which file do I have to update to change the text value inside the anchor tag?

Re-opening this, I missed that in #8221 it is showing `0 bytes` instead of the default values when a limit is not set.

@imsahil007, can you take a look?

> Re-opening this, I missed that in #8221 it is showing `0 bytes` instead of the default values when a limit is not set.
>
> @imsahil007, can you take a look?

I missed that! I checked the values only after updating the limits. I will look into that.

![Screenshot from 2020-07-08 12-14-13](https://user-images.githubusercontent.com/35963992/86886484-a29e2000-c114-11ea-9b02-04c9f593d476.png)

I think this will close the issue for now. The default values in the database are set to 0. Even in the admin panel, if the value is not set we print the warehouse.forklift constants (MAX_FILESIZE and so on), so I used the same approach.
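The `0 bytes` regression discussed here reduces to a missing fallback: `upload_limit` and `total_size_limit` are nullable per-project overrides, so when no override exists the page must fall back to the global `MAX_FILESIZE` / `MAX_PROJECT_SIZE` constants that the patch above passes into the template context. A minimal sketch of that fallback, where `human_size` and `project_limits` are illustrative helpers rather than Warehouse utilities:

```python
# Sketch only: `human_size` and `project_limits` are hypothetical helpers.
from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE


def human_size(num_bytes: float) -> str:
    # Render a byte count the way the settings page displays it.
    for unit in ("bytes", "KB", "MB", "GB"):
        if num_bytes < 1024 or unit == "GB":
            return f"{num_bytes:.0f} {unit}"
        num_bytes /= 1024


def project_limits(project) -> dict:
    # A NULL (None) column means "no per-project override": show the
    # default instead of rendering it as 0 bytes.
    return {
        "file_size_limit": human_size(project.upload_limit or MAX_FILESIZE),
        "total_size_limit": human_size(project.total_size_limit or MAX_PROJECT_SIZE),
    }
```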
2020-07-08T07:06:19Z
[]
[]
pypi/warehouse
8,245
pypi__warehouse-8245
[ "8229" ]
b385581b4c661609786c00a1e5e25d78cd4135b8
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -424,7 +424,6 @@ def configure(settings=None): over=[ "warehouse.cache.http.conditional_http_tween_factory", "pyramid_debugtoolbar.toolbar_tween_factory", - "warehouse.raven.raven_tween_factory", EXCVIEW, ], ) @@ -470,9 +469,9 @@ def configure(settings=None): # TODO: Remove this, this is at the wrong layer. config.add_wsgi_middleware(HostRewrite) - # We want Raven to be the last things we add here so that it's the outer + # We want Sentry to be the last things we add here so that it's the outer # most WSGI middleware. - config.include(".raven") + config.include(".sentry") # Register Content-Security-Policy service config.include(".csp") diff --git a/warehouse/logging.py b/warehouse/logging.py --- a/warehouse/logging.py +++ b/warehouse/logging.py @@ -65,17 +65,10 @@ def includeme(config): "stream": "ext://sys.stdout", "formatter": "structlog", }, - "sentry": { - "class": "raven.handlers.logging.SentryHandler", - "level": "ERROR", - "dsn": config.registry.settings.get("sentry.dsn"), - "release": config.registry.settings.get("warehouse.commit"), - "transport": config.registry.settings.get("sentry.transport"), - }, }, "root": { "level": config.registry.settings.get("logging.level", "INFO"), - "handlers": ["primary", "sentry"], + "handlers": ["primary"], }, } ) diff --git a/warehouse/raven.py b/warehouse/raven.py deleted file mode 100644 --- a/warehouse/raven.py +++ /dev/null @@ -1,107 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import raven -import raven.middleware - -from pyramid.tweens import EXCVIEW, INGRESS -from raven.utils.serializer.base import Serializer -from raven.utils.serializer.manager import manager as serialization_manager - -from warehouse.sessions import InvalidSession - - -class InvalidSessionSerializer(Serializer): - - types = (InvalidSession,) - - def serialize(self, value, **kwargs): - return "<InvalidSession>" - - -# We need to register our SessionSerializer before any of the other serializers -# in the list. 
-serializer_registry = serialization_manager._SerializationManager__registry -serializer_registry.insert(0, InvalidSessionSerializer) - - -def raven_tween_factory(handler, registry): - def raven_tween(request): - try: - return handler(request) - except: # noqa - request.raven.captureException() - raise - - return raven_tween - - -def _raven(request): - request.add_finished_callback(lambda r: r.raven.context.clear()) - return request.registry["raven.client"] - - -def includeme(config): - # Create a client and stash it in the registry - client = raven.Client( - dsn=config.registry.settings.get("sentry.dsn"), - include_paths=["warehouse"], - release=config.registry.settings["warehouse.commit"], - transport=config.registry.settings.get("sentry.transport"), - # For some reason we get periodic SystemExit exceptions, I think it is because - # of OpenSSL generating a SIGABRT when OpenSSL_Die() is called, and then - # Gunicorn treating that as being told to exit the process. Either way, there - # isn't anything we can do about them, so they just cause noise. - ignore_exceptions=[ - # For some reason we get periodic SystemExit exceptions, I think it is - # because of OpenSSL generating a SIGABRT when OpenSSL_Die() is called, and - # then Gunicorn treating that as being told to exit the process. Either way, - # there isn't anything we can do about them, so they just cause noise. - SystemExit, - # Gunicorn internally raises these errors, and will catch them and handle - # them correctly... however they have to first pass through our WSGI - # middleware for Raven which is catching them and logging them. Instead we - # will ignore them. - # We have to list these as strings, and list all of them because we don't - # want to import Gunicorn in our application, and when using strings Raven - # doesn't handle inheritance. - "gunicorn.http.errors.ParseException", - "gunicorn.http.errors.NoMoreData", - "gunicorn.http.errors.InvalidRequestLine", - "gunicorn.http.errors.InvalidRequestMethod", - "gunicorn.http.errors.InvalidHTTPVersion", - "gunicorn.http.errors.InvalidHeader", - "gunicorn.http.errors.InvalidHeaderName", - "gunicorn.http.errors.InvalidChunkSize", - "gunicorn.http.errors.ChunkMissingTerminator", - "gunicorn.http.errors.LimitRequestLine", - "gunicorn.http.errors.LimitRequestHeaders", - "gunicorn.http.errors.InvalidProxyLine", - "gunicorn.http.errors.ForbiddenProxyRequest", - "gunicorn.http.errors.InvalidSchemeHeaders", - ], - ) - config.registry["raven.client"] = client - - # Create a request method that'll get us the Raven client in each request. - config.add_request_method(_raven, name="raven", reify=True) - - # Add a tween that will handle catching any exceptions that get raised. - config.add_tween( - "warehouse.raven.raven_tween_factory", - under=["pyramid_debugtoolbar.toolbar_tween_factory", INGRESS], - over=EXCVIEW, - ) - - # Wrap the WSGI object with the middle to catch any exceptions we don't - # catch elsewhere. - config.add_wsgi_middleware(raven.middleware.Sentry, client=client) diff --git a/warehouse/sentry.py b/warehouse/sentry.py new file mode 100644 --- /dev/null +++ b/warehouse/sentry.py @@ -0,0 +1,97 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sentry_sdk + +from sentry_sdk.integrations.celery import CeleryIntegration +from sentry_sdk.integrations.logging import LoggingIntegration +from sentry_sdk.integrations.pyramid import PyramidIntegration +from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration +from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware + + +def _sentry(request): + return request.registry["sentry"] + + +# There is an 'ignore_errors' kwarg for sentry_sdk.init() however it is supposedly +# WIP and unstable compared to the 'before_send' kwarg. We can switch to +# 'ignore_errors' once https://github.com/getsentry/sentry-python/issues/149 +# is closed. +ignore_exceptions = ( + # For some reason we get periodic SystemExit exceptions, I think it is + # because of OpenSSL generating a SIGABRT when OpenSSL_Die() is called, and + # then Gunicorn treating that as being told to exit the process. Either way, + # there isn't anything we can do about them, so they just cause noise. + SystemExit, + # Gunicorn internally raises these errors, and will catch them and handle + # them correctly... however they have to first pass through our WSGI + # middleware which is catching them and logging them. Instead we + # will ignore them. We have to list these as strings, and list all + # of them because we don't want to import Gunicorn in our application + "gunicorn.http.errors.ParseException", + "gunicorn.http.errors.NoMoreData", + "gunicorn.http.errors.InvalidRequestLine", + "gunicorn.http.errors.InvalidRequestMethod", + "gunicorn.http.errors.InvalidHTTPVersion", + "gunicorn.http.errors.InvalidHeader", + "gunicorn.http.errors.InvalidHeaderName", + "gunicorn.http.errors.InvalidChunkSize", + "gunicorn.http.errors.ChunkMissingTerminator", + "gunicorn.http.errors.LimitRequestLine", + "gunicorn.http.errors.LimitRequestHeaders", + "gunicorn.http.errors.InvalidProxyLine", + "gunicorn.http.errors.ForbiddenProxyRequest", + "gunicorn.http.errors.InvalidSchemeHeaders", +) + + +def before_send(event, hint): + if "exc_info" in hint: + exc_type, exc_value, tb = hint["exc_info"] + if ( + exc_type in ignore_exceptions + or type(exc_value).__name__ in ignore_exceptions + ): + return None + else: + return event + return event + + +def includeme(config): + + # Initialize sentry and stash it in the registry + sentry_sdk.init( + dsn=config.registry.settings.get("sentry.dsn"), + release=config.registry.settings.get("warehouse.commit"), + transport=config.registry.settings.get("sentry.transport"), + before_send=before_send, + attach_stacktrace=True, + integrations=[ + # This allows us to not create a tween + # and a tween handler as it automatically + # integrates with pyramid + PyramidIntegration(), + CeleryIntegration(), + SqlalchemyIntegration(), + LoggingIntegration(), + ], + ) + config.registry["sentry"] = sentry_sdk + + # Create a request method that'll get us the Sentry SDK in each request. + config.add_request_method(_sentry, name="sentry", reify=True) + + # Wrap the WSGI object with the middle to catch any exceptions we don't + # catch elsewhere. 
+ config.add_wsgi_middleware(SentryWsgiMiddleware) diff --git a/warehouse/tasks.py b/warehouse/tasks.py --- a/warehouse/tasks.py +++ b/warehouse/tasks.py @@ -122,9 +122,6 @@ def _after_commit_hook(self, success, *args, **kwargs): if success: super().apply_async(*args, **kwargs) - def on_failure(self, exc, task_id, args, kwargs, einfo): - logger.error("Task id {id} failed.".format(id=task_id), exc_info=einfo) - def task(**kwargs): kwargs.setdefault("shared", False)
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -324,7 +324,7 @@ def __init__(self): pretend.call(".routes"), pretend.call(".admin"), pretend.call(".forklift"), - pretend.call(".raven"), + pretend.call(".sentry"), pretend.call(".csp"), pretend.call(".referrer_policy"), pretend.call(".http"), @@ -365,7 +365,6 @@ def __init__(self): over=[ "warehouse.cache.http.conditional_http_tween_factory", "pyramid_debugtoolbar.toolbar_tween_factory", - "warehouse.raven.raven_tween_factory", EXCVIEW, ], ), diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -100,15 +100,8 @@ def test_includeme(monkeypatch, settings, expected_level): "stream": "ext://sys.stdout", "formatter": "structlog", }, - "sentry": { - "class": "raven.handlers.logging.SentryHandler", - "level": "ERROR", - "release": None, - "dsn": None, - "transport": None, - }, }, - "root": {"level": expected_level, "handlers": ["primary", "sentry"]}, + "root": {"level": expected_level, "handlers": ["primary"]}, } ) ] diff --git a/tests/unit/test_raven.py b/tests/unit/test_raven.py deleted file mode 100644 --- a/tests/unit/test_raven.py +++ /dev/null @@ -1,148 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock - -import pretend -import pytest -import raven as real_raven - -from pyramid.tweens import EXCVIEW, INGRESS -from raven.middleware import Sentry as SentryMiddleware - -from warehouse import raven - - -def test_invalid_serializer(): - s = raven.InvalidSessionSerializer(pretend.stub()) - assert s.serialize(pretend.stub()) == "<InvalidSession>" - - -class TestRavenTween: - def test_with_error(self): - request = pretend.stub( - raven=pretend.stub(captureException=pretend.call_recorder(lambda: None)) - ) - - class TestException(Exception): - pass - - @pretend.call_recorder - def handler(request): - raise TestException - - tween = raven.raven_tween_factory(handler, pretend.stub()) - - with pytest.raises(TestException): - tween(request) - - assert handler.calls == [pretend.call(request)] - assert request.raven.captureException.calls == [pretend.call()] - - def test_without_error(self): - request = pretend.stub( - raven=pretend.stub(captureException=pretend.call_recorder(lambda: None)) - ) - response = pretend.stub() - - @pretend.call_recorder - def handler(request): - return response - - tween = raven.raven_tween_factory(handler, pretend.stub()) - - assert tween(request) is response - assert handler.calls == [pretend.call(request)] - assert request.raven.captureException.calls == [] - - -def test_raven_request_method(): - client = pretend.stub( - context=pretend.stub(clear=pretend.call_recorder(lambda: None)) - ) - request = pretend.stub( - add_finished_callback=pretend.call_recorder(lambda cb: None), - registry={"raven.client": client}, - raven=client, - ) - assert raven._raven(request) is client - assert request.add_finished_callback.calls == [pretend.call(mock.ANY)] - assert client.context.clear.calls == [] - cb = request.add_finished_callback.calls[0].args[0] - cb(request) - assert client.context.clear.calls == [pretend.call()] - - -def test_includeme(monkeypatch): - class Registry(dict): - def __init__(self): - self.settings = {} - - client_obj = pretend.stub() - client_cls = pretend.call_recorder(lambda **kw: client_obj) - monkeypatch.setattr(real_raven, "Client", client_cls) - - config = pretend.stub( - registry=Registry(), - add_request_method=pretend.call_recorder(lambda *a, **kw: None), - add_wsgi_middleware=pretend.call_recorder(lambda *a, **kw: None), - add_tween=pretend.call_recorder(lambda *a, **kw: None), - ) - config.registry.settings.update( - { - "warehouse.commit": "blargh", - "sentry.dsn": "the dsn", - "sentry.transport": "the transport", - } - ) - - raven.includeme(config) - - assert client_cls.calls == [ - pretend.call( - dsn="the dsn", - include_paths=["warehouse"], - release="blargh", - transport="the transport", - ignore_exceptions=[ - SystemExit, - "gunicorn.http.errors.ParseException", - "gunicorn.http.errors.NoMoreData", - "gunicorn.http.errors.InvalidRequestLine", - "gunicorn.http.errors.InvalidRequestMethod", - "gunicorn.http.errors.InvalidHTTPVersion", - "gunicorn.http.errors.InvalidHeader", - "gunicorn.http.errors.InvalidHeaderName", - "gunicorn.http.errors.InvalidChunkSize", - "gunicorn.http.errors.ChunkMissingTerminator", - "gunicorn.http.errors.LimitRequestLine", - "gunicorn.http.errors.LimitRequestHeaders", - "gunicorn.http.errors.InvalidProxyLine", - "gunicorn.http.errors.ForbiddenProxyRequest", - "gunicorn.http.errors.InvalidSchemeHeaders", - ], - ) - ] - assert config.registry["raven.client"] is client_obj - assert config.add_request_method.calls == [ - pretend.call(raven._raven, name="raven", reify=True) - ] - assert 
config.add_tween.calls == [ - pretend.call( - "warehouse.raven.raven_tween_factory", - over=EXCVIEW, - under=["pyramid_debugtoolbar.toolbar_tween_factory", INGRESS], - ) - ] - assert config.add_wsgi_middleware.calls == [ - pretend.call(SentryMiddleware, client=client_obj) - ] diff --git a/tests/unit/test_sentry.py b/tests/unit/test_sentry.py new file mode 100644 --- /dev/null +++ b/tests/unit/test_sentry.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend +import pytest +import sentry_sdk + +from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware + +from warehouse import sentry + + +def test_sentry_request_method(): + sentry_sdk = pretend.stub() + request = pretend.stub(registry={"sentry": sentry_sdk}, sentry=sentry,) + + assert sentry._sentry(request) is sentry_sdk + + +class TestSentryBeforeSend: + def test_ignore_exception(self): + hint = {"exc_info": (SystemExit, SystemExit(), "tracebk")} + + assert sentry.before_send(pretend.stub(), hint) is None + + @pytest.mark.parametrize( + "hint", + [ + {"exc_info": (ConnectionError, ConnectionError(), "tracebk")}, + {"event_info": "This is a random event."}, + ], + ) + def test_report_event(self, hint): + event = pretend.stub() + assert sentry.before_send(event, hint) is event + + +def test_includeme(monkeypatch): + class Registry(dict): + def __init__(self): + self.settings = {} + + init_obj = pretend.call_recorder(lambda *a, **kw: "1") + pyramid_obj = pretend.call_recorder(lambda *a, **kw: "2") + celery_obj = pretend.call_recorder(lambda *a, **kw: "3") + sql_obj = pretend.call_recorder(lambda *a, **kw: "4") + log_obj = pretend.call_recorder(lambda *a, **kw: "5") + monkeypatch.setattr(sentry_sdk, "init", init_obj) + monkeypatch.setattr("warehouse.sentry.PyramidIntegration", pyramid_obj) + monkeypatch.setattr("warehouse.sentry.CeleryIntegration", celery_obj) + monkeypatch.setattr("warehouse.sentry.SqlalchemyIntegration", sql_obj) + monkeypatch.setattr("warehouse.sentry.LoggingIntegration", log_obj) + + config = pretend.stub( + registry=Registry(), + add_request_method=pretend.call_recorder(lambda *a, **kw: None), + add_wsgi_middleware=pretend.call_recorder(lambda *a, **kw: None), + ) + config.registry.settings.update( + { + "warehouse.commit": "rand3rfgkn3424", + "sentry.dsn": "test_dsn", + "sentry.transport": "proxy_transport", + } + ) + + sentry.includeme(config) + + assert init_obj.calls == [ + pretend.call( + dsn="test_dsn", + release="rand3rfgkn3424", + transport="proxy_transport", + before_send=sentry.before_send, + attach_stacktrace=True, + integrations=["2", "3", "4", "5"], + ) + ] + assert pyramid_obj.calls == [pretend.call()] + assert celery_obj.calls == [pretend.call()] + assert sql_obj.calls == [pretend.call()] + assert log_obj.calls == [pretend.call()] + assert config.registry["sentry"] is sentry_sdk + assert config.add_request_method.calls == [ + pretend.call(sentry._sentry, name="sentry", reify=True) + ] + assert config.add_wsgi_middleware.calls == [pretend.call(SentryWsgiMiddleware)] diff --git 
a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -17,7 +17,6 @@ import transaction from celery import Celery, Task -from celery.exceptions import MaxRetriesExceededError from kombu import Queue from pyramid import scripting from pyramid_retry import RetryableException @@ -154,24 +153,6 @@ def test_after_commit_hook(self, monkeypatch, success): else: assert apply_async.calls == [] - def test_on_failure(self, monkeypatch): - task = tasks.WarehouseTask() - task.app = Celery() - - logger = pretend.stub(error=pretend.call_recorder(lambda *a, **kw: None)) - monkeypatch.setattr(tasks, "logger", logger) - - with pytest.raises(MaxRetriesExceededError) as exc_info: - raise (MaxRetriesExceededError) - - task.on_failure( - MaxRetriesExceededError, "1234", pretend.stub(), pretend.stub(), exc_info - ) - - assert logger.error.calls == [ - pretend.call("Task id 1234 failed.", exc_info=exc_info) - ] - def test_creates_request(self, monkeypatch): registry = pretend.stub() pyramid_env = {"request": pretend.stub()}
Better integration between Celery workers and Sentry Right now, exceptions raised in a Celery worker are not reported in Sentry. We should improve the integration here (possibly along the lines of https://docs.sentry.io/platforms/python/celery/) so that an exception raised in a task is caught by Sentry.
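The patch above does exactly this by replacing the hand-rolled Raven wiring with `sentry-sdk` and its built-in integrations. In miniature (the DSN below is a placeholder):

```python
import sentry_sdk

from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.pyramid import PyramidIntegration


def before_send(event, hint):
    # Filter noise before it reaches Sentry; returning None drops the event.
    if "exc_info" in hint:
        exc_type, exc_value, _tb = hint["exc_info"]
        if exc_type is SystemExit:
            return None
    return event


sentry_sdk.init(
    dsn="https://public@sentry.example.com/1",  # placeholder DSN
    before_send=before_send,
    # CeleryIntegration hooks the task-failure signals, so exceptions raised
    # inside workers are reported without a per-task on_failure handler.
    integrations=[PyramidIntegration(), CeleryIntegration()],
)
```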
2020-07-10T21:00:54Z
[]
[]
pypi/warehouse
8,269
pypi__warehouse-8269
[ "3492" ]
6b2465bac295f954ccf33290b94ef31f48815d46
diff --git a/warehouse/tasks.py b/warehouse/tasks.py --- a/warehouse/tasks.py +++ b/warehouse/tasks.py @@ -51,8 +51,6 @@ def _params_from_url(self, url, defaults): class WarehouseTask(celery.Task): def __new__(cls, *args, **kwargs): obj = super().__new__(cls, *args, **kwargs) - if getattr(obj, "__header__", None) is not None: - obj.__header__ = functools.partial(obj.__header__, object()) # We do this here instead of inside of __call__ so that exceptions # coming from the transaction manager get caught by the autoretry
diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -38,17 +38,6 @@ def test_tls_redis_backend(): class TestWarehouseTask: - def test_header(self): - def header(request, thing): - pass - - task_type = type( - "Foo", (tasks.WarehouseTask,), {"__header__": staticmethod(header)} - ) - - obj = task_type() - obj.__header__(object()) - def test_call(self, monkeypatch): request = pretend.stub() registry = pretend.stub()
Celerybeat Cleanup Error

```
worker_1 | Traceback (most recent call last):
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/celery/beat.py", line 320, in apply_async
worker_1 |     **entry.options)
worker_1 |   File "/opt/warehouse/src/warehouse/tasks.py", line 95, in apply_async
worker_1 |     return super().apply_async(*args, **kwargs)
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/celery/app/task.py", line 518, in apply_async
worker_1 |     check_arguments(*(args or ()), **(kwargs or {}))
worker_1 | TypeError: backend_cleanup() takes 0 positional arguments but 1 was given
worker_1 |
worker_1 | During handling of the above exception, another exception occurred:
worker_1 |
worker_1 | Traceback (most recent call last):
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/celery/beat.py", line 222, in apply_entry
worker_1 |     result = self.apply_async(entry, producer=producer, advance=False)
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/celery/beat.py", line 328, in apply_async
worker_1 |     entry, exc=exc)), sys.exc_info()[2])
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/vine/five.py", line 178, in reraise
worker_1 |     raise value.with_traceback(tb)
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/celery/beat.py", line 320, in apply_async
worker_1 |     **entry.options)
worker_1 |   File "/opt/warehouse/lib/python3.6/site-packages/celery/app/task.py", line 518, in apply_async
worker_1 |     check_arguments(*(args or ()), **(kwargs or {}))
worker_1 | celery.beat.SchedulingError: Couldn't apply scheduled task celery.backend_cleanup: backend_cleanup() takes 0 positional arguments but 1 was given
```

I assume this is an issue with how the warehouse <-> celery integration is wired up. Not sure what its actual effect is, though.
May I know which version of celery is throwing this?

I encountered this while building the local dev environment, looks like it's using celery 4.1.0. It appears that apply_async is getting called with an arg, which it's passing on to backend_cleanup. When I run the app locally, however, it looks like results are disabled. Should they be running?

```
worker_1 | -------------- celery@b7560aaf9e97 v4.1.0 (latentcall)
worker_1 | ---- **** -----
worker_1 | --- * *** * -- Linux-3.16.0-4-amd64-x86_64-with-debian-9.3 2018-05-15 20:27:24
worker_1 | -- * - **** ---
worker_1 | - ** ---------- [config]
worker_1 | - ** ---------- .> app: warehouse:0x7f0fbe02dac8
worker_1 | - ** ---------- .> transport: amqp://guest:**@rabbitmq:5672//
worker_1 | - ** ---------- .> results: disabled://
worker_1 | - *** --- * --- .> concurrency: 4 (prefork)
worker_1 | -- ******* ---- .> task events: OFF (enable -E to monitor tasks in this worker)
worker_1 | --- ***** -----
worker_1 | -------------- [queues]
worker_1 | .> celery exchange=celery(direct) key=celery
worker_1 |
worker_1 |
worker_1 | [tasks]
worker_1 | . warehouse.cache.origin.fastly.purge_key
worker_1 | . warehouse.email.send_email
worker_1 | . warehouse.email.ses.tasks.cleanup
worker_1 | . warehouse.legacy.api.xmlrpc.cache.services.purge_tag
worker_1 | . warehouse.packaging.tasks.compute_trending
worker_1 | . warehouse.search.tasks.reindex
worker_1 | . warehouse.utils.project.remove_documentation
```

I will try to check warehouse with celery 4.2.x soon

May I ask what the current status of this issue is?

Potentially unrelated, but I often see the following error with celery that blocks me from accessing `localhost`:

```
worker_1 | [2020-06-23 17:45:29,510: INFO/MainProcess] Found credentials in environment variables.
worker_1 | [2020-06-23 17:45:29,577: INFO/MainProcess] Connected to sqs://localstack:4576//
worker_1 | [2020-06-23 17:45:29,599: INFO/MainProcess] Found credentials in environment variables.
worker_1 | [2020-06-23 17:45:29,708: INFO/MainProcess] celery@0d90aa0b13cf ready.
worker_1 | [2020-06-23 17:45:31,539: INFO/Beat] beat: Starting...
worker_1 | [2020-06-23 17:45:31,541: ERROR/Beat] Removing corrupted schedule file 'celerybeat-schedule': error(11, 'Resource temporarily unavailable') worker_1 | Traceback (most recent call last): worker_1 | File "/opt/warehouse/lib/python3.8/site-packages/kombu/utils/objects.py", line 42, in __get__ worker_1 | return obj.__dict__[self.__name__] worker_1 | KeyError: 'scheduler' worker_1 | worker_1 | During handling of the above exception, another exception occurred: worker_1 | worker_1 | Traceback (most recent call last): worker_1 | File "/opt/warehouse/lib/python3.8/site-packages/celery/beat.py", line 519, in setup_schedule worker_1 | self._store = self._open_schedule() worker_1 | File "/opt/warehouse/lib/python3.8/site-packages/celery/beat.py", line 509, in _open_schedule worker_1 | return self.persistence.open(self.schedule_filename, writeback=True) worker_1 | File "/usr/local/lib/python3.8/shelve.py", line 243, in open worker_1 | return DbfilenameShelf(filename, flag, protocol, writeback) worker_1 | File "/usr/local/lib/python3.8/shelve.py", line 227, in __init__ worker_1 | Shelf.__init__(self, dbm.open(filename, flag), protocol, writeback) worker_1 | File "/usr/local/lib/python3.8/dbm/__init__.py", line 95, in open worker_1 | return mod.open(file, flag, mode) worker_1 | _gdbm.error: [Errno 11] Resource temporarily unavailable: 'celerybeat-schedule' ``` This is due to these lines: https://github.com/pypa/warehouse/blob/0c9ffd5ccb2171dd6141e5cf69409df3249ad805/warehouse/tasks.py#L54-L55 Celery is using `obj.__header__` to check arguments: https://github.com/celery/celery/blob/bf6139bf651b20bc04b895a5f6eb8d50320bc252/celery/app/task.py#L524-L529 Normally this would be something like: ``` >>> from celery import task >>> args = [] >>> kwargs = {} >>> check_arguments = task.backend_cleanup.__header__ >>> check_arguments(*(args or ()), **(kwargs or {})) 1 ``` But by overriding `__header__` with a partial, it's this: ``` >>> from celery import task >>> import functools >>> args = [] >>> kwargs = {} >>> check_arguments = functools.partial(task.backend_cleanup.__header__, object()) >>> check_arguments(*(args or ()), **(kwargs or {})) Traceback (most recent call last): File "<stdin>", line 1, in <module> TypeError: backend_cleanup() takes 0 positional arguments but 1 was given ``` It's not clear to me why we're doing this, @dstufft any ideas? I have no memory of why that is, and looking into the PR doesn't bring back any memories, sorry.
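The breakage can be distilled without Celery at all: `functools.partial` pre-binds an extra positional argument, so the stored `__header__` no longer matches the task function's real signature when Celery calls it as an argument check. An illustrative repro (not Warehouse code):

```python
import functools


def backend_cleanup():
    # Stand-in for Celery's built-in task, which takes no arguments.
    pass


check_arguments = backend_cleanup  # roughly what task.__header__ holds
check_arguments()  # fine

check_arguments = functools.partial(backend_cleanup, object())  # the override
check_arguments()  # TypeError: backend_cleanup() takes 0 positional arguments
                   # but 1 was given -- the SchedulingError seen above
```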
2020-07-15T16:37:36Z
[]
[]
pypi/warehouse
8,320
pypi__warehouse-8320
[ "8104" ]
8bdb927aadba8294cd4ec1f2a7c0ad4616f0f069
diff --git a/warehouse/forklift/__init__.py b/warehouse/forklift/__init__.py --- a/warehouse/forklift/__init__.py +++ b/warehouse/forklift/__init__.py @@ -40,6 +40,11 @@ def includeme(config): config.add_legacy_action_route( "forklift.legacy.doc_upload", "doc_upload", domain=forklift ) + + config.add_route( + "forklift.legacy.missing_trailing_slash", "/legacy", domain=forklift + ) + config.add_request_method(_help_url, name="help_url") if forklift: diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -32,7 +32,12 @@ import wtforms import wtforms.validators -from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPGone +from pyramid.httpexceptions import ( + HTTPBadRequest, + HTTPForbidden, + HTTPGone, + HTTPPermanentRedirect, +) from pyramid.response import Response from pyramid.view import view_config from sqlalchemy import exists, func, orm @@ -180,10 +185,10 @@ def _valid_platform_tag(platform_tag): _valid_markdown_variants = {"CommonMark", "GFM"} -def _exc_with_message(exc, message): +def _exc_with_message(exc, message, **kwargs): # The crappy old API that PyPI offered uses the status to pass down # messages to the client. So this function will make that easier to do. - resp = exc(message) + resp = exc(detail=message, **kwargs) resp.status = "{} {}".format(resp.status_code, message) return resp @@ -1504,3 +1509,21 @@ def doc_upload(request): "Uploading documentation is no longer supported, we recommend using " "https://readthedocs.org/.", ) + + +@view_config( + route_name="forklift.legacy.missing_trailing_slash", + require_csrf=False, + require_methods=["POST"], +) +def missing_trailing_slash_redirect(request): + """ + Redirect requests to /legacy to the correct /legacy/ route with a + HTTP-308 Permanent Redirect + """ + return _exc_with_message( + HTTPPermanentRedirect, + "An upload was attempted to /legacy but the expected upload URL is " + "/legacy/ (with a trailing slash)", + location=request.route_path("forklift.legacy.file_upload"), + )
diff --git a/tests/functional/legacy_api/test_basic.py b/tests/functional/legacy_api/test_basic.py new file mode 100644 --- /dev/null +++ b/tests/functional/legacy_api/test_basic.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def test_incorrect_post_redirect(webtest): + """ + Per issue #8104, we should issue an HTTP-308 for a POST + in /legacy and point the user to the correct endpoint, + /legacy/ + + See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308 + """ + resp = webtest.post("/legacy", status=308) + assert resp.status == ( + "308 An upload was attempted to /legacy but the expected upload URL is " + "/legacy/ (with a trailing slash)" + ) + + assert "location" in resp.headers + assert resp.headers["location"] == "http://localhost/legacy/" diff --git a/tests/unit/forklift/test_init.py b/tests/unit/forklift/test_init.py --- a/tests/unit/forklift/test_init.py +++ b/tests/unit/forklift/test_init.py @@ -31,6 +31,7 @@ def test_includeme(forklift_domain, monkeypatch): add_legacy_action_route=pretend.call_recorder(lambda *a, **k: None), add_template_view=pretend.call_recorder(lambda *a, **kw: None), add_request_method=pretend.call_recorder(lambda *a, **kw: None), + add_route=pretend.call_recorder(lambda *a, **kw: None), ) forklift.includeme(config) @@ -48,6 +49,13 @@ def test_includeme(forklift_domain, monkeypatch): "forklift.legacy.doc_upload", "doc_upload", domain=forklift_domain ), ] + + assert config.add_route.calls == [ + pretend.call( + "forklift.legacy.missing_trailing_slash", "/legacy", domain=forklift_domain + ), + ] + assert config.add_request_method.calls == [pretend.call(_help_url, name="help_url")] if forklift_domain: assert config.add_template_view.calls == [ diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3430,3 +3430,17 @@ def test_doc_upload(pyramid_request): "410 Uploading documentation is no longer supported, we recommend " "using https://readthedocs.org/." ) + + +def test_missing_trailing_slash_redirect(pyramid_request): + + pyramid_request.route_path = pretend.call_recorder(lambda *a, **kw: "/legacy/") + + resp = legacy.missing_trailing_slash_redirect(pyramid_request) + + assert resp.status_code == 308 + assert resp.status == ( + "308 An upload was attempted to /legacy but the expected upload URL is " + "/legacy/ (with a trailing slash)" + ) + assert resp.headers["Location"] == "/legacy/"
Incorrect HTTP response from invalid upload endpoint

**Describe the bug**
As a user attempting to upload distributions to PyPI, I want to get a failure response from the PyPI HTTP API when I attempt to upload files to an incorrect endpoint, so that I am able to correctly identify failures and successes and correct them if necessary or provide meaningful error messages.

The correct endpoint for uploading is `https://test.pypi.org/legacy/` (notice the trailing slash). If an upload is attempted at `https://test.pypi.org/legacy` (without a trailing slash), the upload does not succeed, but a `200` response is returned.

**Expected behavior**
A 4xx or 5xx error code should be returned.

**To Reproduce**
Attempt to upload a distribution to `https://test.pypi.org/legacy`

**My Platform**
Mac, Python 3.8, using `flit` as the upload front-end
As far as I can tell this URL is correctly returning a 301 redirect in all cases: ``` $ curl -X POST https://test.pypi.org/legacy <html> <head> <title>301 Moved Permanently</title> </head> <body> <h1>301 Moved Permanently</h1> The resource has been moved to /legacy/; you should be redirected automatically. </body> </html>% ``` ``` $ curl -I https://test.pypi.org/legacy HTTP/2 301 content-security-policy: base-uri 'self'; block-all-mixed-content; connect-src 'self' https://api.github.com/repos/ *.fastly-insights.com sentry.io https://api.pwnedpasswords.com; default-src 'none'; font-src 'self' fonts.gstatic.com; form-action 'self'; frame-ancestors 'none'; frame-src 'none'; img-src 'self' https://warehouse-test-camo.ingress.cmh1.psfhosted.org/ www.google-analytics.com *.fastly-insights.com; script-src 'self' www.googletagmanager.com www.google-analytics.com *.fastly-insights.com https://cdn.ravenjs.com; style-src 'self' fonts.googleapis.com; worker-src *.fastly-insights.com content-type: text/html; charset=UTF-8 location: https://test.pypi.org/legacy/ referrer-policy: origin-when-cross-origin server: nginx/1.13.9 accept-ranges: bytes date: Mon, 15 Jun 2020 15:47:07 GMT age: 50 x-served-by: cache-bwi5128-BWI, cache-mci5929-MCI x-cache: MISS, HIT x-cache-hits: 0, 1 x-timer: S1592236028.844284,VS0,VE1 vary: Accept-Encoding strict-transport-security: max-age=31536000; includeSubDomains; preload x-frame-options: deny x-xss-protection: 1; mode=block x-content-type-options: nosniff x-permitted-cross-domain-policies: none x-robots-header: noindex content-length: 205 ``` Am I missing something? POST requests typically don't return redirection responses because clients aren't supposed to follow them for non-GET/HEAD requests, or if they do follow them, they get changed to a GET request (which I suspect is what is happening here). I'll do some debugging with the http client I'm using, which is `requests/urllib3`, to confirm this is the case. It's one of those things where a 301 might technically be correct according to the http specification, but given the de facto behavior of all the http clients, it doesn't actually work the way it was intended. 
That does indeed seem to be happening here: ``` >>> import requests >>> resp = requests.post('https://test.pypi.org/legacy') >>> resp.status_code 200 >>> resp.content b'\n\n\n\n\n\n<!DOCTYPE html>\n<html lang="en" dir="ltr">\n <head>\n <meta charset="utf-8">\n <meta http-equiv="X-UA-Compatible" content="IE=edge">\n <meta name="viewport" content="width=device-width, initial-scale=1">\n\n <meta name="defaultLanguage" content="en">\n <meta name="availableLanguages" content="en, es, fr, ja, pt_BR, uk, el, de, zh_Hans, ru, he">\n\n \n \n \n\n <title>test.pypi.org \xc2\xb7 TestPyPI</title>\n <meta name="description" content="The Python Package Index (PyPI) is a repository of software for the Python programming language.">\n\n <link rel="stylesheet" href="/static/css/warehouse-ltr.f2d4f304.css">\n <link rel="stylesheet" href="/static/css/fontawesome.6002a161.css">\n <link rel="stylesheet" href="/static/css/regular.98fbf39a.css">\n <link rel="stylesheet" href="/static/css/solid.c3b5f0b5.css">\n <link rel="stylesheet" href="/static/css/brands.2c303be1.css">\n <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Source+Sans+Pro:400,400italic,600,600italic,700,700italic%7CSource+Code+Pro:500">\n <noscript>\n <link rel="stylesheet" href="/static/css/noscript.14e57721.css">\n </noscript>\n\n \n\n <link rel="icon" href="/static/images/favicon.6a76275d.ico" type="image/x-icon">\n\n <link rel="alternate" type="application/rss+xml" title="RSS: 40 latest updates" href="/rss/updates.xml">\n <link rel="alternate" type="application/rss+xml" title="RSS: 40 newest packages" href="/rss/packages.xml">\n \n \n\n <meta property="og:url" content="https://test.pypi.org/legacy/">\n <meta property="og:site_name" content="PyPI">\n <meta property="og:type" content="website">\n <meta property="og:image" content="https://test.pypi.org/static/images/twitter.90915068.jpg">\n <meta property="og:title" content="test.pypi.org">\n <meta property="og:description" content="The Python Package Index (PyPI) is a repository of software for the Python programming language.">\n\n <link rel="search" type="application/opensearchdescription+xml" title="PyPI" href="/opensearch.xml">\n\n \n <script\n src="https://cdn.ravenjs.com/3.26.2/raven.min.js"\n integrity="sha384-D6LXy67EIC102DTuqypxwQsTHgiatlbvg7q/1YAWFb6lRyZ1lIZ6bGDsX7jxHNKA"\n crossorigin="anonymous">\n </script>\n \n <script async\n data-sentry-frontend-dsn="https://[email protected]/1231156"\n src="/static/js/warehouse.943ec9d4.js">\n </script>\n \n \n <script defer src="https://www.fastly-insights.com/insights.js?k=6a52360a-f306-421e-8ed5-7417d0d4a4e9&dnt=true"></script>\n </head>\n\n <body data-controller="viewport-toggle">\n \n<div class="viewport-section viewport-section--dark">\n <div class="viewport-section__content">\n <img alt="TestPyPI" src="/static/images/logo-small.6eef541e.svg">\n <h2 class="viewport-section__heading">API endpoint</h2>\n <div class="viewport-section__text no-top-margin">\n <p>\n This URL is an API endpoint for uploading files to PyPI.<br>\n For more information on uploading projects to PyPI, visit the <a href="https://packaging.python.org/distributing/#uploading-your-project-to-pypi" title="External link" target="_blank" rel="noopener">Python Packaging User Guide</a>.\n </p>\n <p>Otherwise, we suggest you <a href="https://pypi.org/">go to the PyPI homepage</a>.</p>\n </div>\n </div>\n</div>\n\n </body>\n\n</html>' ``` Would a 404 for `POST` requests be preferred? I.e., only issue the redirect for GET requests? 
Seems like maybe whatever library is using `requests` could be setting `allow_redirects=False` instead: https://requests.readthedocs.io/en/latest/user/quickstart/#redirection-and-history Yes, I think a 404 for `POST` requests would give more expected results. I'll update the `flit` issue to implement the `allow_redirects=False` so it can correctly handle either way. https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/301 says: > It is therefore recommended to use the `301` code only as a response for `GET` or `HEAD` methods and to use the `308 Permanent Redirect` for `POST` methods instead, as the method change is explicitly prohibited with this status. So it seems like a 308 response would be the "most correct" thing for us to respond with here, although I'm not sure how `flit` would have handled that in this case. I've added the "good first issue" label to this issue. Hello all! I'll try and tackle this issue today at EuroPython Sprints! Hopefully I'll have a patch ready soon
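On the client side, the defensive check suggested above amounts to disabling redirect following on the upload POST and treating any redirect as an error instead of letting it silently turn into a GET. A sketch against `requests` (the error message wording is illustrative):

```python
import requests

resp = requests.post("https://test.pypi.org/legacy", allow_redirects=False)
if resp.is_redirect or resp.is_permanent_redirect:
    # Without allow_redirects=False, requests follows the 301 with a GET and
    # reports a misleading 200 for an upload that never happened.
    raise RuntimeError(
        f"Upload URL redirected ({resp.status_code}) to "
        f"{resp.headers.get('Location')}; check for a missing trailing slash."
    )
resp.raise_for_status()
```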
2020-07-25T13:14:49Z
[]
[]
pypi/warehouse
8,328
pypi__warehouse-8328
[ "3264" ]
8daea6268a4b2226708b089085736ad97857206d
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -224,6 +224,48 @@ def send_added_as_collaborator_email(request, user, *, submitter, project_name, return {"project": project_name, "submitter": submitter.username, "role": role} +@_email("collaborator-removed") +def send_collaborator_removed_email( + request, email_recipients, *, user, submitter, project_name +): + return { + "username": user.username, + "project": project_name, + "submitter": submitter.username, + } + + +@_email("removed-as-collaborator") +def send_removed_as_collaborator_email(request, user, *, submitter, project_name): + return { + "project": project_name, + "submitter": submitter.username, + } + + +@_email("collaborator-role-changed") +def send_collaborator_role_changed_email( + request, recipients, *, user, submitter, project_name, role +): + return { + "username": user.username, + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + +@_email("role-changed-as-collaborator") +def send_role_changed_as_collaborator_email( + request, user, *, submitter, project_name, role +): + return { + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + @_email("two-factor-added") def send_two_factor_added_email(request, user, method): pretty_methods = {"totp": "TOTP", "webauthn": "WebAuthn"} diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -33,12 +33,16 @@ send_account_deletion_email, send_added_as_collaborator_email, send_collaborator_added_email, + send_collaborator_removed_email, + send_collaborator_role_changed_email, send_email_verification_email, send_password_change_email, send_primary_email_change_email, + send_removed_as_collaborator_email, send_removed_project_email, send_removed_project_release_email, send_removed_project_release_file_email, + send_role_changed_as_collaborator_email, send_two_factor_added_email, send_two_factor_removed_email, send_unyanked_project_release_email, @@ -1585,6 +1589,34 @@ def change_project_role(project, request, _form_class=ChangeRoleForm): "target_user": role.user.username, }, ) + + owner_roles = ( + request.db.query(Role) + .filter(Role.project == project) + .filter(Role.role_name == "Owner") + .all() + ) + owner_users = {owner.user for owner in owner_roles} + # Don't send owner notification email to new user + # if they are now an owner + owner_users.discard(role.user) + send_collaborator_role_changed_email( + request, + owner_users, + user=role.user, + submitter=request.user, + project_name=project.name, + role=role.role_name, + ) + + send_role_changed_as_collaborator_email( + request, + role.user, + submitter=request.user, + project_name=project.name, + role=role.role_name, + ) + request.session.flash("Changed role", queue="success") except NoResultFound: request.session.flash("Could not find role", queue="error") @@ -1633,6 +1665,29 @@ def delete_project_role(project, request): "target_user": role.user.username, }, ) + + owner_roles = ( + request.db.query(Role) + .filter(Role.project == project) + .filter(Role.role_name == "Owner") + .all() + ) + owner_users = {owner.user for owner in owner_roles} + # Don't send owner notification email to new user + # if they are now an owner + owner_users.discard(role.user) + send_collaborator_removed_email( + request, + owner_users, + user=role.user, + submitter=request.user, + project_name=project.name, + ) + + 
send_removed_as_collaborator_email( + request, role.user, submitter=request.user, project_name=project.name, + ) + request.session.flash("Removed role", queue="success") except NoResultFound: request.session.flash("Could not find role", queue="error")
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -22,6 +22,8 @@ from warehouse.email.interfaces import IEmailSender from warehouse.email.services import EmailMessage +from ...common.db.accounts import EmailFactory, UserFactory + @pytest.mark.parametrize( ("user", "address", "expected"), @@ -1500,6 +1502,384 @@ def test_added_as_collaborator_email_unverified( assert send_email.delay.calls == [] +class TestCollaboratorRemovedEmail: + def test_collaborator_removed_email(self, db_request, pyramid_config, monkeypatch): + removed_user = UserFactory.create() + EmailFactory.create(primary=True, verified=True, public=True, user=removed_user) + submitter_user = UserFactory.create() + EmailFactory.create( + primary=True, verified=True, public=True, user=submitter_user + ) + db_request.user = submitter_user + db_request.remote_addr = "0.0.0.0" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/collaborator-removed/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/collaborator-removed/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/collaborator-removed/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + db_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_collaborator_removed_email( + db_request, + [removed_user, submitter_user], + user=removed_user, + submitter=submitter_user, + project_name="test_project", + ) + + assert result == { + "username": removed_user.username, + "project": "test_project", + "submitter": submitter_user.username, + } + subject_renderer.assert_() + body_renderer.assert_(username=removed_user.username) + body_renderer.assert_(project="test_project") + body_renderer.assert_(submitter=submitter_user.username) + html_renderer.assert_(username=removed_user.username) + html_renderer.assert_(project="test_project") + html_renderer.assert_(submitter=submitter_user.username) + + assert db_request.task.calls == [ + pretend.call(send_email), + pretend.call(send_email), + ] + assert send_email.delay.calls == [ + pretend.call( + f"{ removed_user.name } <{ removed_user.primary_email.email }>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": removed_user.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": None, + "to": removed_user.primary_email.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ), + pretend.call( + f"{ submitter_user.name } <{ submitter_user.primary_email.email }>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": submitter_user.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": None, + "to": submitter_user.primary_email.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ), + ] + + +class TestRemovedAsCollaboratorEmail: + def 
test_removed_as_collaborator_email( + self, db_request, pyramid_config, monkeypatch + ): + removed_user = UserFactory.create() + EmailFactory.create(primary=True, verified=True, public=True, user=removed_user) + submitter_user = UserFactory.create() + EmailFactory.create( + primary=True, verified=True, public=True, user=submitter_user + ) + db_request.user = submitter_user + db_request.remote_addr = "0.0.0.0" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/removed-as-collaborator/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/removed-as-collaborator/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/removed-as-collaborator/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + db_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_removed_as_collaborator_email( + db_request, + removed_user, + submitter=submitter_user, + project_name="test_project", + ) + + assert result == { + "project": "test_project", + "submitter": submitter_user.username, + } + subject_renderer.assert_() + body_renderer.assert_(submitter=submitter_user.username) + body_renderer.assert_(project="test_project") + html_renderer.assert_(submitter=submitter_user.username) + html_renderer.assert_(project="test_project") + + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{ removed_user.name } <{ removed_user.primary_email.email }>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": removed_user.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": None, + "to": removed_user.primary_email.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ) + ] + + +class TestRoleChangedEmail: + def test_role_changed_email(self, db_request, pyramid_config, monkeypatch): + changed_user = UserFactory.create() + EmailFactory.create(primary=True, verified=True, public=True, user=changed_user) + submitter_user = UserFactory.create() + EmailFactory.create( + primary=True, verified=True, public=True, user=submitter_user + ) + db_request.user = submitter_user + db_request.remote_addr = "0.0.0.0" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/collaborator-role-changed/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/collaborator-role-changed/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/collaborator-role-changed/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + db_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_collaborator_role_changed_email( + db_request, + [changed_user, submitter_user], + user=changed_user, + submitter=submitter_user, + project_name="test_project", + 
role="Owner", + ) + + assert result == { + "username": changed_user.username, + "project": "test_project", + "role": "Owner", + "submitter": submitter_user.username, + } + subject_renderer.assert_() + body_renderer.assert_(username=changed_user.username) + body_renderer.assert_(project="test_project") + body_renderer.assert_(role="Owner") + body_renderer.assert_(submitter=submitter_user.username) + html_renderer.assert_(username=changed_user.username) + html_renderer.assert_(project="test_project") + html_renderer.assert_(role="Owner") + html_renderer.assert_(submitter=submitter_user.username) + + assert db_request.task.calls == [ + pretend.call(send_email), + pretend.call(send_email), + ] + assert send_email.delay.calls == [ + pretend.call( + f"{ changed_user.name } <{ changed_user.primary_email.email }>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": changed_user.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": None, + "to": changed_user.primary_email.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ), + pretend.call( + f"{ submitter_user.name } <{ submitter_user.primary_email.email }>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": submitter_user.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": None, + "to": submitter_user.primary_email.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ), + ] + + +class TestRoleChangedAsCollaboratorEmail: + def test_role_changed_as_collaborator_email( + self, db_request, pyramid_config, monkeypatch + ): + changed_user = UserFactory.create() + EmailFactory.create(primary=True, verified=True, public=True, user=changed_user) + submitter_user = UserFactory.create() + EmailFactory.create( + primary=True, verified=True, public=True, user=submitter_user + ) + db_request.user = submitter_user + db_request.remote_addr = "0.0.0.0" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/role-changed-as-collaborator/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/role-changed-as-collaborator/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/role-changed-as-collaborator/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + db_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_role_changed_as_collaborator_email( + db_request, + changed_user, + submitter=submitter_user, + project_name="test_project", + role="Owner", + ) + + assert result == { + "project": "test_project", + "role": "Owner", + "submitter": submitter_user.username, + } + subject_renderer.assert_() + body_renderer.assert_(submitter=submitter_user.username) + body_renderer.assert_(project="test_project") + body_renderer.assert_(role="Owner") + html_renderer.assert_(submitter=submitter_user.username) + html_renderer.assert_(project="test_project") + 
html_renderer.assert_(role="Owner") + + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{ changed_user.name } <{ changed_user.primary_email.email }>", + attr.asdict( + EmailMessage( + subject="Email Subject", + body_text="Email Body", + body_html=( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + ) + ), + { + "tag": "account:email:sent", + "user_id": changed_user.id, + "ip_address": db_request.remote_addr, + "additional": { + "from_": None, + "to": changed_user.primary_email.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ), + ] + + class TestRemovedProjectEmail: def test_removed_project_email_to_maintainer( self, pyramid_request, pyramid_config, monkeypatch diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -3590,14 +3590,16 @@ def test_post_unverified_email(self, db_request, with_email): class TestChangeProjectRoles: - def test_change_role(self, db_request): + def test_change_role(self, db_request, monkeypatch): project = ProjectFactory.create(name="foobar") user = UserFactory.create(username="testuser") role = RoleFactory.create(user=user, project=project, role_name="Owner") new_role_name = "Maintainer" + user_2 = UserFactory.create() + db_request.method = "POST" - db_request.user = UserFactory.create() + db_request.user = user_2 db_request.remote_addr = "10.10.10.10" db_request.POST = MultiDict({"role_id": role.id, "role_name": new_role_name}) db_request.session = pretend.stub( @@ -3605,12 +3607,48 @@ def test_change_role(self, db_request): ) db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + send_collaborator_role_changed_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_collaborator_role_changed_email", + send_collaborator_role_changed_email, + ) + send_role_changed_as_collaborator_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_role_changed_as_collaborator_email", + send_role_changed_as_collaborator_email, + ) + result = views.change_project_role(project, db_request) assert role.role_name == new_role_name assert db_request.route_path.calls == [ pretend.call("manage.project.roles", project_name=project.name) ] + assert send_collaborator_role_changed_email.calls == [ + pretend.call( + db_request, + set(), + user=user, + submitter=user_2, + project_name="foobar", + role=new_role_name, + ) + ] + assert send_role_changed_as_collaborator_email.calls == [ + pretend.call( + db_request, + user, + submitter=user_2, + project_name="foobar", + role=new_role_name, + ) + ] assert db_request.session.flash.calls == [ pretend.call("Changed role", queue="success") ] @@ -3688,13 +3726,14 @@ def test_change_own_owner_role(self, db_request): class TestDeleteProjectRoles: - def test_delete_role(self, db_request): + def test_delete_role(self, db_request, monkeypatch): project = ProjectFactory.create(name="foobar") user = UserFactory.create(username="testuser") role = RoleFactory.create(user=user, project=project, role_name="Owner") + user_2 = UserFactory.create() db_request.method = "POST" - db_request.user = UserFactory.create() + db_request.user = user_2 db_request.remote_addr = "10.10.10.10" db_request.POST = MultiDict({"role_id": role.id}) db_request.session = pretend.stub( @@ -3702,12 +3741,33 @@ def test_delete_role(self, db_request): ) 
db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + send_collaborator_removed_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr( + views, "send_collaborator_removed_email", send_collaborator_removed_email + ) + send_removed_as_collaborator_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_removed_as_collaborator_email", + send_removed_as_collaborator_email, + ) + result = views.delete_project_role(project, db_request) assert db_request.route_path.calls == [ pretend.call("manage.project.roles", project_name=project.name) ] assert db_request.db.query(Role).all() == [] + assert send_collaborator_removed_email.calls == [ + pretend.call( + db_request, set(), user=user, submitter=user_2, project_name="foobar", + ) + ] + assert send_removed_as_collaborator_email.calls == [ + pretend.call(db_request, user, submitter=user_2, project_name="foobar",) + ] assert db_request.session.flash.calls == [ pretend.call("Removed role", queue="success") ]
Email notification for demoted/removed user
Follow-up to #1000: we should email the user (and possibly all the other owners/maintainers of the project as well) when they have just been demoted or removed from an owner/maintainer role on a project.

> if a user is removed from a [maintainer or owner] role, then that user should also be emailed (e.g., to prevent an attacker who has compromised one owner, from silently removing other owners of a package, prior to uploading a new malicious package, thereby circumventing #997).

-- @edmorley
To do this the right way, we should wait till we have #5863 implemented, so we can draw on the event logging and use it to trigger this notification.

I finally got the code in #3853 rebased on master, and am looking at driving the notifications from the event logging. I don't see any triggers or signal plumbing in #6339. When you say the event logging should trigger the notification, are you talking about calling these notifications directly from `Project.record_event` and `User.record_event`? Or are you looking for us to use the db signals to watch for `UserEvent` objects being created?

I defer to @di and @ewdurbin and @yeraydiazdiaz on this.

@di, do you have some feedback on @rascalking's question?

I think either could work. It would probably be more straightforward to just do this in `*.record_event`, and probably more thorough to hook into `ProjectEvent`/`UserEvent` object creation. What would be even more powerful is if every `Event` were published to our task queue, with a way to resolve, for each individual event type, the asynchronous actions that should happen as a result of that event, like sending an email (a sketch of that dispatch idea follows this record). That's a bit more than what we're asking for here right now, though.

@rascalking Any updates here? Are you still working on this?
2020-07-27T20:17:49Z
[]
[]
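A minimal sketch of the "publish every event, dispatch per event type" idea floated in the hints above. Everything here is illustrative: the `EVENT_HANDLERS` registry, the `on_event` decorator, the `"project:role:delete"` tag, and the handler name are invented for this sketch and do not exist in Warehouse, which (per the discussion) would more likely enqueue each handler on the Celery task queue rather than call it inline.

```python
# Hypothetical dispatch sketch -- none of these names exist in Warehouse.
# Map event tags to the asynchronous actions they should trigger.
EVENT_HANDLERS = {}


def on_event(tag):
    """Register a handler to run whenever an event with `tag` is recorded."""

    def register(func):
        EVENT_HANDLERS.setdefault(tag, []).append(func)
        return func

    return register


@on_event("project:role:delete")
def notify_collaborator_removed(request, additional):
    # e.g. call send_removed_as_collaborator_email(request, ...) here
    ...


def record_event(request, tag, **additional):
    # Persist the event row first, then fan out to registered handlers --
    # ideally by enqueueing each one as a task instead of calling it inline.
    for handler in EVENT_HANDLERS.get(tag, []):
        handler(request, additional)
```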
pypi/warehouse
8,482
pypi__warehouse-8482
[ "8441" ]
9ef67e207c9569f1014e70dc594c01fb884b6144
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -197,7 +197,6 @@ def login(request, redirect_field_name=REDIRECT_FIELD_NAME, _form_class=LoginFor .lower(), ) - request.session.record_auth_timestamp() return resp return { @@ -257,7 +256,6 @@ def two_factor_and_totp_validate(request, _form_class=TOTPAuthenticationForm): .hexdigest() .lower(), ) - return resp else: form.totp_value.data = "" @@ -784,7 +782,7 @@ def _login_user(request, userid, two_factor_method=None): ip_address=request.remote_addr, additional={"two_factor_method": two_factor_method}, ) - + request.session.record_auth_timestamp() return headers
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -573,6 +573,9 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): pyramid_request.set_property( lambda r: str(uuid.uuid4()), name="unauthenticated_userid" ) + pyramid_request.session.record_auth_timestamp = pretend.call_recorder( + lambda *args: None + ) form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -606,6 +609,7 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): additional={"two_factor_method": "totp"}, ) ] + assert pyramid_request.session.record_auth_timestamp.calls == [pretend.call()] def test_totp_auth_already_authed(self): request = pretend.stub( @@ -984,6 +988,9 @@ def test_recovery_code_auth(self, monkeypatch, pyramid_request, redirect_url): pyramid_request.set_property( lambda r: str(uuid.uuid4()), name="unauthenticated_userid" ) + pyramid_request.session.record_auth_timestamp = pretend.call_recorder( + lambda *args: None + ) form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -1026,6 +1033,7 @@ def test_recovery_code_auth(self, monkeypatch, pyramid_request, redirect_url): queue="success", ) ] + assert pyramid_request.session.record_auth_timestamp.calls == [pretend.call()] def test_recovery_code_form_invalid(self): token_data = {"userid": 1} @@ -1202,6 +1210,9 @@ def test_register_redirect(self, db_request, monkeypatch): create_user = pretend.call_recorder(lambda *args, **kwargs: user) add_email = pretend.call_recorder(lambda *args, **kwargs: email) record_event = pretend.call_recorder(lambda *a, **kw: None) + db_request.session.record_auth_timestamp = pretend.call_recorder( + lambda *args: None + ) db_request.find_service = pretend.call_recorder( lambda *args, **kwargs: pretend.stub( csp_policy={}, @@ -1227,6 +1238,7 @@ def test_register_redirect(self, db_request, monkeypatch): "full_name": "full_name", } ) + send_email = pretend.call_recorder(lambda *a: None) monkeypatch.setattr(views, "send_email_verification_email", send_email)
Attempting to view account settings for an account with 2FA enabled asks to reauthenticate even immediately after login

**Describe the bug**
After logging in and then visiting the account settings page only a few seconds later, a "Confirm password to continue" page is shown. My account has U2F 2FA enabled, in case that ends up being relevant.

**Expected behavior**
Immediately after login (and having provided both a password and the U2F), access should likely be enabled across any part of the site, including sensitive areas, until some timeout has elapsed.

**To Reproduce**
In a private window (or other place where you have not logged in), visit the home page, click log in, fill out your credentials, authenticate via U2F if applicable, then click "Account Settings" in the menu bar after logging in. Quick video attached:

![login](https://user-images.githubusercontent.com/329822/90337291-84e39680-dfaf-11ea-93fb-602163e41af8.gif)

**My Platform**
FF80, macOS, U2F via Yubikey

**Additional context**
#8191 seems likely related (though I don't know whether the behavior is new after it).
Thanks for the report, I've noticed this as well. It looks like the initial login is not setting the "time to reauthenticate" in the session.

![output](https://user-images.githubusercontent.com/35963992/91219677-a4f62100-e738-11ea-9aae-b247a700c415.gif)

I couldn't reproduce this issue.

Perhaps 2FA/U2F is indeed required to observe the behavior, then?

Yes, it definitely is. The `record_auth_timestamp` function is only called when the user doesn't have 2FA set:
https://github.com/pypa/warehouse/blob/ab78391c8e69173aa878c08f88259756de1db7b4/warehouse/accounts/views.py#L200

This needs to happen here as well:
https://github.com/pypa/warehouse/blob/ab78391c8e69173aa878c08f88259756de1db7b4/warehouse/accounts/views.py#L249-L261

Really, since `_login_user` is used in both, it should happen there instead (a condensed sketch of the fixed flow follows this record):
https://github.com/pypa/warehouse/blob/ab78391c8e69173aa878c08f88259756de1db7b4/warehouse/accounts/views.py#L777-L788
2020-08-27T17:47:09Z
[]
[]
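The patch above does exactly what the last hint suggests: it moves `request.session.record_auth_timestamp()` out of the password-only branch of the login view and into the shared `_login_user` helper. A condensed sketch of the resulting flow, with the session/header plumbing elided as `...`:

```python
def _login_user(request, userid, two_factor_method=None):
    headers = ...  # session re-keying and remember() plumbing, elided here
    # The real helper records a login event here (with two_factor_method in
    # the event's additional data), then:
    request.session.record_auth_timestamp()  # now runs for *every* login path
    return headers  # password-only, TOTP, and recovery-code logins alike
```

Because the timestamp is recorded in the one helper all login paths share, the "Confirm password to continue" reauthentication gate no longer fires immediately after a 2FA login.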
pypi/warehouse
8,555
pypi__warehouse-8555
[ "8554" ]
e514fd046a974927b6914d9dbac17f02dae15002
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -162,7 +162,6 @@ def configure(settings=None): maybe_set( settings, "warehouse.release_files_table", "WAREHOUSE_RELEASE_FILES_TABLE" ) - maybe_set(settings, "github.token", "GITHUB_TOKEN") maybe_set(settings, "warehouse.trending_table", "WAREHOUSE_TRENDING_TABLE") maybe_set(settings, "celery.broker_url", "BROKER_URL") maybe_set(settings, "celery.result_url", "REDIS_URL") @@ -364,8 +363,8 @@ def configure(settings=None): # Register support for our legacy action URLs config.include(".legacy.action_routing") - # Register support for our custom predicates - config.include(".predicates") + # Register support for our domain predicates + config.include(".domain") # Register support for template views. config.add_directive("add_template_view", template_view, action_wrap=False) diff --git a/warehouse/predicates.py b/warehouse/domain.py similarity index 61% rename from warehouse/predicates.py rename to warehouse/domain.py --- a/warehouse/predicates.py +++ b/warehouse/domain.py @@ -10,10 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List - -from pyramid import predicates -from pyramid.exceptions import ConfigurationError from pyramid.util import is_same_domain @@ -35,26 +31,5 @@ def __call__(self, info, request): return is_same_domain(request.domain, self.val) -class HeadersPredicate: - def __init__(self, val: List[str], config): - if not val: - raise ConfigurationError( - "Excpected at least one value in headers predicate" - ) - - self.sub_predicates = [ - predicates.HeaderPredicate(subval, config) for subval in val - ] - - def text(self): - return ", ".join(sub.text() for sub in self.sub_predicates) - - phash = text - - def __call__(self, context, request): - return all(sub(context, request) for sub in self.sub_predicates) - - def includeme(config): config.add_route_predicate("domain", DomainPredicate) - config.add_view_predicate("require_headers", HeadersPredicate) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -192,11 +192,6 @@ def send_password_compromised_email_hibp(request, user): return {} -@_email("token-compromised-leak", allow_unverified=True) -def send_token_compromised_email_leak(request, user, *, public_url, origin): - return {"username": user.username, "public_url": public_url, "origin": origin} - - @_email("account-deleted") def send_account_deletion_email(request, user): return {"username": user.username} diff --git a/warehouse/integrations/__init__.py b/warehouse/integrations/__init__.py deleted file mode 100644 --- a/warehouse/integrations/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/warehouse/integrations/github/__init__.py b/warehouse/integrations/github/__init__.py deleted file mode 100644 --- a/warehouse/integrations/github/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/warehouse/integrations/github/tasks.py b/warehouse/integrations/github/tasks.py deleted file mode 100644 --- a/warehouse/integrations/github/tasks.py +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from warehouse import tasks -from warehouse.integrations.github import utils - - [email protected](ignore_result=True, acks_late=True) -def analyze_disclosure_task(task, request, disclosure_record, origin): - utils.analyze_disclosure( - request=request, - disclosure_record=disclosure_record, - origin=origin, - ) diff --git a/warehouse/integrations/github/utils.py b/warehouse/integrations/github/utils.py deleted file mode 100644 --- a/warehouse/integrations/github/utils.py +++ /dev/null @@ -1,370 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import base64 -import json -import re -import time - -from typing import Optional - -import requests - -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric.ec import ECDSA -from cryptography.hazmat.primitives.hashes import SHA256 - -from warehouse.email import send_token_compromised_email_leak -from warehouse.macaroons.caveats import InvalidMacaroon -from warehouse.macaroons.interfaces import IMacaroonService -from warehouse.metrics import IMetricsService - - -class ExtractionFailed(Exception): - pass - - -class TokenLeakMatcher: - """ - A TokenLeakMatcher is linked to a specific regex pattern. When provided - a string that matches this pattern, the matcher can extract a token-like string - from it. 
- """ - - name: str - pattern: re.Pattern - - def extract(self, text): - raise NotImplementedError - - -class PlainTextTokenLeakMatcher(TokenLeakMatcher): - name = "token" - # Macaroons are urlsafe_b64 encodeded so non-alphanumeric chars are - and _ - # https://github.com/ecordell/pymacaroons/blob/06b55110eda2fb192c130dee0bcedf8b124d1056/pymacaroons/serializers/binary_serializer.py#L32 - pattern = re.compile(r"pypi-[A-Za-z0-9-_=]+") - - def extract(self, text): - """ - From a string containing everything that was matched, extract the token - to check - """ - return text - - -class Base64BasicAuthTokenLeakMatcher(TokenLeakMatcher): - name = "base64-basic-auth" - # This is what we would expect to find if a basic auth value was leaked - # The following string was obtained by: - # base64.b64encode(b"__token__:pypi-").decode("utf-8") - # Basic auth is standard base64, so non-alphanumeric chars are + and / - pattern = re.compile(r"X190b2tlbl9fOnB5cGkt[A-Za-z0-9+/=]+") - - def extract(self, text): - try: - _, token = ( - base64.b64decode(text.encode("utf-8")).decode("utf-8").split(":", 1) - ) - return token - except Exception as exc: - raise ExtractionFailed from exc - - -TOKEN_LEAK_MATCHERS = { - matcher.name: matcher - for matcher in [PlainTextTokenLeakMatcher(), Base64BasicAuthTokenLeakMatcher()] -} - - -class InvalidTokenLeakRequest(Exception): - def __init__(self, message, reason): - self.reason = reason - super().__init__(message) - - -class TokenLeakDisclosureRequest: - def __init__(self, token: str, public_url: str): - self.token = token - self.public_url = public_url - - @classmethod - def from_api_record(cls, record, *, matchers=TOKEN_LEAK_MATCHERS): - - if not isinstance(record, dict): - raise InvalidTokenLeakRequest( - f"Record is not a dict but: {str(record)[:100]}", reason="format" - ) - - missing_keys = sorted({"token", "type", "url"} - set(record)) - if missing_keys: - raise InvalidTokenLeakRequest( - f"Record is missing attribute(s): {', '.join(missing_keys)}", - reason="format", - ) - - matcher_code = record["type"] - - matcher = matchers.get(matcher_code) - if not matcher: - raise InvalidTokenLeakRequest( - f"Matcher with code {matcher_code} not found. " - f"Available codes are: {', '.join(matchers)}", - reason="invalid_matcher", - ) - - try: - extracted_token = matcher.extract(record["token"]) - except ExtractionFailed: - raise InvalidTokenLeakRequest( - "Cannot extract token from recieved match", reason="extraction" - ) - - return cls(token=extracted_token, public_url=record["url"]) - - -class GitHubPublicKeyMetaAPIError(InvalidTokenLeakRequest): - pass - - -class CacheMiss(Exception): - pass - - -PUBLIC_KEYS_CACHE_TIME = 60 * 30 # 30 minutes - - -class GitHubTokenScanningPayloadVerifier: - """ - Checks payload signature using: - - `requests` for HTTP calls - - `cryptography` for signature verification - """ - - def __init__(self, *, session, metrics, api_token: Optional[str] = None): - self._metrics = metrics - self._session = session - self._api_token = api_token - - self.public_keys_cached_at = 0 - self.public_keys_cache = None - - def verify(self, *, payload, key_id, signature): - - public_key = None - try: - public_keys = self._get_cached_public_keys() - public_key = self._check_public_key( - github_public_keys=public_keys, key_id=key_id - ) - except (CacheMiss, InvalidTokenLeakRequest): - # No cache or outdated cache, it's ok, we'll do a real call. 
- # Just record a metric so that we can know if all calls lead to - # cache misses - self._metrics.increment("warehouse.token_leak.github.auth.cache.miss") - else: - self._metrics.increment("warehouse.token_leak.github.auth.cache.hit") - - try: - if not public_key: - pubkey_api_data = self._retrieve_public_key_payload() - public_keys = self._extract_public_keys(pubkey_api_data) - public_key = self._check_public_key( - github_public_keys=public_keys, key_id=key_id - ) - - self._check_signature( - payload=payload, public_key=public_key, signature=signature - ) - except InvalidTokenLeakRequest as exc: - self._metrics.increment( - f"warehouse.token_leak.github.auth.error.{exc.reason}" - ) - return False - - self._metrics.increment("warehouse.token_leak.github.auth.success") - return True - - def _get_cached_public_keys(self): - if not self.public_keys_cache: - raise CacheMiss - - if self.public_keys_cached_at + PUBLIC_KEYS_CACHE_TIME < time.time(): - raise CacheMiss - - return self.public_keys_cache - - def _headers_auth(self): - if not self._api_token: - return {} - return {"Authorization": f"token {self._api_token}"} - - def _retrieve_public_key_payload(self): - - token_scanning_pubkey_api_url = ( - "https://api.github.com/meta/public_keys/token_scanning" - ) - - try: - response = self._session.get( - token_scanning_pubkey_api_url, headers=self._headers_auth() - ) - response.raise_for_status() - return response.json() - except requests.HTTPError as exc: - raise GitHubPublicKeyMetaAPIError( - f"Invalid response code {response.status_code}: {response.text[:100]}", - f"public_key_api.status.{response.status_code}", - ) from exc - except json.JSONDecodeError as exc: - raise GitHubPublicKeyMetaAPIError( - f"Non-JSON response received: {response.text[:100]}", - "public_key_api.invalid_json", - ) from exc - except requests.RequestException as exc: - raise GitHubPublicKeyMetaAPIError( - "Could not connect to GitHub", "public_key_api.network_error" - ) from exc - - def _extract_public_keys(self, pubkey_api_data): - if not isinstance(pubkey_api_data, dict): - raise GitHubPublicKeyMetaAPIError( - f"Payload is not a dict but: {str(pubkey_api_data)[:100]}", - "public_key_api.format_error", - ) - try: - public_keys = pubkey_api_data["public_keys"] - except KeyError: - raise GitHubPublicKeyMetaAPIError( - "Payload misses 'public_keys' attribute", "public_key_api.format_error" - ) - - if not isinstance(public_keys, list): - raise GitHubPublicKeyMetaAPIError( - "Payload 'public_keys' attribute is not a list", - "public_key_api.format_error", - ) - - expected_attributes = {"key", "key_identifier"} - for public_key in public_keys: - - if not isinstance(public_key, dict): - raise GitHubPublicKeyMetaAPIError( - f"Key is not a dict but: {public_key}", - "public_key_api.format_error", - ) - - attributes = set(public_key) - if not expected_attributes <= attributes: - raise GitHubPublicKeyMetaAPIError( - "Missing attribute in key: " - f"{sorted(expected_attributes - attributes)}", - "public_key_api.format_error", - ) - - yield {"key": public_key["key"], "key_id": public_key["key_identifier"]} - - self.public_keys_cache = public_keys - - def _check_public_key(self, github_public_keys, key_id): - for record in github_public_keys: - if record["key_id"] == key_id: - return record["key"] - - raise InvalidTokenLeakRequest( - f"Key {key_id} not found in github public keys", reason="wrong_key_id" - ) - - def _check_signature(self, payload, public_key, signature): - try: - loaded_public_key = 
serialization.load_pem_public_key( - data=public_key.encode("utf-8"), backend=default_backend() - ) - loaded_public_key.verify( - signature=base64.b64decode(signature), - data=payload.encode("utf-8"), - # This validates the ECDSA and SHA256 part - signature_algorithm=ECDSA(algorithm=SHA256()), - ) - except InvalidSignature as exc: - raise InvalidTokenLeakRequest( - "Invalid signature", "invalid_signature" - ) from exc - except Exception as exc: - # Maybe the key is not a valid ECDSA key, maybe the data is not properly - # padded, etc. So many things can go wrong... - raise InvalidTokenLeakRequest( - "Invalid cryptographic values", "invalid_crypto" - ) from exc - - -def _analyze_disclosure(request, disclosure_record, origin): - - metrics = request.find_service(IMetricsService, context=None) - - metrics.increment(f"warehouse.token_leak.{origin}.recieved") - - try: - disclosure = TokenLeakDisclosureRequest.from_api_record( - record=disclosure_record - ) - except InvalidTokenLeakRequest as exc: - metrics.increment(f"warehouse.token_leak.{origin}.error.{exc.reason}") - return - - macaroon_service = request.find_service(IMacaroonService, context=None) - try: - database_macaroon = macaroon_service.check_if_macaroon_exists( - raw_macaroon=disclosure.token - ) - except InvalidMacaroon: - metrics.increment(f"warehouse.token_leak.{origin}.error.invalid") - return - - metrics.increment(f"warehouse.token_leak.{origin}.valid") - - macaroon_service.delete_macaroon(macaroon_id=str(database_macaroon.id)) - - send_token_compromised_email_leak( - request, - database_macaroon.user, - public_url=disclosure.public_url, - origin=origin, - ) - metrics.increment(f"warehouse.token_leak.{origin}.processed") - - -def analyze_disclosure(request, disclosure_record, origin): - try: - _analyze_disclosure( - request=request, - disclosure_record=disclosure_record, - origin=origin, - ) - except Exception: - metrics = request.find_service(IMetricsService, context=None) - metrics.increment(f"warehouse.token_leak.{origin}.error.unknown") - raise - - -def analyze_disclosures(disclosure_records, origin, metrics): - from warehouse.integrations.github import tasks - - if not isinstance(disclosure_records, list): - metrics.increment(f"warehouse.token_leak.{origin}.error.format") - raise InvalidTokenLeakRequest("Invalid format: payload is not a list", "format") - - for disclosure_record in disclosure_records: - tasks.analyze_disclosure_task.delay( - disclosure_record=disclosure_record, origin=origin - ) diff --git a/warehouse/integrations/github/views.py b/warehouse/integrations/github/views.py deleted file mode 100644 --- a/warehouse/integrations/github/views.py +++ /dev/null @@ -1,70 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import json - -from pyramid.response import Response -from pyramid.view import view_config - -from warehouse.integrations.github import utils -from warehouse.metrics import IMetricsService - - -@view_config( - require_methods=["POST"], - require_csrf=False, - renderer="json", - route_name="integrations.github.disclose-token", - # If those headers are missing, response will be a 404 - require_headers=["GITHUB-PUBLIC-KEY-IDENTIFIER", "GITHUB-PUBLIC-KEY-SIGNATURE"], - has_translations=False, -) -def github_disclose_token(request): - # GitHub calls this API view when they have identified a string matching - # the regular expressions we provided them. - # Our job is to validate we're talking to github, check if the string contains - # valid credentials and, if they do, invalidate them and warn the owner - - # The documentation for this process is at - # https://developer.github.com/partnerships/token-scanning/ - - body = request.body - - # Thanks to the predicates, we know the headers we need are defined. - key_id = request.headers.get("GITHUB-PUBLIC-KEY-IDENTIFIER") - signature = request.headers.get("GITHUB-PUBLIC-KEY-SIGNATURE") - metrics = request.find_service(IMetricsService, context=None) - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=request.http, - metrics=metrics, - api_token=request.registry.settings.get("github.token"), - ) - - if not verifier.verify(payload=body, key_id=key_id, signature=signature): - return Response(status=400) - - try: - disclosures = request.json_body - except json.decoder.JSONDecodeError: - metrics.increment("warehouse.token_leak.github.error.payload.json_error") - return Response(status=400) - - try: - utils.analyze_disclosures( - disclosure_records=disclosures, origin="github", metrics=metrics - ) - except utils.InvalidTokenLeakRequest: - return Response(status=400) - - # 204 No Content: we acknowledge but we won't comment on the outcome.# - return Response(status=204) diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py --- a/warehouse/macaroons/caveats.py +++ b/warehouse/macaroons/caveats.py @@ -83,9 +83,7 @@ def __init__(self, macaroon, context, principals, permission): def verify(self, key): self.verifier.satisfy_general(V1Caveat(self)) - self.verify_signature(key=key) - def verify_signature(self, key): try: return self.verifier.verify(self.macaroon, key) except ( diff --git a/warehouse/macaroons/interfaces.py b/warehouse/macaroons/interfaces.py --- a/warehouse/macaroons/interfaces.py +++ b/warehouse/macaroons/interfaces.py @@ -43,14 +43,6 @@ def verify(raw_macaroon, context, principals, permission): Raises InvalidMacaroon if the macaroon is not valid. """ - def check_if_macaroon_exists(raw_macaroon): - """ - Returns the database macaroon if the given raw (serialized) macaroon is - an existing valid macaroon, whatever its permissions. - - Raises InvalidMacaroon otherwise. - """ - def create_macaroon(location, user_id, description, caveats): """ Returns a new raw (serialized) macaroon. The description provided diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -117,32 +117,6 @@ def verify(self, raw_macaroon, context, principals, permission): raise InvalidMacaroon("invalid macaroon") - def check_if_macaroon_exists(self, raw_macaroon): - """ - Returns the database macaroon if the given raw (serialized) macaroon is - an existing valid macaroon, whatever its permissions. - - Raises InvalidMacaroon otherwise. 
- """ - raw_macaroon = self._extract_raw_macaroon(raw_macaroon) - if raw_macaroon is None: - raise InvalidMacaroon("malformed or nonexistent macaroon") - - try: - m = pymacaroons.Macaroon.deserialize(raw_macaroon) - except MacaroonDeserializationException: - raise InvalidMacaroon("malformed macaroon") - - dm = self.find_macaroon(m.identifier.decode()) - - if dm is None: - raise InvalidMacaroon("deleted or nonexistent macaroon") - - verifier = Verifier(m, context=None, principals=None, permission=None) - verifier.verify_signature(dm.key) - - return dm - def create_macaroon(self, location, user_id, description, caveats): """ Returns a tuple of a new raw (serialized) macaroon and its DB model. diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -163,6 +163,7 @@ def includeme(config): "/account/verify-project-role/", domain=warehouse, ) + # Management (views for logged-in users) config.add_route("manage.account", "/manage/account/", domain=warehouse) config.add_route( @@ -324,13 +325,6 @@ def includeme(config): read_only=True, domain=warehouse, ) - # Integration URLs - - config.add_route( - "integrations.github.disclose-token", - "/_/github/disclose-token", - domain=warehouse, - ) # Legacy URLs config.add_route("legacy.api.simple.index", "/simple/", domain=warehouse)
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py --- a/tests/unit/accounts/test_core.py +++ b/tests/unit/accounts/test_core.py @@ -325,7 +325,6 @@ def test_includeme(monkeypatch): set_authentication_policy=pretend.call_recorder(lambda p: None), set_authorization_policy=pretend.call_recorder(lambda p: None), maybe_dotted=pretend.call_recorder(lambda path: path), - add_route_predicate=pretend.call_recorder(lambda name, cls: None), ) accounts.includeme(config) diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -766,82 +766,6 @@ def test_password_compromised_email_hibp( ] -class TestTokenCompromisedLeakEmail: - @pytest.mark.parametrize("verified", [True, False]) - def test_password_compromised_email( - self, pyramid_request, pyramid_config, monkeypatch, verified - ): - stub_user = pretend.stub( - id=3, - username="username", - name="", - email="[email protected]", - primary_email=pretend.stub(email="[email protected]", verified=verified), - ) - pyramid_request.user = None - pyramid_request.db = pretend.stub( - query=lambda a: pretend.stub( - filter=lambda *a: pretend.stub(one=lambda: stub_user) - ), - ) - - subject_renderer = pyramid_config.testing_add_renderer( - "email/token-compromised-leak/subject.txt" - ) - subject_renderer.string_response = "Email Subject" - body_renderer = pyramid_config.testing_add_renderer( - "email/token-compromised-leak/body.txt" - ) - body_renderer.string_response = "Email Body" - html_renderer = pyramid_config.testing_add_renderer( - "email/token-compromised-leak/body.html" - ) - html_renderer.string_response = "Email HTML Body" - - send_email = pretend.stub( - delay=pretend.call_recorder(lambda *args, **kwargs: None) - ) - pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) - monkeypatch.setattr(email, "send_email", send_email) - - result = email.send_token_compromised_email_leak( - pyramid_request, stub_user, public_url="http://example.com", origin="github" - ) - - assert result == { - "username": "username", - "public_url": "http://example.com", - "origin": "github", - } - assert pyramid_request.task.calls == [pretend.call(send_email)] - assert send_email.delay.calls == [ - pretend.call( - f"{stub_user.username} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), - { - "tag": "account:email:sent", - "user_id": 3, - "ip_address": "1.2.3.4", - "additional": { - "from_": None, - "to": "[email protected]", - "subject": "Email Subject", - "redact_ip": False, - }, - }, - ) - ] - - class TestPasswordCompromisedEmail: @pytest.mark.parametrize("verified", [True, False]) def test_password_compromised_email( diff --git a/tests/unit/integration/__init__.py b/tests/unit/integration/__init__.py deleted file mode 100644 --- a/tests/unit/integration/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/tests/unit/integration/github/__init__.py b/tests/unit/integration/github/__init__.py deleted file mode 100644 --- a/tests/unit/integration/github/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/tests/unit/integration/github/test_tasks.py b/tests/unit/integration/github/test_tasks.py deleted file mode 100644 --- a/tests/unit/integration/github/test_tasks.py +++ /dev/null @@ -1,40 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pretend - -from warehouse.integrations.github import tasks, utils - - -def test_analyze_disclosure_task(monkeypatch): - analyze_disclosure = pretend.call_recorder(lambda *a, **k: None) - monkeypatch.setattr(utils, "analyze_disclosure", analyze_disclosure) - - task = pretend.stub() - request = pretend.stub() - disclosure_record = pretend.stub() - origin = pretend.stub() - - tasks.analyze_disclosure_task( - task=task, - request=request, - disclosure_record=disclosure_record, - origin=origin, - ) - - assert analyze_disclosure.calls == [ - pretend.call( - request=request, - disclosure_record=disclosure_record, - origin=origin, - ) - ] diff --git a/tests/unit/integration/github/test_utils.py b/tests/unit/integration/github/test_utils.py deleted file mode 100644 --- a/tests/unit/integration/github/test_utils.py +++ /dev/null @@ -1,653 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import base64 -import collections -import json -import time - -import pretend -import pytest -import requests - -from warehouse.integrations.github import tasks, utils - -basic_auth_pypi_1234 = base64.b64encode(b"__token__:pypi-1234").decode("utf-8") - - -def test_token_leak_matcher_extract(): - with pytest.raises(NotImplementedError): - utils.TokenLeakMatcher().extract("a") - - -def test_plain_text_token_leak_matcher_extract(): - assert utils.PlainTextTokenLeakMatcher().extract("a") == "a" - - -def test_base64_basic_auth_token_leak_extract(): - assert ( - utils.Base64BasicAuthTokenLeakMatcher().extract(basic_auth_pypi_1234) - == "pypi-1234" - ) - - [email protected]( - "input", [base64.b64encode(b"pypi-1234").decode("utf-8"), "foo bar"] -) -def test_base64_basic_auth_token_leak_extract_error(input): - with pytest.raises(utils.ExtractionFailed): - utils.Base64BasicAuthTokenLeakMatcher().extract(input) - - -def test_invalid_token_leak_request(): - exc = utils.InvalidTokenLeakRequest("a", "b") - - assert str(exc) == "a" - assert exc.reason == "b" - - [email protected]( - "record, error, reason", - [ - (None, "Record is not a dict but: None", "format"), - ({}, "Record is missing attribute(s): token, type, url", "format"), - ( - {"type": "not_found", "token": "a", "url": "b"}, - "Matcher with code not_found not found. " - "Available codes are: token, base64-basic-auth", - "invalid_matcher", - ), - ( - {"type": "base64-basic-auth", "token": "foo bar", "url": "a"}, - "Cannot extract token from recieved match", - "extraction", - ), - ], -) -def test_token_leak_disclosure_request_from_api_record_error(record, error, reason): - with pytest.raises(utils.InvalidTokenLeakRequest) as exc: - utils.TokenLeakDisclosureRequest.from_api_record(record) - - assert str(exc.value) == error - assert exc.value.reason == reason - - [email protected]( - "type, token", - [("token", "pypi-1234"), ("base64-basic-auth", basic_auth_pypi_1234)], -) -def test_token_leak_disclosure_request_from_api_record(type, token): - request = utils.TokenLeakDisclosureRequest.from_api_record( - {"type": type, "token": token, "url": "http://example.com"} - ) - - assert request.token == "pypi-1234" - assert request.public_url == "http://example.com" - - -class TestGitHubTokenScanningPayloadVerifier: - def test_init(self): - metrics = pretend.stub() - session = pretend.stub() - token = "api_token" - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics, api_token=token - ) - - assert verifier._session is session - assert verifier._metrics is metrics - assert verifier._api_token == token - - def test_verify_cache_miss(self): - # Example taken from - # https://gist.github.com/ewjoachim/7dde11c31d9686ed6b4431c3ca166da2 - meta_payload = { - "public_keys": [ - { - "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" - "7b6c06b681aa86a874555f4a", - "key": "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" - "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" - "-----END PUBLIC KEY-----", - "is_current": True, - } - ] - } - response = pretend.stub( - json=lambda: meta_payload, raise_for_status=lambda: None - ) - session = pretend.stub(get=lambda *a, **k: response) - metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics, api_token="api-token" - ) - key_id = "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a" - 
signature = ( - "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" - "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" - ) - - payload = ( - '[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' - 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' - 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' - ) - assert ( - verifier.verify(payload=payload, key_id=key_id, signature=signature) is True - ) - - assert metrics.increment.calls == [ - pretend.call("warehouse.token_leak.github.auth.cache.miss"), - pretend.call("warehouse.token_leak.github.auth.success"), - ] - - def test_verify_cache_hit(self): - session = pretend.stub() - metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics, api_token="api-token" - ) - verifier.public_keys_cached_at = time.time() - verifier.public_keys_cache = [ - { - "key_id": "90a421169f0a406205f1563a953312f0be898d3c" - "7b6c06b681aa86a874555f4a", - "key": "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" - "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" - "-----END PUBLIC KEY-----", - } - ] - - key_id = "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a" - signature = ( - "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" - "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" - ) - - payload = ( - '[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' - 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' - 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' - ) - assert ( - verifier.verify(payload=payload, key_id=key_id, signature=signature) is True - ) - - assert metrics.increment.calls == [ - pretend.call("warehouse.token_leak.github.auth.cache.hit"), - pretend.call("warehouse.token_leak.github.auth.success"), - ] - - def test_verify_error(self): - metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=metrics, api_token="api-token" - ) - verifier._retrieve_public_key_payload = pretend.raiser( - utils.InvalidTokenLeakRequest("Bla", "bla") - ) - - assert verifier.verify(payload={}, key_id="a", signature="a") is False - - assert metrics.increment.calls == [ - pretend.call("warehouse.token_leak.github.auth.cache.miss"), - pretend.call("warehouse.token_leak.github.auth.error.bla"), - ] - - def test_headers_auth_no_token(self): - headers = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub(), api_token=None - )._headers_auth() - assert headers == {} - - def test_headers_auth_token(self): - headers = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub(), api_token="api-token" - )._headers_auth() - assert headers == {"Authorization": "token api-token"} - - def test_retrieve_public_key_payload(self): - meta_payload = { - "public_keys": [ - { - "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" - "7b6c06b681aa86a874555f4a", - "key": "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" - "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" - "-----END PUBLIC KEY-----", - "is_current": True, - } - ] - } - response = pretend.stub( - json=lambda: meta_payload, raise_for_status=lambda: None - ) - session = pretend.stub(get=pretend.call_recorder(lambda *a, 
**k: response)) - metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics, api_token="api-token" - ) - assert verifier._retrieve_public_key_payload() == meta_payload - assert session.get.calls == [ - pretend.call( - "https://api.github.com/meta/public_keys/token_scanning", - headers={"Authorization": "token api-token"}, - ) - ] - - def test_get_cached_public_key_cache_hit(self): - metrics = pretend.stub() - session = pretend.stub() - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics - ) - verifier.public_keys_cached_at = time.time() - cache = verifier.public_keys_cache = pretend.stub() - - assert verifier._get_cached_public_keys() is cache - - def test_get_cached_public_key_cache_miss_no_cache(self): - metrics = pretend.stub() - session = pretend.stub() - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics - ) - - with pytest.raises(utils.CacheMiss): - verifier._get_cached_public_keys() - - def test_get_cached_public_key_cache_miss_too_old(self): - metrics = pretend.stub() - session = pretend.stub() - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=metrics - ) - verifier.public_keys_cache = pretend.stub() - - with pytest.raises(utils.CacheMiss): - verifier._get_cached_public_keys() - - def test_retrieve_public_key_payload_http_error(self): - response = pretend.stub( - status_code=418, - text="I'm a teapot", - raise_for_status=pretend.raiser(requests.HTTPError), - ) - session = pretend.stub( - get=lambda *a, **k: response, - ) - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=pretend.stub() - ) - with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: - verifier._retrieve_public_key_payload() - - assert str(exc.value) == "Invalid response code 418: I'm a teapot" - assert exc.value.reason == "public_key_api.status.418" - - def test_retrieve_public_key_payload_json_error(self): - response = pretend.stub( - text="Still a non-json teapot", - json=pretend.raiser(json.JSONDecodeError("", "", 3)), - raise_for_status=lambda: None, - ) - session = pretend.stub(get=lambda *a, **k: response) - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=pretend.stub() - ) - with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: - verifier._retrieve_public_key_payload() - - assert str(exc.value) == "Non-JSON response received: Still a non-json teapot" - assert exc.value.reason == "public_key_api.invalid_json" - - def test_retrieve_public_key_payload_connection_error(self): - session = pretend.stub(get=pretend.raiser(requests.ConnectionError)) - - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=session, metrics=pretend.stub() - ) - - with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: - verifier._retrieve_public_key_payload() - - assert str(exc.value) == "Could not connect to GitHub" - assert exc.value.reason == "public_key_api.network_error" - - def test_extract_public_keys(self): - meta_payload = { - "public_keys": [ - { - "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" - "7b6c06b681aa86a874555f4a", - "key": "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" - "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" - "-----END PUBLIC KEY-----", - "is_current": True, - } - ] - } - verifier = 
utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - - keys = list(verifier._extract_public_keys(pubkey_api_data=meta_payload)) - - assert keys == [ - { - "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcD" - "QgAE9MJJHnMfn2+H4xL4YaPDA4RpJqUq\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ" - "8qpVIW4clayyef9gWhFbNHWAA==\n-----END PUBLIC KEY-----", - "key_id": "90a421169f0a406205f1563a953312f0be" - "898d3c7b6c06b681aa86a874555f4a", - } - ] - - @pytest.mark.parametrize( - "payload, expected", - [ - ([], "Payload is not a dict but: []"), - ({}, "Payload misses 'public_keys' attribute"), - ({"public_keys": None}, "Payload 'public_keys' attribute is not a list"), - ({"public_keys": [None]}, "Key is not a dict but: None"), - ( - {"public_keys": [{}]}, - "Missing attribute in key: ['key', 'key_identifier']", - ), - ( - {"public_keys": [{"key": "a"}]}, - "Missing attribute in key: ['key_identifier']", - ), - ( - {"public_keys": [{"key_identifier": "a"}]}, - "Missing attribute in key: ['key']", - ), - ], - ) - def test_extract_public_keys_error(self, payload, expected): - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - - with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: - list(verifier._extract_public_keys(pubkey_api_data=payload)) - - assert exc.value.reason == "public_key_api.format_error" - assert str(exc.value) == expected - - def test_check_public_key(self): - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - - keys = [ - {"key_id": "a", "key": "b"}, - {"key_id": "c", "key": "d"}, - ] - assert verifier._check_public_key(github_public_keys=keys, key_id="c") == "d" - - def test_check_public_key_error(self): - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - - with pytest.raises(utils.InvalidTokenLeakRequest) as exc: - verifier._check_public_key(github_public_keys=[], key_id="c") - - assert str(exc.value) == "Key c not found in github public keys" - assert exc.value.reason == "wrong_key_id" - - def test_check_signature(self): - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - public_key = ( - "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" - "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" - "-----END PUBLIC KEY-----" - ) - signature = ( - "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" - "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" - ) - - payload = ( - '[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' - 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' - 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' - ) - assert ( - verifier._check_signature( - payload=payload, public_key=public_key, signature=signature - ) - is None - ) - - def test_check_signature_invalid_signature(self): - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - public_key = ( - "-----BEGIN PUBLIC KEY-----\n" - "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" - "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" - "-----END PUBLIC KEY-----" - ) - # Changed the initial N for an M - signature = ( - "NEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" - "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" - ) - - payload = ( - 
'[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' - 'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' - 'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' - ) - with pytest.raises(utils.InvalidTokenLeakRequest) as exc: - verifier._check_signature( - payload=payload, public_key=public_key, signature=signature - ) - - assert str(exc.value) == "Invalid signature" - assert exc.value.reason == "invalid_signature" - - def test_check_signature_invalid_crypto(self): - verifier = utils.GitHubTokenScanningPayloadVerifier( - session=pretend.stub(), metrics=pretend.stub() - ) - public_key = "" - signature = "" - - payload = "yeah, nope, that won't pass" - - with pytest.raises(utils.InvalidTokenLeakRequest) as exc: - verifier._check_signature( - payload=payload, public_key=public_key, signature=signature - ) - - assert str(exc.value) == "Invalid cryptographic values" - assert exc.value.reason == "invalid_crypto" - - -def test_analyze_disclosure(monkeypatch): - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - user = pretend.stub() - database_macaroon = pretend.stub(user=user, id=12) - - check = pretend.call_recorder(lambda *a, **kw: database_macaroon) - delete = pretend.call_recorder(lambda *a, **kw: None) - svc = { - utils.IMetricsService: pretend.stub(increment=metrics_increment), - utils.IMacaroonService: pretend.stub( - check_if_macaroon_exists=check, delete_macaroon=delete - ), - } - - request = pretend.stub(find_service=lambda iface, context: svc[iface]) - - send_email = pretend.call_recorder(lambda *a, **kw: None) - monkeypatch.setattr(utils, "send_token_compromised_email_leak", send_email) - - utils.analyze_disclosure( - request=request, - disclosure_record={ - "type": "token", - "token": "pypi-1234", - "url": "http://example.com", - }, - origin="github", - ) - assert metrics == { - "warehouse.token_leak.github.recieved": 1, - "warehouse.token_leak.github.processed": 1, - "warehouse.token_leak.github.valid": 1, - } - assert send_email.calls == [ - pretend.call(request, user, public_url="http://example.com", origin="github") - ] - assert check.calls == [pretend.call(raw_macaroon="pypi-1234")] - assert delete.calls == [pretend.call(macaroon_id="12")] - - -def test_analyze_disclosure_wrong_record(): - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - svc = { - utils.IMetricsService: pretend.stub(increment=metrics_increment), - utils.IMacaroonService: pretend.stub(), - } - - request = pretend.stub(find_service=lambda iface, context: svc[iface]) - - utils.analyze_disclosure( - request=request, - disclosure_record={}, - origin="github", - ) - assert metrics == { - "warehouse.token_leak.github.recieved": 1, - "warehouse.token_leak.github.error.format": 1, - } - - -def test_analyze_disclosure_invalid_macaroon(): - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - check = pretend.raiser(utils.InvalidMacaroon("Bla", "bla")) - svc = { - utils.IMetricsService: pretend.stub(increment=metrics_increment), - utils.IMacaroonService: pretend.stub(check_if_macaroon_exists=check), - } - - request = pretend.stub(find_service=lambda iface, context: svc[iface]) - - utils.analyze_disclosure( - request=request, - disclosure_record={ - "type": "token", - "token": "pypi-1234", - "url": "http://example.com", - }, - origin="github", - ) - assert metrics == { - "warehouse.token_leak.github.recieved": 1, - 
"warehouse.token_leak.github.error.invalid": 1, - } - - -def test_analyze_disclosure_unknown_error(monkeypatch): - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - request = pretend.stub( - find_service=lambda *a, **k: pretend.stub(increment=metrics_increment) - ) - monkeypatch.setattr(utils, "_analyze_disclosure", pretend.raiser(ValueError())) - - with pytest.raises(ValueError): - utils.analyze_disclosure( - request=request, - disclosure_record={}, - origin="github", - ) - assert metrics == { - "warehouse.token_leak.github.error.unknown": 1, - } - - -def test_analyze_disclosures_wrong_type(): - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - metrics_service = pretend.stub(increment=metrics_increment) - - with pytest.raises(utils.InvalidTokenLeakRequest) as exc: - utils.analyze_disclosures( - disclosure_records={}, origin="yay", metrics=metrics_service - ) - - assert str(exc.value) == "Invalid format: payload is not a list" - assert exc.value.reason == "format" - - -def test_analyze_disclosures_raise(monkeypatch): - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - metrics_service = pretend.stub(increment=metrics_increment) - - task = pretend.stub(delay=pretend.call_recorder(lambda *a, **k: None)) - - monkeypatch.setattr(tasks, "analyze_disclosure_task", task) - - utils.analyze_disclosures( - disclosure_records=[1, 2, 3], origin="yay", metrics=metrics_service - ) - - assert task.delay.calls == [ - pretend.call(disclosure_record=1, origin="yay"), - pretend.call(disclosure_record=2, origin="yay"), - pretend.call(disclosure_record=3, origin="yay"), - ] diff --git a/tests/unit/integration/github/test_views.py b/tests/unit/integration/github/test_views.py deleted file mode 100644 --- a/tests/unit/integration/github/test_views.py +++ /dev/null @@ -1,171 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import collections -import json - -import pretend - -from warehouse.integrations.github import utils, views - - -class TestGitHubDiscloseToken: - def test_github_disclose_token(self, pyramid_request, monkeypatch): - - pyramid_request.headers = { - "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", - "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", - } - metrics = pretend.stub() - - pyramid_request.body = "[1, 2, 3]" - pyramid_request.json_body = [1, 2, 3] - pyramid_request.registry.settings = {"github.token": "token"} - pyramid_request.find_service = lambda *a, **k: metrics - - http = pyramid_request.http = pretend.stub() - - verify = pretend.call_recorder(lambda **k: True) - verifier = pretend.stub(verify=verify) - verifier_cls = pretend.call_recorder(lambda **k: verifier) - monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) - - analyze_disclosures = pretend.call_recorder(lambda **k: None) - monkeypatch.setattr(utils, "analyze_disclosures", analyze_disclosures) - - response = views.github_disclose_token(pyramid_request) - - assert response.status_code == 204 - assert verifier_cls.calls == [ - pretend.call(session=http, metrics=metrics, api_token="token") - ] - assert verify.calls == [ - pretend.call(payload="[1, 2, 3]", key_id="foo", signature="bar") - ] - assert analyze_disclosures.calls == [ - pretend.call(disclosure_records=[1, 2, 3], origin="github", metrics=metrics) - ] - - def test_github_disclose_token_no_token(self, pyramid_request, monkeypatch): - - pyramid_request.headers = { - "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", - "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", - } - metrics = pretend.stub() - - pyramid_request.body = "[1, 2, 3]" - pyramid_request.json_body = [1, 2, 3] - pyramid_request.registry.settings = {} - pyramid_request.find_service = lambda *a, **k: metrics - pyramid_request.http = pretend.stub() - - verify = pretend.call_recorder(lambda **k: True) - verifier = pretend.stub(verify=verify) - verifier_cls = pretend.call_recorder(lambda **k: verifier) - monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) - - analyze_disclosures = pretend.call_recorder(lambda **k: None) - monkeypatch.setattr(utils, "analyze_disclosures", analyze_disclosures) - - response = views.github_disclose_token(pyramid_request) - - assert response.status_code == 204 - - def test_github_disclose_token_verify_fail(self, monkeypatch, pyramid_request): - - pyramid_request.headers = { - "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", - "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", - } - metrics = pretend.stub() - - pyramid_request.body = "[1, 2, 3]" - pyramid_request.find_service = lambda *a, **k: metrics - pyramid_request.registry.settings = {"github.token": "token"} - - pyramid_request.http = pretend.stub() - - verify = pretend.call_recorder(lambda **k: False) - verifier = pretend.stub(verify=verify) - verifier_cls = pretend.call_recorder(lambda **k: verifier) - monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) - - response = views.github_disclose_token(pyramid_request) - - assert response.status_int == 400 - - def test_github_disclose_token_verify_invalid_json(self, monkeypatch): - verify = pretend.call_recorder(lambda **k: True) - verifier = pretend.stub(verify=verify) - verifier_cls = pretend.call_recorder(lambda **k: verifier) - monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - # We need to raise on a property access, can't do that with a 
stub. - class Request: - headers = { - "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", - "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", - } - body = "[" - - @property - def json_body(self): - return json.loads(self.body) - - def find_service(self, *a, **k): - return pretend.stub(increment=metrics_increment) - - response = pretend.stub(status_int=200) - http = pretend.stub() - registry = pretend.stub(settings={"github.token": "token"}) - - request = Request() - response = views.github_disclose_token(request) - - assert response.status_int == 400 - assert metrics == {"warehouse.token_leak.github.error.payload.json_error": 1} - - def test_github_disclose_token_wrong_payload(self, pyramid_request, monkeypatch): - pyramid_request.headers = { - "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", - "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", - } - - metrics = collections.Counter() - - def metrics_increment(key): - metrics.update([key]) - - metrics_service = pretend.stub(increment=metrics_increment) - - pyramid_request.body = "{}" - pyramid_request.json_body = {} - pyramid_request.registry.settings = {"github.token": "token"} - pyramid_request.find_service = lambda *a, **k: metrics_service - - pyramid_request.http = pretend.stub() - - verify = pretend.call_recorder(lambda **k: True) - verifier = pretend.stub(verify=verify) - verifier_cls = pretend.call_recorder(lambda **k: verifier) - monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) - - response = views.github_disclose_token(pyramid_request) - - assert response.status_code == 400 - assert metrics == {"warehouse.token_leak.github.error.format": 1} diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -72,16 +72,13 @@ def test_find_macaroon(self, user_service, macaroon_service): def test_find_userid_no_macaroon(self, macaroon_service): assert macaroon_service.find_userid(None) is None - @pytest.fixture - def raw_macaroon(self): - return pymacaroons.Macaroon( + def test_find_userid_invalid_macaroon(self, macaroon_service): + raw_macaroon = pymacaroons.Macaroon( location="fake location", identifier=str(uuid4()), key=b"fake key", version=pymacaroons.MACAROON_V2, ).serialize() - - def test_find_userid_invalid_macaroon(self, macaroon_service, raw_macaroon): raw_macaroon = f"pypi-{raw_macaroon}" assert macaroon_service.find_userid(raw_macaroon) is None @@ -105,13 +102,26 @@ def test_find_userid(self, macaroon_service): assert user.id == user_id - def test_verify_unprefixed_macaroon(self, macaroon_service, raw_macaroon): + def test_verify_unprefixed_macaroon(self, macaroon_service): + raw_macaroon = pymacaroons.Macaroon( + location="fake location", + identifier=str(uuid4()), + key=b"fake key", + version=pymacaroons.MACAROON_V2, + ).serialize() + with pytest.raises(services.InvalidMacaroon): macaroon_service.verify( raw_macaroon, pretend.stub(), pretend.stub(), pretend.stub() ) - def test_verify_no_macaroon(self, macaroon_service, raw_macaroon): + def test_verify_no_macaroon(self, macaroon_service): + raw_macaroon = pymacaroons.Macaroon( + location="fake location", + identifier=str(uuid4()), + key=b"fake key", + version=pymacaroons.MACAROON_V2, + ).serialize() raw_macaroon = f"pypi-{raw_macaroon}" with pytest.raises(services.InvalidMacaroon): @@ -228,50 +238,3 @@ def test_get_macaroon_by_description(self, macaroon_service): macaroon_service.get_macaroon_by_description(user.id, macaroon.description) == dm ) - - def 
test_check_if_macaroon_exists_unprefixed_macaroon( - self, macaroon_service, raw_macaroon - ): - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists(raw_macaroon) - - def test_check_if_macaroon_exists_no_macaroon(self, macaroon_service, raw_macaroon): - raw_macaroon = f"pypi-{raw_macaroon}" - - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists(raw_macaroon) - - def test_check_if_macaroon_exists_invalid_macaroon( - self, monkeypatch, user_service, macaroon_service - ): - user = UserFactory.create() - raw_macaroon, _ = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", {"fake": "caveats"} - ) - - verifier_obj = pretend.stub( - verify_signature=pretend.raiser(services.InvalidMacaroon) - ) - verifier_cls = pretend.call_recorder(lambda *a, **k: verifier_obj) - monkeypatch.setattr(services, "Verifier", verifier_cls) - - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists(raw_macaroon) - - def test_check_if_macaroon_exists_malformed_macaroon(self, macaroon_service): - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists("pypi-thiswillnotdeserialize") - - def test_check_if_macaroon_exists_valid_macaroon( - self, monkeypatch, macaroon_service - ): - user = UserFactory.create() - raw_macaroon, data_macaroon = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", {"fake": "caveats"} - ) - - verifier_obj = pretend.stub(verify_signature=lambda k: None) - verifier_cls = pretend.call_recorder(lambda *a, **k: verifier_obj) - monkeypatch.setattr(services, "Verifier", verifier_cls) - - assert macaroon_service.check_if_macaroon_exists(raw_macaroon) is data_macaroon diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -231,6 +231,7 @@ def __init__(self): "token.default.max_age": 21600, "warehouse.xmlrpc.client.ratelimit_string": "3600 per hour", } + if environment == config.Environment.development: expected_settings.update( { @@ -303,7 +304,7 @@ def __init__(self): pretend.call(".legacy.api.xmlrpc.cache"), pretend.call("pyramid_rpc.xmlrpc"), pretend.call(".legacy.action_routing"), - pretend.call(".predicates"), + pretend.call(".domain"), pretend.call(".i18n"), pretend.call(".db"), pretend.call(".tasks"), diff --git a/tests/unit/test_predicates.py b/tests/unit/test_domain.py similarity index 53% rename from tests/unit/test_predicates.py rename to tests/unit/test_domain.py --- a/tests/unit/test_predicates.py +++ b/tests/unit/test_domain.py @@ -13,9 +13,7 @@ import pretend import pytest -from pyramid.exceptions import ConfigurationError - -from warehouse.predicates import DomainPredicate, HeadersPredicate, includeme +from warehouse.domain import DomainPredicate, includeme class TestDomainPredicate: @@ -41,49 +39,10 @@ def test_invalid_value(self): assert not predicate(None, pretend.stub(domain="pypi.io")) -class TestHeadersPredicate: - @pytest.mark.parametrize( - ("value", "expected"), - [ - (["Foo", "Bar"], "header Foo, header Bar"), - (["Foo", "Bar:baz"], "header Foo, header Bar=baz"), - ], - ) - def test_text(self, value, expected): - predicate = HeadersPredicate(value, None) - assert predicate.text() == expected - assert predicate.phash() == expected - - def test_when_empty(self): - with pytest.raises(ConfigurationError): - HeadersPredicate([], None) - - @pytest.mark.parametrize( - "value", - [["Foo", "Bar"], ["Foo", 
"Bar:baz"]], - ) - def test_valid_value(self, value): - predicate = HeadersPredicate(value, None) - assert predicate(None, pretend.stub(headers={"Foo": "a", "Bar": "baz"})) - - @pytest.mark.parametrize( - "value", - [["Foo", "Baz"], ["Foo", "Bar:foo"]], - ) - def test_invalid_value(self, value): - predicate = HeadersPredicate(value, None) - assert not predicate(None, pretend.stub(headers={"Foo": "a", "Bar": "baz"})) - - def test_includeme(): config = pretend.stub( - add_route_predicate=pretend.call_recorder(lambda name, pred: None), - add_view_predicate=pretend.call_recorder(lambda name, pred: None), + add_route_predicate=pretend.call_recorder(lambda name, pred: None) ) includeme(config) assert config.add_route_predicate.calls == [pretend.call("domain", DomainPredicate)] - - assert config.add_view_predicate.calls == [ - pretend.call("require_headers", HeadersPredicate) - ] diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -326,11 +326,6 @@ def add_policy(name, filename): read_only=True, domain=warehouse, ), - pretend.call( - "integrations.github.disclose-token", - "/_/github/disclose-token", - domain=warehouse, - ), pretend.call("legacy.api.simple.index", "/simple/", domain=warehouse), pretend.call( "legacy.api.simple.detail",
API token failed to work after #7124 fix <!-- NOTE: This issue should be for problems with PyPI itself, including: * pypi.org * test.pypi.org * files.pythonhosted.org This issue should NOT be for a project installed from PyPI. If you are having an issue with a specific package, you should reach out to the maintainers of that project directly instead. Furthermore, this issue should NOT be for any non-PyPI properties (like python.org, docs.python.org, etc.) --> **Describe the bug** <!-- A clear and concise description the bug --> (1) python3 -m twine upload --repository testpypi dist/* ... HTTPError: 403 Forbidden from https://test.pypi.org/legacy/ Invalid API Token: invalid macaroon!r This can be reproduced in pypi.org and test.pypi.org (2) Also, there is a minor typo here: https://test.pypi.org/manage/account/token/ After API token is created, under "Use this token" Set your password to the token value, including the **test**pypi- prefix It should be: Set your password to the token value, including the pypi- prefix **Expected behavior** <!-- A clear and concise description of what you expected to happen --> python3 -m twine upload --repository testpypi dist/* ... Successfully installed ... **To Reproduce** <!-- Steps to reproduce the bug, or a link to PyPI where the bug is visible --> Follow the instruction here: https://packaging.python.org/tutorials/packaging-projects/ **My Platform** <!-- Any details about your specific platform: * If the problem is in the browser, what browser, version, and OS? * If the problem is with a command-line tool, what version of that tool? * If the problem is with connecting to PyPI, include some details about your network, including SSL/TLS implementation in use, internet service provider, and if there are any firewalls or proxies in use. --> Mac OS Python 3.8.3. setuptools-50.3.0 wheel-0.35.1 twine-3.2.0 **Additional context** <!-- Add any other context, links, etc. about the feature here. -->
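For anyone hitting the same 403 before the fix lands: token uploads require the literal username `__token__` and the full token, including the `pypi-` prefix (used on both PyPI and TestPyPI), as the password. A minimal sketch of a working `~/.pypirc`, assuming twine reads credentials from that file; the token value is a placeholder:

```ini
# ~/.pypirc -- illustrative values; replace the password with a real token
[distutils]
index-servers =
    testpypi

[testpypi]
repository = https://test.pypi.org/legacy/
username = __token__
# Keep the full value, including the pypi- prefix:
password = pypi-AgENdGVzdC5weXBpLm9yZw-placeholder
```

With that in place, `python3 -m twine upload --repository testpypi dist/*` picks the credentials up automatically.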
2020-09-15T05:02:35Z
[]
[]
pypi/warehouse
8,563
pypi__warehouse-8563
[ "6051" ]
d15a8d238e797eefc3d8aeba42644cd0a892f00a
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -163,6 +163,7 @@ def configure(settings=None): maybe_set( settings, "warehouse.release_files_table", "WAREHOUSE_RELEASE_FILES_TABLE" ) + maybe_set(settings, "github.token", "GITHUB_TOKEN") maybe_set(settings, "warehouse.trending_table", "WAREHOUSE_TRENDING_TABLE") maybe_set(settings, "celery.broker_url", "BROKER_URL") maybe_set(settings, "celery.result_url", "REDIS_URL") @@ -371,8 +372,8 @@ def configure(settings=None): # Register support for our legacy action URLs config.include(".legacy.action_routing") - # Register support for our domain predicates - config.include(".domain") + # Register support for our custom predicates + config.include(".predicates") # Register support for template views. config.add_directive("add_template_view", template_view, action_wrap=False) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -207,6 +207,11 @@ def send_password_compromised_email_hibp(request, user): return {} +@_email("token-compromised-leak", allow_unverified=True) +def send_token_compromised_email_leak(request, user, *, public_url, origin): + return {"username": user.username, "public_url": public_url, "origin": origin} + + @_email("account-deleted") def send_account_deletion_email(request, user): return {"username": user.username} diff --git a/warehouse/integrations/__init__.py b/warehouse/integrations/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/warehouse/integrations/github/__init__.py b/warehouse/integrations/github/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/warehouse/integrations/github/tasks.py b/warehouse/integrations/github/tasks.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/tasks.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from warehouse import tasks +from warehouse.integrations.github import utils + + [email protected](ignore_result=True, acks_late=True) +def analyze_disclosure_task(request, disclosure_record, origin): + utils.analyze_disclosure( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) diff --git a/warehouse/integrations/github/utils.py b/warehouse/integrations/github/utils.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/utils.py @@ -0,0 +1,393 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import re +import time + +from typing import Optional + +import requests + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA +from cryptography.hazmat.primitives.hashes import SHA256 + +from warehouse.accounts.interfaces import IUserService +from warehouse.email import send_token_compromised_email_leak +from warehouse.macaroons.caveats import InvalidMacaroon +from warehouse.macaroons.interfaces import IMacaroonService +from warehouse.metrics import IMetricsService + + +class ExtractionFailed(Exception): + pass + + +class TokenLeakMatcher: + """ + A TokenLeakMatcher is linked to a specific regex pattern. When provided + a string that matches this pattern, the matcher can extract a token-like string + from it. 
+ """ + + name: str + pattern: re.Pattern + + def extract(self, text): + raise NotImplementedError + + +class PlainTextTokenLeakMatcher(TokenLeakMatcher): + name = "token" + # Macaroons are urlsafe_b64 encodeded so non-alphanumeric chars are - and _ + # https://github.com/ecordell/pymacaroons/blob/06b55110eda2fb192c130dee0bcedf8b124d1056/pymacaroons/serializers/binary_serializer.py#L32 + pattern = re.compile(r"pypi-[A-Za-z0-9-_=]+") + + def extract(self, text): + """ + From a string containing everything that was matched, extract the token + to check + """ + return text + + +TOKEN_LEAK_MATCHERS = { + matcher.name: matcher for matcher in [PlainTextTokenLeakMatcher()] +} + + +class InvalidTokenLeakRequest(Exception): + def __init__(self, message, reason): + self.reason = reason + super().__init__(message) + + +class TokenLeakDisclosureRequest: + def __init__(self, token: str, public_url: str): + self.token = token + self.public_url = public_url + + @classmethod + def from_api_record(cls, record, *, matchers=TOKEN_LEAK_MATCHERS): + + if not isinstance(record, dict): + raise InvalidTokenLeakRequest( + f"Record is not a dict but: {str(record)[:100]}", reason="format" + ) + + missing_keys = sorted({"token", "type", "url"} - set(record)) + if missing_keys: + raise InvalidTokenLeakRequest( + f"Record is missing attribute(s): {', '.join(missing_keys)}", + reason="format", + ) + + matcher_code = record["type"] + + matcher = matchers.get(matcher_code) + if not matcher: + raise InvalidTokenLeakRequest( + f"Matcher with code {matcher_code} not found. " + f"Available codes are: {', '.join(matchers)}", + reason="invalid_matcher", + ) + + try: + extracted_token = matcher.extract(record["token"]) + except ExtractionFailed: + raise InvalidTokenLeakRequest( + "Cannot extract token from recieved match", reason="extraction" + ) + + return cls(token=extracted_token, public_url=record["url"]) + + +class GitHubPublicKeyMetaAPIError(InvalidTokenLeakRequest): + pass + + +class CacheMiss(Exception): + pass + + +class PublicKeysCache: + """ + In-memory time-based cache. store with set(), retrieve with get(). + """ + + def __init__(self, cache_time): + self.cached_at = 0 + self.cache = None + self.cache_time = cache_time + + def get(self, now): + if not self.cache: + raise CacheMiss + + if self.cached_at + self.cache_time < now: + self.cache = None + raise CacheMiss + + return self.cache + + def set(self, now, value): + self.cached_at = now + self.cache = value + + +PUBLIC_KEYS_CACHE_TIME = 60 * 30 # 30 minutes +PUBLIC_KEYS_CACHE = PublicKeysCache(cache_time=PUBLIC_KEYS_CACHE_TIME) + + +class GitHubTokenScanningPayloadVerifier: + """ + Checks payload signature using: + - `requests` for HTTP calls + - `cryptography` for signature verification + """ + + def __init__( + self, + *, + session, + metrics, + api_token: Optional[str] = None, + public_keys_cache=PUBLIC_KEYS_CACHE, + ): + self._metrics = metrics + self._session = session + self._api_token = api_token + self._public_keys_cache = public_keys_cache + + def verify(self, *, payload, key_id, signature): + + public_key = None + try: + public_keys = self._get_cached_public_keys() + public_key = self._check_public_key( + github_public_keys=public_keys, key_id=key_id + ) + except (CacheMiss, InvalidTokenLeakRequest): + # No cache or outdated cache, it's ok, we'll do a real call. 
+ # Just record a metric so that we can know if all calls lead to + # cache misses + self._metrics.increment("warehouse.token_leak.github.auth.cache.miss") + else: + self._metrics.increment("warehouse.token_leak.github.auth.cache.hit") + + try: + if not public_key: + pubkey_api_data = self._retrieve_public_key_payload() + public_keys = self._extract_public_keys(pubkey_api_data) + public_key = self._check_public_key( + github_public_keys=public_keys, key_id=key_id + ) + + self._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + except InvalidTokenLeakRequest as exc: + self._metrics.increment( + f"warehouse.token_leak.github.auth.error.{exc.reason}" + ) + return False + + self._metrics.increment("warehouse.token_leak.github.auth.success") + return True + + def _get_cached_public_keys(self): + return self._public_keys_cache.get(now=time.time()) + + def _headers_auth(self): + if not self._api_token: + return {} + return {"Authorization": f"token {self._api_token}"} + + def _retrieve_public_key_payload(self): + + token_scanning_pubkey_api_url = ( + "https://api.github.com/meta/public_keys/token_scanning" + ) + + try: + response = self._session.get( + token_scanning_pubkey_api_url, headers=self._headers_auth() + ) + response.raise_for_status() + return response.json() + except requests.HTTPError as exc: + raise GitHubPublicKeyMetaAPIError( + f"Invalid response code {response.status_code}: {response.text[:100]}", + f"public_key_api.status.{response.status_code}", + ) from exc + except json.JSONDecodeError as exc: + raise GitHubPublicKeyMetaAPIError( + f"Non-JSON response received: {response.text[:100]}", + "public_key_api.invalid_json", + ) from exc + except requests.RequestException as exc: + raise GitHubPublicKeyMetaAPIError( + "Could not connect to GitHub", "public_key_api.network_error" + ) from exc + + def _extract_public_keys(self, pubkey_api_data): + if not isinstance(pubkey_api_data, dict): + raise GitHubPublicKeyMetaAPIError( + f"Payload is not a dict but: {str(pubkey_api_data)[:100]}", + "public_key_api.format_error", + ) + try: + public_keys = pubkey_api_data["public_keys"] + except KeyError: + raise GitHubPublicKeyMetaAPIError( + "Payload misses 'public_keys' attribute", "public_key_api.format_error" + ) + + if not isinstance(public_keys, list): + raise GitHubPublicKeyMetaAPIError( + "Payload 'public_keys' attribute is not a list", + "public_key_api.format_error", + ) + + expected_attributes = {"key", "key_identifier"} + result = [] + for public_key in public_keys: + + if not isinstance(public_key, dict): + raise GitHubPublicKeyMetaAPIError( + f"Key is not a dict but: {public_key}", + "public_key_api.format_error", + ) + + attributes = set(public_key) + if not expected_attributes <= attributes: + raise GitHubPublicKeyMetaAPIError( + "Missing attribute in key: " + f"{sorted(expected_attributes - attributes)}", + "public_key_api.format_error", + ) + + result.append( + {"key": public_key["key"], "key_id": public_key["key_identifier"]} + ) + self._public_keys_cache.set(now=time.time(), value=result) + return result + + def _check_public_key(self, github_public_keys, key_id): + for record in github_public_keys: + if record["key_id"] == key_id: + return record["key"] + + raise InvalidTokenLeakRequest( + f"Key {key_id} not found in github public keys", reason="wrong_key_id" + ) + + def _check_signature(self, payload, public_key, signature): + try: + loaded_public_key = serialization.load_pem_public_key( + data=public_key.encode("utf-8"), 
backend=default_backend() + ) + loaded_public_key.verify( + signature=base64.b64decode(signature), + data=payload, + # This validates the ECDSA and SHA256 part + signature_algorithm=ECDSA(algorithm=SHA256()), + ) + except InvalidSignature as exc: + raise InvalidTokenLeakRequest( + "Invalid signature", "invalid_signature" + ) from exc + except Exception as exc: + # Maybe the key is not a valid ECDSA key, maybe the data is not properly + # padded, etc. So many things can go wrong... + raise InvalidTokenLeakRequest( + "Invalid cryptographic values", "invalid_crypto" + ) from exc + + +def _analyze_disclosure(request, disclosure_record, origin): + + metrics = request.find_service(IMetricsService, context=None) + + metrics.increment(f"warehouse.token_leak.{origin}.recieved") + + try: + disclosure = TokenLeakDisclosureRequest.from_api_record( + record=disclosure_record + ) + except InvalidTokenLeakRequest as exc: + metrics.increment(f"warehouse.token_leak.{origin}.error.{exc.reason}") + return + + macaroon_service = request.find_service(IMacaroonService, context=None) + try: + database_macaroon = macaroon_service.find_from_raw( + raw_macaroon=disclosure.token + ) + except InvalidMacaroon: + metrics.increment(f"warehouse.token_leak.{origin}.error.invalid") + return + + metrics.increment(f"warehouse.token_leak.{origin}.valid") + + macaroon_service.delete_macaroon(macaroon_id=str(database_macaroon.id)) + + send_token_compromised_email_leak( + request, + database_macaroon.user, + public_url=disclosure.public_url, + origin=origin, + ) + user_service = request.find_service(IUserService, context=None) + + user_service.record_event( + database_macaroon.user.id, + tag="account:api_token:removed_leak", + ip_address="127.0.0.1", + additional={ + "macaroon_id": str(database_macaroon.id), + "public_url": disclosure.public_url, + "permissions": database_macaroon.caveats.get("permissions", "user"), + "description": database_macaroon.description, + }, + ) + metrics.increment(f"warehouse.token_leak.{origin}.processed") + + +def analyze_disclosure(request, disclosure_record, origin): + try: + _analyze_disclosure( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) + except Exception: + metrics = request.find_service(IMetricsService, context=None) + metrics.increment(f"warehouse.token_leak.{origin}.error.unknown") + raise + + +def analyze_disclosures(request, disclosure_records, origin, metrics): + from warehouse.integrations.github import tasks + + if not isinstance(disclosure_records, list): + metrics.increment(f"warehouse.token_leak.{origin}.error.format") + raise InvalidTokenLeakRequest("Invalid format: payload is not a list", "format") + + for disclosure_record in disclosure_records: + request.task(tasks.analyze_disclosure_task).delay( + disclosure_record=disclosure_record, origin=origin + ) diff --git a/warehouse/integrations/github/views.py b/warehouse/integrations/github/views.py new file mode 100644 --- /dev/null +++ b/warehouse/integrations/github/views.py @@ -0,0 +1,73 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from pyramid.response import Response +from pyramid.view import view_config + +from warehouse.integrations.github import utils +from warehouse.metrics import IMetricsService + + +@view_config( + require_methods=["POST"], + require_csrf=False, + renderer="json", + route_name="integrations.github.disclose-token", + # If those headers are missing, response will be a 404 + require_headers=["GITHUB-PUBLIC-KEY-IDENTIFIER", "GITHUB-PUBLIC-KEY-SIGNATURE"], + has_translations=False, +) +def github_disclose_token(request): + # GitHub calls this API view when they have identified a string matching + # the regular expressions we provided them. + # Our job is to validate we're talking to github, check if the string contains + # valid credentials and, if they do, invalidate them and warn the owner + + # The documentation for this process is at + # https://developer.github.com/partnerships/token-scanning/ + + body = request.body + + # Thanks to the predicates, we know the headers we need are defined. + key_id = request.headers.get("GITHUB-PUBLIC-KEY-IDENTIFIER") + signature = request.headers.get("GITHUB-PUBLIC-KEY-SIGNATURE") + metrics = request.find_service(IMetricsService, context=None) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=request.http, + metrics=metrics, + api_token=request.registry.settings.get("github.token"), + ) + + if not verifier.verify(payload=body, key_id=key_id, signature=signature): + return Response(status=400) + + try: + disclosures = request.json_body + except json.decoder.JSONDecodeError: + metrics.increment("warehouse.token_leak.github.error.payload.json_error") + return Response(status=400) + + try: + utils.analyze_disclosures( + request=request, + disclosure_records=disclosures, + origin="github", + metrics=metrics, + ) + except utils.InvalidTokenLeakRequest: + return Response(status=400) + + # 204 No Content: we acknowledge but we won't comment on the outcome. + return Response(status=204) diff --git a/warehouse/macaroons/interfaces.py b/warehouse/macaroons/interfaces.py --- a/warehouse/macaroons/interfaces.py +++ b/warehouse/macaroons/interfaces.py @@ -23,6 +23,12 @@ def _extract_raw_macaroon(raw_macaroon): wrong prefix. """ + def find_from_raw(raw_macaroon): + """ + Returns a macaroon model from the DB from a raw macaroon, or raises + InvalidMacaroon if not found or for malformed macaroons. + """ + def find_macaroon(macaroon_id): """ Returns a macaroon model from the DB by its identifier. 
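The security-critical piece of the verifier above is `_check_signature`: GitHub signs the raw request body with an ECDSA key over a SHA-256 digest and ships the base64-encoded signature in the `GITHUB-PUBLIC-KEY-SIGNATURE` header. A standalone sketch of that check, using the same `cryptography` primitives the patch imports (the wrapper function is illustrative, not part of the patch):

```python
import base64

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ec import ECDSA
from cryptography.hazmat.primitives.hashes import SHA256


def is_valid_github_signature(
    payload: bytes, public_key_pem: str, signature_b64: str
) -> bool:
    """Return True if signature_b64 is a valid ECDSA/SHA-256 signature of payload."""
    # Load the PEM key published by GitHub's meta API.
    public_key = serialization.load_pem_public_key(
        data=public_key_pem.encode("utf-8"), backend=default_backend()
    )
    try:
        # verify() returns None on success and raises InvalidSignature on mismatch.
        public_key.verify(
            signature=base64.b64decode(signature_b64),
            data=payload,
            signature_algorithm=ECDSA(algorithm=SHA256()),
        )
    except InvalidSignature:
        return False
    return True
```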
diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py
--- a/warehouse/macaroons/services.py
+++ b/warehouse/macaroons/services.py
@@ -97,6 +97,16 @@ def find_userid(self, raw_macaroon):
 
         return dm.user.id
 
+    def find_from_raw(self, raw_macaroon):
+        """
+        Returns a DB macaroon matching the input, or raises InvalidMacaroon
+        """
+        m = self._deserialize_raw_macaroon(raw_macaroon)
+        dm = self.find_macaroon(m.identifier.decode())
+        if not dm:
+            raise InvalidMacaroon("Macaroon not found")
+        return dm
+
     def verify(self, raw_macaroon, context, principals, permission):
         """
         Returns True if the given raw (serialized) macaroon is
diff --git a/warehouse/domain.py b/warehouse/predicates.py
similarity index 61%
rename from warehouse/domain.py
rename to warehouse/predicates.py
--- a/warehouse/domain.py
+++ b/warehouse/predicates.py
@@ -10,6 +10,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from typing import List
+
+from pyramid import predicates
+from pyramid.exceptions import ConfigurationError
 from pyramid.util import is_same_domain
 
 
@@ -31,5 +35,26 @@ def __call__(self, info, request):
         return is_same_domain(request.domain, self.val)
 
 
+class HeadersPredicate:
+    def __init__(self, val: List[str], config):
+        if not val:
+            raise ConfigurationError(
+                "Expected at least one value in headers predicate"
+            )
+
+        self.sub_predicates = [
+            predicates.HeaderPredicate(subval, config) for subval in val
+        ]
+
+    def text(self):
+        return ", ".join(sub.text() for sub in self.sub_predicates)
+
+    phash = text
+
+    def __call__(self, context, request):
+        return all(sub(context, request) for sub in self.sub_predicates)
+
+
 def includeme(config):
     config.add_route_predicate("domain", DomainPredicate)
+    config.add_view_predicate("require_headers", HeadersPredicate)
diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -164,7 +164,6 @@ def includeme(config):
         "/account/verify-project-role/",
         domain=warehouse,
     )
-
     # Management (views for logged-in users)
     config.add_route("manage.account", "/manage/account/", domain=warehouse)
     config.add_route(
@@ -326,6 +325,13 @@ def includeme(config):
         read_only=True,
         domain=warehouse,
     )
+    # Integration URLs
+
+    config.add_route(
+        "integrations.github.disclose-token",
+        "/_/github/disclose-token",
+        domain=warehouse,
+    )
 
     # Legacy URLs
     config.add_route("legacy.api.simple.index", "/simple/", domain=warehouse)
diff --git a/warehouse/tasks.py b/warehouse/tasks.py
--- a/warehouse/tasks.py
+++ b/warehouse/tasks.py
@@ -12,6 +12,7 @@
 
 import functools
 import logging
+import time
 import urllib.parse
 
 import celery
@@ -90,6 +91,8 @@ def get_request(self):
             registry = self.app.pyramid_config.registry
             env = pyramid.scripting.prepare(registry=registry)
             env["request"].tm = transaction.TransactionManager(explicit=True)
+            env["request"].timings = {"new_request_start": time.time() * 1000}
+            env["request"].remote_addr = "127.0.0.1"
         self.request.update(pyramid_env=env)
 
         return self.request.pyramid_env["request"]
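End to end, the new `/_/github/disclose-token` route plus view define a small webhook contract: two mandatory headers (without them the view predicate yields a 404), a JSON list of `{type, token, url}` records in the body, and a 204 on acceptance or a 400 on any verification or format failure. A hypothetical client call sketching that contract — the URL, key id, and signature below are placeholders, and a real request is only accepted when the signature verifies against GitHub's published keys:

```python
import requests

# Illustrative only: all values below are placeholders, not real credentials.
resp = requests.post(
    "https://pypi.org/_/github/disclose-token",  # route added by the patch
    headers={
        "GITHUB-PUBLIC-KEY-IDENTIFIER": "<key id from /meta/public_keys/token_scanning>",
        "GITHUB-PUBLIC-KEY-SIGNATURE": "<base64 ECDSA signature of the raw body>",
    },
    json=[
        {
            # "token" is the matcher name registered in TOKEN_LEAK_MATCHERS
            "type": "token",
            "token": "pypi-<leaked token>",
            "url": "https://github.com/org/repo/blob/<sha>/production.env",
        }
    ],
)
# 204: payload accepted for async analysis; 400: bad signature or bad format
assert resp.status_code in (204, 400)
```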
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py --- a/tests/unit/accounts/test_core.py +++ b/tests/unit/accounts/test_core.py @@ -325,6 +325,7 @@ def test_includeme(monkeypatch): set_authentication_policy=pretend.call_recorder(lambda p: None), set_authorization_policy=pretend.call_recorder(lambda p: None), maybe_dotted=pretend.call_recorder(lambda path: path), + add_route_predicate=pretend.call_recorder(lambda name, cls: None), ) accounts.includeme(config) diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -797,6 +797,80 @@ def test_password_compromised_email_hibp( ] +class TestTokenLeakEmail: + @pytest.mark.parametrize("verified", [True, False]) + def test_token_leak_email( + self, pyramid_request, pyramid_config, monkeypatch, verified + ): + stub_user = pretend.stub( + id=3, + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=verified), + ) + pyramid_request.user = None + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub(one=lambda: stub_user) + ), + ) + + subject_renderer = pyramid_config.testing_add_renderer( + "email/token-compromised-leak/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/token-compromised-leak/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/token-compromised-leak/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + result = email.send_token_compromised_email_leak( + pyramid_request, stub_user, public_url="http://example.com", origin="github" + ) + + assert result == { + "username": "username", + "public_url": "http://example.com", + "origin": "github", + } + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{stub_user.username} <{stub_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": 3, + "ip_address": "1.2.3.4", + "additional": { + "from_": None, + "to": "[email protected]", + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] + + class TestPasswordCompromisedEmail: @pytest.mark.parametrize("verified", [True, False]) def test_password_compromised_email( diff --git a/tests/unit/integration/__init__.py b/tests/unit/integration/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/integration/github/__init__.py b/tests/unit/integration/github/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/integration/github/test_tasks.py b/tests/unit/integration/github/test_tasks.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/test_tasks.py @@ -0,0 +1,38 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.integrations.github import tasks, utils + + +def test_analyze_disclosure_task(monkeypatch): + analyze_disclosure = pretend.call_recorder(lambda *a, **k: None) + monkeypatch.setattr(utils, "analyze_disclosure", analyze_disclosure) + + request = pretend.stub() + disclosure_record = pretend.stub() + origin = pretend.stub() + + tasks.analyze_disclosure_task( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) + + assert analyze_disclosure.calls == [ + pretend.call( + request=request, + disclosure_record=disclosure_record, + origin=origin, + ) + ] diff --git a/tests/unit/integration/github/test_utils.py b/tests/unit/integration/github/test_utils.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/test_utils.py @@ -0,0 +1,719 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import collections +import json +import time +import uuid + +import pretend +import pytest +import requests + +from warehouse.integrations.github import tasks, utils + + +def test_token_leak_matcher_extract(): + with pytest.raises(NotImplementedError): + utils.TokenLeakMatcher().extract("a") + + +def test_plain_text_token_leak_matcher_extract(): + assert utils.PlainTextTokenLeakMatcher().extract("a") == "a" + + +def test_invalid_token_leak_request(): + exc = utils.InvalidTokenLeakRequest("a", "b") + + assert str(exc) == "a" + assert exc.reason == "b" + + [email protected]( + "record, error, reason", + [ + (None, "Record is not a dict but: None", "format"), + ({}, "Record is missing attribute(s): token, type, url", "format"), + ( + {"type": "not_found", "token": "a", "url": "b"}, + "Matcher with code not_found not found. Available codes are: failer, token", + "invalid_matcher", + ), + ( + {"type": "failer", "token": "a", "url": "b"}, + "Cannot extract token from recieved match", + "extraction", + ), + ], +) +def test_token_leak_disclosure_request_from_api_record_error(record, error, reason): + class MyFailingMatcher(utils.TokenLeakMatcher): + name = "failer" + + def extract(self, text): + raise utils.ExtractionFailed() + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + utils.TokenLeakDisclosureRequest.from_api_record( + record, matchers={"failer": MyFailingMatcher(), **utils.TOKEN_LEAK_MATCHERS} + ) + + assert str(exc.value) == error + assert exc.value.reason == reason + + +def test_token_leak_disclosure_request_from_api_record(): + request = utils.TokenLeakDisclosureRequest.from_api_record( + {"type": "token", "token": "pypi-1234", "url": "http://example.com"} + ) + + assert request.token == "pypi-1234" + assert request.public_url == "http://example.com" + + +class TestCache: + def test_set(self): + cache = utils.PublicKeysCache(cache_time=10) + cache.set(now=1, value="foo") + + assert cache.cached_at == 1 + assert cache.cache == "foo" + + def test_get_no_cache(self): + cache = utils.PublicKeysCache(cache_time=10) + + with pytest.raises(utils.CacheMiss): + cache.get(now=1) + + def test_get_old_cache(self): + cache = utils.PublicKeysCache(cache_time=10) + cache.set(now=5, value="foo") + + with pytest.raises(utils.CacheMiss): + cache.get(now=20) + + def test_get_valid(self): + cache = utils.PublicKeysCache(cache_time=10) + cache.set(now=5, value="foo") + + assert cache.get(now=10) == "foo" + + +class TestGitHubTokenScanningPayloadVerifier: + def test_init(self): + metrics = pretend.stub() + session = pretend.stub() + token = "api_token" + cache = utils.PublicKeysCache(cache_time=12) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, api_token=token, public_keys_cache=cache + ) + + assert verifier._session is session + assert verifier._metrics is metrics + assert verifier._api_token == token + assert verifier._public_keys_cache is cache + + def test_verify_cache_miss(self): + # Example taken from + # https://gist.github.com/ewjoachim/7dde11c31d9686ed6b4431c3ca166da2 + meta_payload = { + "public_keys": [ + { + "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + "is_current": True, + } + ] + } + response = pretend.stub( + json=lambda: meta_payload, raise_for_status=lambda: None + ) + session = 
pretend.stub(get=lambda *a, **k: response) + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + cache = utils.PublicKeysCache(cache_time=12) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, + metrics=metrics, + api_token="api-token", + public_keys_cache=cache, + ) + key_id = "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a" + signature = ( + "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + b'[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + b'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + b'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + assert ( + verifier.verify(payload=payload, key_id=key_id, signature=signature) is True + ) + + assert metrics.increment.calls == [ + pretend.call("warehouse.token_leak.github.auth.cache.miss"), + pretend.call("warehouse.token_leak.github.auth.success"), + ] + + def test_verify_cache_hit(self): + session = pretend.stub() + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + cache = utils.PublicKeysCache(cache_time=12) + cache.cached_at = time.time() + cache.cache = [ + { + "key_id": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + } + ] + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, + metrics=metrics, + api_token="api-token", + public_keys_cache=cache, + ) + + key_id = "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a" + signature = ( + "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + b'[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + b'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + b'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + assert ( + verifier.verify(payload=payload, key_id=key_id, signature=signature) is True + ) + + assert metrics.increment.calls == [ + pretend.call("warehouse.token_leak.github.auth.cache.hit"), + pretend.call("warehouse.token_leak.github.auth.success"), + ] + + def test_verify_error(self): + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + cache = utils.PublicKeysCache(cache_time=12) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=metrics, + api_token="api-token", + public_keys_cache=cache, + ) + verifier._retrieve_public_key_payload = pretend.raiser( + utils.InvalidTokenLeakRequest("Bla", "bla") + ) + + assert verifier.verify(payload={}, key_id="a", signature="a") is False + + assert metrics.increment.calls == [ + pretend.call("warehouse.token_leak.github.auth.cache.miss"), + pretend.call("warehouse.token_leak.github.auth.error.bla"), + ] + + def test_headers_auth_no_token(self): + headers = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + api_token=None, + public_keys_cache=pretend.stub(), + )._headers_auth() + assert headers == {} + + def test_headers_auth_token(self): + headers = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + api_token="api-token", + public_keys_cache=pretend.stub(), + )._headers_auth() + assert headers == 
{"Authorization": "token api-token"} + + def test_retrieve_public_key_payload(self): + meta_payload = { + "public_keys": [ + { + "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + "is_current": True, + } + ] + } + response = pretend.stub( + json=lambda: meta_payload, raise_for_status=lambda: None + ) + session = pretend.stub(get=pretend.call_recorder(lambda *a, **k: response)) + metrics = pretend.stub(increment=pretend.call_recorder(lambda str: None)) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, + metrics=metrics, + api_token="api-token", + public_keys_cache=pretend.stub(), + ) + assert verifier._retrieve_public_key_payload() == meta_payload + assert session.get.calls == [ + pretend.call( + "https://api.github.com/meta/public_keys/token_scanning", + headers={"Authorization": "token api-token"}, + ) + ] + + def test_get_cached_public_key_cache_hit(self): + metrics = pretend.stub() + session = pretend.stub() + cache = utils.PublicKeysCache(cache_time=12) + cache_value = pretend.stub() + cache.set(now=time.time(), value=cache_value) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, public_keys_cache=cache + ) + + assert verifier._get_cached_public_keys() is cache_value + + def test_get_cached_public_key_cache_miss_no_cache(self): + metrics = pretend.stub() + session = pretend.stub() + cache = utils.PublicKeysCache(cache_time=12) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=metrics, public_keys_cache=cache + ) + + with pytest.raises(utils.CacheMiss): + verifier._get_cached_public_keys() + + def test_retrieve_public_key_payload_http_error(self): + response = pretend.stub( + status_code=418, + text="I'm a teapot", + raise_for_status=pretend.raiser(requests.HTTPError), + ) + session = pretend.stub( + get=lambda *a, **k: response, + ) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=pretend.stub(), public_keys_cache=pretend.stub() + ) + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + verifier._retrieve_public_key_payload() + + assert str(exc.value) == "Invalid response code 418: I'm a teapot" + assert exc.value.reason == "public_key_api.status.418" + + def test_retrieve_public_key_payload_json_error(self): + response = pretend.stub( + text="Still a non-json teapot", + json=pretend.raiser(json.JSONDecodeError("", "", 3)), + raise_for_status=lambda: None, + ) + session = pretend.stub(get=lambda *a, **k: response) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=pretend.stub(), public_keys_cache=pretend.stub() + ) + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + verifier._retrieve_public_key_payload() + + assert str(exc.value) == "Non-JSON response received: Still a non-json teapot" + assert exc.value.reason == "public_key_api.invalid_json" + + def test_retrieve_public_key_payload_connection_error(self): + session = pretend.stub(get=pretend.raiser(requests.ConnectionError)) + + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=session, metrics=pretend.stub(), public_keys_cache=pretend.stub() + ) + + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + verifier._retrieve_public_key_payload() + + assert str(exc.value) == 
"Could not connect to GitHub" + assert exc.value.reason == "public_key_api.network_error" + + def test_extract_public_keys(self): + meta_payload = { + "public_keys": [ + { + "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c" + "7b6c06b681aa86a874555f4a", + "key": "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----", + "is_current": True, + } + ] + } + cache = utils.PublicKeysCache(cache_time=12) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub(), public_keys_cache=cache + ) + + keys = verifier._extract_public_keys(pubkey_api_data=meta_payload) + + assert keys == [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcD" + "QgAE9MJJHnMfn2+H4xL4YaPDA4RpJqUq\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ" + "8qpVIW4clayyef9gWhFbNHWAA==\n-----END PUBLIC KEY-----", + "key_id": "90a421169f0a406205f1563a953312f0be" + "898d3c7b6c06b681aa86a874555f4a", + } + ] + assert cache.cache == keys + + @pytest.mark.parametrize( + "payload, expected", + [ + ([], "Payload is not a dict but: []"), + ({}, "Payload misses 'public_keys' attribute"), + ({"public_keys": None}, "Payload 'public_keys' attribute is not a list"), + ({"public_keys": [None]}, "Key is not a dict but: None"), + ( + {"public_keys": [{}]}, + "Missing attribute in key: ['key', 'key_identifier']", + ), + ( + {"public_keys": [{"key": "a"}]}, + "Missing attribute in key: ['key_identifier']", + ), + ( + {"public_keys": [{"key_identifier": "a"}]}, + "Missing attribute in key: ['key']", + ), + ], + ) + def test_extract_public_keys_error(self, payload, expected): + cache = utils.PublicKeysCache(cache_time=12) + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), metrics=pretend.stub(), public_keys_cache=cache + ) + + with pytest.raises(utils.GitHubPublicKeyMetaAPIError) as exc: + list(verifier._extract_public_keys(pubkey_api_data=payload)) + + assert exc.value.reason == "public_key_api.format_error" + assert str(exc.value) == expected + assert cache.cache is None + + def test_check_public_key(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + public_keys_cache=pretend.stub(), + ) + + keys = [ + {"key_id": "a", "key": "b"}, + {"key_id": "c", "key": "d"}, + ] + assert verifier._check_public_key(github_public_keys=keys, key_id="c") == "d" + + def test_check_public_key_error(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + public_keys_cache=pretend.stub(), + ) + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + verifier._check_public_key(github_public_keys=[], key_id="c") + + assert str(exc.value) == "Key c not found in github public keys" + assert exc.value.reason == "wrong_key_id" + + def test_check_signature(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + public_keys_cache=pretend.stub(), + ) + public_key = ( + "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----" + ) + signature = ( + "MEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + 
b'[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + b'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + b'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + assert ( + verifier._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + is None + ) + + def test_check_signature_invalid_signature(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + public_keys_cache=pretend.stub(), + ) + public_key = ( + "-----BEGIN PUBLIC KEY-----\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqU" + "q\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n" + "-----END PUBLIC KEY-----" + ) + # Changed the initial N for an M + signature = ( + "NEQCIAfgjgz6Ou/3DXMYZBervz1TKCHFsvwMcbuJhNZse622AiAG86/" + "cku2XdcmFWNHl2WSJi2fkE8t+auvB24eURaOd2A==" + ) + + payload = ( + b'[{"type":"github_oauth_token","token":"cb4985f91f740272c0234202299' + b'f43808034d7f5","url":" https://github.com/github/faketestrepo/blob/' + b'b0dd59c0b500650cacd4551ca5989a6194001b10/production.env"}]' + ) + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + verifier._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + + assert str(exc.value) == "Invalid signature" + assert exc.value.reason == "invalid_signature" + + def test_check_signature_invalid_crypto(self): + verifier = utils.GitHubTokenScanningPayloadVerifier( + session=pretend.stub(), + metrics=pretend.stub(), + public_keys_cache=pretend.stub(), + ) + public_key = "" + signature = "" + + payload = "yeah, nope, that won't pass" + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + verifier._check_signature( + payload=payload, public_key=public_key, signature=signature + ) + + assert str(exc.value) == "Invalid cryptographic values" + assert exc.value.reason == "invalid_crypto" + + +def test_analyze_disclosure(monkeypatch): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + user_id = uuid.UUID(bytes=b"0" * 16) + user = pretend.stub(id=user_id) + database_macaroon = pretend.stub( + user=user, id=12, caveats={"permissions": "user"}, description="foo" + ) + + find = pretend.call_recorder(lambda *a, **kw: database_macaroon) + delete = pretend.call_recorder(lambda *a, **kw: None) + record_event = pretend.call_recorder(lambda *a, **kw: None) + svc = { + utils.IMetricsService: pretend.stub(increment=metrics_increment), + utils.IMacaroonService: pretend.stub( + find_from_raw=find, delete_macaroon=delete + ), + utils.IUserService: pretend.stub(record_event=record_event), + } + + request = pretend.stub(find_service=lambda iface, context: svc[iface]) + + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr(utils, "send_token_compromised_email_leak", send_email) + + utils.analyze_disclosure( + request=request, + disclosure_record={ + "type": "token", + "token": "pypi-1234", + "url": "http://example.com", + }, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.recieved": 1, + "warehouse.token_leak.github.processed": 1, + "warehouse.token_leak.github.valid": 1, + } + assert send_email.calls == [ + pretend.call(request, user, public_url="http://example.com", origin="github") + ] + assert find.calls == [pretend.call(raw_macaroon="pypi-1234")] + assert delete.calls == [pretend.call(macaroon_id="12")] + assert record_event.calls == [ + pretend.call( + user_id, + 
tag="account:api_token:removed_leak", + ip_address="127.0.0.1", + additional={ + "macaroon_id": "12", + "public_url": "http://example.com", + "permissions": "user", + "description": "foo", + }, + ) + ] + + +def test_analyze_disclosure_wrong_record(): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + svc = { + utils.IMetricsService: pretend.stub(increment=metrics_increment), + utils.IMacaroonService: pretend.stub(), + } + + request = pretend.stub(find_service=lambda iface, context: svc[iface]) + + utils.analyze_disclosure( + request=request, + disclosure_record={}, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.recieved": 1, + "warehouse.token_leak.github.error.format": 1, + } + + +def test_analyze_disclosure_invalid_macaroon(): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + find = pretend.raiser(utils.InvalidMacaroon("Bla", "bla")) + svc = { + utils.IMetricsService: pretend.stub(increment=metrics_increment), + utils.IMacaroonService: pretend.stub(find_from_raw=find), + } + + request = pretend.stub(find_service=lambda iface, context: svc[iface]) + + utils.analyze_disclosure( + request=request, + disclosure_record={ + "type": "token", + "token": "pypi-1234", + "url": "http://example.com", + }, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.recieved": 1, + "warehouse.token_leak.github.error.invalid": 1, + } + + +def test_analyze_disclosure_unknown_error(monkeypatch): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + request = pretend.stub( + find_service=lambda *a, **k: pretend.stub(increment=metrics_increment) + ) + monkeypatch.setattr(utils, "_analyze_disclosure", pretend.raiser(ValueError())) + + with pytest.raises(ValueError): + utils.analyze_disclosure( + request=request, + disclosure_record={}, + origin="github", + ) + assert metrics == { + "warehouse.token_leak.github.error.unknown": 1, + } + + +def test_analyze_disclosures_wrong_type(): + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + metrics_service = pretend.stub(increment=metrics_increment) + + with pytest.raises(utils.InvalidTokenLeakRequest) as exc: + utils.analyze_disclosures( + request=pretend.stub(), + disclosure_records={}, + origin="yay", + metrics=metrics_service, + ) + + assert str(exc.value) == "Invalid format: payload is not a list" + assert exc.value.reason == "format" + + +def test_analyze_disclosures_raise(monkeypatch): + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + metrics_service = pretend.stub(increment=metrics_increment) + + task = pretend.stub(delay=pretend.call_recorder(lambda *a, **k: None)) + request = pretend.stub(task=lambda x: task) + + monkeypatch.setattr(tasks, "analyze_disclosure_task", task) + + utils.analyze_disclosures( + request=request, + disclosure_records=[1, 2, 3], + origin="yay", + metrics=metrics_service, + ) + + assert task.delay.calls == [ + pretend.call(disclosure_record=1, origin="yay"), + pretend.call(disclosure_record=2, origin="yay"), + pretend.call(disclosure_record=3, origin="yay"), + ] diff --git a/tests/unit/integration/github/test_views.py b/tests/unit/integration/github/test_views.py new file mode 100644 --- /dev/null +++ b/tests/unit/integration/github/test_views.py @@ -0,0 +1,176 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import json + +import pretend + +from warehouse.integrations.github import utils, views + + +class TestGitHubDiscloseToken: + def test_github_disclose_token(self, pyramid_request, monkeypatch): + + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + metrics = pretend.stub() + + pyramid_request.body = "[1, 2, 3]" + pyramid_request.json_body = [1, 2, 3] + pyramid_request.registry.settings = {"github.token": "token"} + pyramid_request.find_service = lambda *a, **k: metrics + + http = pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + analyze_disclosures = pretend.call_recorder(lambda **k: None) + monkeypatch.setattr(utils, "analyze_disclosures", analyze_disclosures) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_code == 204 + assert verifier_cls.calls == [ + pretend.call(session=http, metrics=metrics, api_token="token") + ] + assert verify.calls == [ + pretend.call(payload="[1, 2, 3]", key_id="foo", signature="bar") + ] + assert analyze_disclosures.calls == [ + pretend.call( + request=pyramid_request, + disclosure_records=[1, 2, 3], + origin="github", + metrics=metrics, + ) + ] + + def test_github_disclose_token_no_token(self, pyramid_request, monkeypatch): + + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + metrics = pretend.stub() + + pyramid_request.body = "[1, 2, 3]" + pyramid_request.json_body = [1, 2, 3] + pyramid_request.registry.settings = {} + pyramid_request.find_service = lambda *a, **k: metrics + pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + analyze_disclosures = pretend.call_recorder(lambda **k: None) + monkeypatch.setattr(utils, "analyze_disclosures", analyze_disclosures) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_code == 204 + + def test_github_disclose_token_verify_fail(self, monkeypatch, pyramid_request): + + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + metrics = pretend.stub() + + pyramid_request.body = "[1, 2, 3]" + pyramid_request.find_service = lambda *a, **k: metrics + pyramid_request.registry.settings = {"github.token": "token"} + + pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: False) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + response = views.github_disclose_token(pyramid_request) + + assert 
response.status_int == 400 + + def test_github_disclose_token_verify_invalid_json(self, monkeypatch): + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + # We need to raise on a property access, can't do that with a stub. + class Request: + headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + body = "[" + + @property + def json_body(self): + return json.loads(self.body) + + def find_service(self, *a, **k): + return pretend.stub(increment=metrics_increment) + + response = pretend.stub(status_int=200) + http = pretend.stub() + registry = pretend.stub(settings={"github.token": "token"}) + + request = Request() + response = views.github_disclose_token(request) + + assert response.status_int == 400 + assert metrics == {"warehouse.token_leak.github.error.payload.json_error": 1} + + def test_github_disclose_token_wrong_payload(self, pyramid_request, monkeypatch): + pyramid_request.headers = { + "GITHUB-PUBLIC-KEY-IDENTIFIER": "foo", + "GITHUB-PUBLIC-KEY-SIGNATURE": "bar", + } + + metrics = collections.Counter() + + def metrics_increment(key): + metrics.update([key]) + + metrics_service = pretend.stub(increment=metrics_increment) + + pyramid_request.body = "{}" + pyramid_request.json_body = {} + pyramid_request.registry.settings = {"github.token": "token"} + pyramid_request.find_service = lambda *a, **k: metrics_service + + pyramid_request.http = pretend.stub() + + verify = pretend.call_recorder(lambda **k: True) + verifier = pretend.stub(verify=verify) + verifier_cls = pretend.call_recorder(lambda **k: verifier) + monkeypatch.setattr(utils, "GitHubTokenScanningPayloadVerifier", verifier_cls) + + response = views.github_disclose_token(pyramid_request) + + assert response.status_code == 400 + assert metrics == {"warehouse.token_leak.github.error.format": 1} diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -69,6 +69,33 @@ def test_find_macaroon(self, user_service, macaroon_service): assert isinstance(dm, Macaroon) assert macaroon.id == dm.id + def test_find_from_raw(self, user_service, macaroon_service): + user = UserFactory.create() + serialized, macaroon = macaroon_service.create_macaroon( + "fake location", user.id, "fake description", {"fake": "caveats"} + ) + + dm = macaroon_service.find_from_raw(serialized) + + assert isinstance(dm, Macaroon) + assert macaroon.id == dm.id + + @pytest.mark.parametrize( + "raw_macaroon", + [ + "pypi-aaaa", # Invalid macaroon + # Macaroon properly formatted but not found. The string is purposedly cut to + # avoid triggering the github token disclosure feature that this very + # function implements. 
+ "py" + "pi-AgEIcHlwaS5vcmcCJGQ0ZDhhNzA2LTUxYTEtNDg0NC1hNDlmLTEyZDRiYzNkYjZmOQAABi" + "D6hJOpYl9jFI4jBPvA8gvV1mSu1Ic3xMHmxA4CSA2w_g", + ], + ) + def test_find_from_raw_not_found_or_invalid(self, macaroon_service, raw_macaroon): + with pytest.raises(services.InvalidMacaroon): + macaroon_service.find_from_raw(raw_macaroon) + def test_find_userid_no_macaroon(self, macaroon_service): assert macaroon_service.find_userid(None) is None diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -232,7 +232,6 @@ def __init__(self): "warehouse.xmlrpc.client.ratelimit_string": "3600 per hour", "warehouse.xmlrpc.search.enabled": True, } - if environment == config.Environment.development: expected_settings.update( { @@ -305,7 +304,7 @@ def __init__(self): pretend.call(".legacy.api.xmlrpc.cache"), pretend.call("pyramid_rpc.xmlrpc"), pretend.call(".legacy.action_routing"), - pretend.call(".domain"), + pretend.call(".predicates"), pretend.call(".i18n"), pretend.call(".db"), pretend.call(".tasks"), diff --git a/tests/unit/test_domain.py b/tests/unit/test_predicates.py similarity index 53% rename from tests/unit/test_domain.py rename to tests/unit/test_predicates.py --- a/tests/unit/test_domain.py +++ b/tests/unit/test_predicates.py @@ -13,7 +13,9 @@ import pretend import pytest -from warehouse.domain import DomainPredicate, includeme +from pyramid.exceptions import ConfigurationError + +from warehouse.predicates import DomainPredicate, HeadersPredicate, includeme class TestDomainPredicate: @@ -39,10 +41,49 @@ def test_invalid_value(self): assert not predicate(None, pretend.stub(domain="pypi.io")) +class TestHeadersPredicate: + @pytest.mark.parametrize( + ("value", "expected"), + [ + (["Foo", "Bar"], "header Foo, header Bar"), + (["Foo", "Bar:baz"], "header Foo, header Bar=baz"), + ], + ) + def test_text(self, value, expected): + predicate = HeadersPredicate(value, None) + assert predicate.text() == expected + assert predicate.phash() == expected + + def test_when_empty(self): + with pytest.raises(ConfigurationError): + HeadersPredicate([], None) + + @pytest.mark.parametrize( + "value", + [["Foo", "Bar"], ["Foo", "Bar:baz"]], + ) + def test_valid_value(self, value): + predicate = HeadersPredicate(value, None) + assert predicate(None, pretend.stub(headers={"Foo": "a", "Bar": "baz"})) + + @pytest.mark.parametrize( + "value", + [["Foo", "Baz"], ["Foo", "Bar:foo"]], + ) + def test_invalid_value(self, value): + predicate = HeadersPredicate(value, None) + assert not predicate(None, pretend.stub(headers={"Foo": "a", "Bar": "baz"})) + + def test_includeme(): config = pretend.stub( - add_route_predicate=pretend.call_recorder(lambda name, pred: None) + add_route_predicate=pretend.call_recorder(lambda name, pred: None), + add_view_predicate=pretend.call_recorder(lambda name, pred: None), ) includeme(config) assert config.add_route_predicate.calls == [pretend.call("domain", DomainPredicate)] + + assert config.add_view_predicate.calls == [ + pretend.call("require_headers", HeadersPredicate) + ] diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -331,6 +331,11 @@ def add_policy(name, filename): read_only=True, domain=warehouse, ), + pretend.call( + "integrations.github.disclose-token", + "/_/github/disclose-token", + domain=warehouse, + ), pretend.call("legacy.api.simple.index", "/simple/", domain=warehouse), pretend.call( "legacy.api.simple.detail", diff --git 
a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -167,6 +167,8 @@ def test_creates_request(self, monkeypatch): assert obj.request.pyramid_env == pyramid_env assert request is pyramid_env["request"] assert isinstance(request.tm, transaction.TransactionManager) + assert 1.5e12 < request.timings["new_request_start"] < 1e13 + assert request.remote_addr == "127.0.0.1" def test_reuses_request(self): pyramid_env = {"request": pretend.stub()}
GitHub Token Scanning Partnership
**What's the problem this feature will solve?**
GitHub has a "Token Scanning" feature, which enables them to detect API keys/tokens and notify the service provider who issued the token. The service provider can then act on this information according to its own policies (revoke the token, contact the owner, etc.). This helps avoid situations where an active API key remains publicly available in a project's repository.
GitHub runs a partnership program for this with various service providers (incl. [npm][1] and [more][2]).
[1]: https://github.blog/changelog/2019-06-18-github-adds-npm-as-a-token-scanning-partner/
[2]: https://help.github.com/en/articles/about-token-scanning
**Describe the solution you'd like**
It would be worthwhile to explore partnering with GitHub for Token Scanning once support for API keys has been completed.
**Additional context**
https://developer.github.com/partnerships/token-scanning/ contains details about how to contact GitHub for this partnership, and on how the integration works.
Thanks @pradyunsg!
(blocked on #994)
This reminds me of https://python-security.readthedocs.io/pypi-vuln/index-2017-11-08-pypirc_exposure_on_github.html .
The next steps here are to implement the endpoints on PyPI for the [token alert service](https://developer.github.com/partnerships/secret-scanning/#create-a-token-alert-service), including [signature verification](https://developer.github.com/partnerships/secret-scanning/#implement-signature-verification-in-your-secret-alert-service) and optionally [revocation and notification](https://developer.github.com/partnerships/secret-scanning/#implement-token-revocation-and-user-notification-in-your-token-alert-service).
I might be interested, as a first contribution. I don't want to block it though, so if anyone passes by and sees this, don't consider it already taken before I post a PR.
Documentation is missing a few elements, especially regarding the cryptographic signature (I'd like to get a working example of key cert, signature and payload to make sure we're doing things right).
I'm thinking about contacting the [email protected] email. Who should I Cc? @ewdurbin and @pradyunsg?
(Ideally, GitHub being GitHub, they may agree to hold the discussion in this ticket.)
@ewdurbin might know if there's already been contact with GitHub on this front. Happy to be cc'ed in whatever contact we're having with GitHub on this, if folks reckon that's a good idea. 😁
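As a reference for the signature-verification step discussed above, here is a minimal sketch using the `cryptography` package, which the verifier in this patch also appears to build on. The key, signature, and payload shapes mirror the test fixtures above; the function name and its inputs are illustrative, not part of the warehouse API:

```python
import base64

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import load_pem_public_key


def is_valid_github_signature(
    public_key_pem: str, signature_b64: str, payload: bytes
) -> bool:
    """Check GitHub's ECDSA/SHA-256 signature over the raw request body."""
    public_key = load_pem_public_key(public_key_pem.encode())
    try:
        # GitHub signs the raw payload; the signature arrives base64-encoded.
        public_key.verify(
            base64.b64decode(signature_b64), payload, ec.ECDSA(hashes.SHA256())
        )
        return True
    except InvalidSignature:
        return False
```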
2020-09-15T10:18:55Z
[]
[]
pypi/warehouse
8591
pypi__warehouse-8591
[ "8565" ]
64b393c9f771686b7d5ce518eee12063b48ed31f
diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py --- a/warehouse/macaroons/caveats.py +++ b/warehouse/macaroons/caveats.py @@ -33,7 +33,7 @@ def __call__(self, predicate): class V1Caveat(Caveat): - def verify_projects(self, projects) -> None: + def verify_projects(self, projects): # First, ensure that we're actually operating in # the context of a package. if not isinstance(self.verifier.context, Project): @@ -43,13 +43,13 @@ def verify_projects(self, projects) -> None: project = self.verifier.context if project.normalized_name in projects: - return + return True raise InvalidMacaroon( f"project-scoped token is not valid for project '{project.name}'" ) - def verify(self, predicate) -> None: + def verify(self, predicate): try: data = json.loads(predicate) except ValueError: @@ -64,13 +64,13 @@ def verify(self, predicate) -> None: if permissions == "user": # User-scoped tokens behave exactly like a user's normal credentials. - return + return True projects = permissions.get("projects") if projects is None: raise InvalidMacaroon("invalid projects in predicate") - self.verify_projects(projects) + return self.verify_projects(projects) class Verifier: @@ -81,19 +81,13 @@ def __init__(self, macaroon, context, principals, permission): self.permission = permission self.verifier = pymacaroons.Verifier() - def verify(self, key: str) -> None: + def verify(self, key): self.verifier.satisfy_general(V1Caveat(self)) - self.verify_signature(key=key) - def verify_signature(self, key: str) -> None: try: - result = self.verifier.verify(self.macaroon, key) + return self.verifier.verify(self.macaroon, key) except ( pymacaroons.exceptions.MacaroonInvalidSignatureException, Exception, # https://github.com/ecordell/pymacaroons/issues/50 ): raise InvalidMacaroon("invalid macaroon signature") - - # This is dead code, the only hardcoded thing verify() can return is True - if not result: - raise InvalidMacaroon("invalid macaroon") diff --git a/warehouse/macaroons/interfaces.py b/warehouse/macaroons/interfaces.py --- a/warehouse/macaroons/interfaces.py +++ b/warehouse/macaroons/interfaces.py @@ -43,14 +43,6 @@ def verify(raw_macaroon, context, principals, permission): Raises InvalidMacaroon if the macaroon is not valid. """ - def check_if_macaroon_exists(raw_macaroon): - """ - Returns the database macaroon if the given raw (serialized) macaroon is - an existing valid macaroon, whatever its permissions. - - Raises InvalidMacaroon otherwise. - """ - def create_macaroon(location, user_id, description, caveats): """ Returns a new raw (serialized) macaroon. The description provided diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -97,11 +97,10 @@ def find_userid(self, raw_macaroon): return dm.user.id - def verify(self, raw_macaroon, context, principals, permission) -> None: + def verify(self, raw_macaroon, context, principals, permission): """ - Passes if the given raw (serialized) macaroon is + Returns True if the given raw (serialized) macaroon is valid for the context, principals, and requested permission. - Updates the last_used date for the macaroon. Raises InvalidMacaroon if the macaroon is not valid. 
""" @@ -112,35 +111,11 @@ def verify(self, raw_macaroon, context, principals, permission) -> None: raise InvalidMacaroon("deleted or nonexistent macaroon") verifier = Verifier(m, context, principals, permission) - verifier.verify(dm.key) - dm.last_used = datetime.datetime.now() + if verifier.verify(dm.key): + dm.last_used = datetime.datetime.now() + return True - def check_if_macaroon_exists(self, raw_macaroon): - """ - Returns the database macaroon if the given raw (serialized) macaroon is - an existing valid macaroon, whatever its permissions. - - Raises InvalidMacaroon otherwise. - """ - raw_macaroon = self._extract_raw_macaroon(raw_macaroon) - if raw_macaroon is None: - raise InvalidMacaroon("malformed or nonexistent macaroon") - - try: - m = pymacaroons.Macaroon.deserialize(raw_macaroon) - except MacaroonDeserializationException: - raise InvalidMacaroon("malformed macaroon") - - dm = self.find_macaroon(m.identifier.decode()) - - if dm is None: - raise InvalidMacaroon("deleted or nonexistent macaroon") - - verifier = Verifier(m, context=None, principals=None, permission=None) - # Will raise InvalidMacaroon if necessary - verifier.verify_signature(dm.key) - - return dm + raise InvalidMacaroon("invalid macaroon") def create_macaroon(self, location, user_id, description, caveats): """
diff --git a/tests/unit/macaroons/test_caveats.py b/tests/unit/macaroons/test_caveats.py --- a/tests/unit/macaroons/test_caveats.py +++ b/tests/unit/macaroons/test_caveats.py @@ -55,7 +55,7 @@ def test_verify_valid_predicate(self): caveat = V1Caveat(verifier) predicate = '{"permissions": "user", "version": 1}' - caveat(predicate) + assert caveat(predicate) is True def test_verify_project_invalid_context(self): verifier = pretend.stub(context=pretend.stub()) @@ -90,9 +90,9 @@ def test_verify_project(self, db_request): project = ProjectFactory.create(name="foobar") verifier = pretend.stub(context=project) caveat = V1Caveat(verifier) - predicate = {"version": 1, "permissions": {"projects": ["foobar"]}} - caveat(json.dumps(predicate)) + predicate = {"version": 1, "permissions": {"projects": ["foobar"]}} + assert caveat(json.dumps(predicate)) is True class TestVerifier: @@ -108,7 +108,7 @@ def test_creation(self): assert verifier.principals is principals assert verifier.permission is permission - def test_verify_raises(self, monkeypatch): + def test_verify(self, monkeypatch): verify = pretend.call_recorder( pretend.raiser(MacaroonInvalidSignatureException) ) @@ -123,33 +123,3 @@ def test_verify_raises(self, monkeypatch): with pytest.raises(InvalidMacaroon): verifier.verify(key) assert verify.calls == [pretend.call(macaroon, key)] - - def test_verify_works(self, monkeypatch): - verify = pretend.call_recorder(lambda x, y: True) - macaroon = pretend.stub() - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() - key = pretend.stub() - verifier = Verifier(macaroon, context, principals, permission) - - monkeypatch.setattr(verifier.verifier, "verify", verify) - - verifier.verify(key) - assert verify.calls == [pretend.call(macaroon, key)] - - def test_verify_false(self, monkeypatch): - # This will not happen in real life, but in case pymacaroon's verify returns - # False, we need to raise. 
- verify = pretend.call_recorder(lambda x, y: False) - macaroon = pretend.stub() - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() - key = pretend.stub() - verifier = Verifier(macaroon, context, principals, permission) - - monkeypatch.setattr(verifier.verifier, "verify", verify) - - with pytest.raises(InvalidMacaroon): - verifier.verify(key) diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -72,16 +72,13 @@ def test_find_macaroon(self, user_service, macaroon_service): def test_find_userid_no_macaroon(self, macaroon_service): assert macaroon_service.find_userid(None) is None - @pytest.fixture - def raw_macaroon(self): - return pymacaroons.Macaroon( + def test_find_userid_invalid_macaroon(self, macaroon_service): + raw_macaroon = pymacaroons.Macaroon( location="fake location", identifier=str(uuid4()), key=b"fake key", version=pymacaroons.MACAROON_V2, ).serialize() - - def test_find_userid_invalid_macaroon(self, macaroon_service, raw_macaroon): raw_macaroon = f"pypi-{raw_macaroon}" assert macaroon_service.find_userid(raw_macaroon) is None @@ -105,13 +102,26 @@ def test_find_userid(self, macaroon_service): assert user.id == user_id - def test_verify_unprefixed_macaroon(self, macaroon_service, raw_macaroon): + def test_verify_unprefixed_macaroon(self, macaroon_service): + raw_macaroon = pymacaroons.Macaroon( + location="fake location", + identifier=str(uuid4()), + key=b"fake key", + version=pymacaroons.MACAROON_V2, + ).serialize() + with pytest.raises(services.InvalidMacaroon): macaroon_service.verify( raw_macaroon, pretend.stub(), pretend.stub(), pretend.stub() ) - def test_verify_no_macaroon(self, macaroon_service, raw_macaroon): + def test_verify_no_macaroon(self, macaroon_service): + raw_macaroon = pymacaroons.Macaroon( + location="fake location", + identifier=str(uuid4()), + key=b"fake key", + version=pymacaroons.MACAROON_V2, + ).serialize() raw_macaroon = f"pypi-{raw_macaroon}" with pytest.raises(services.InvalidMacaroon): @@ -125,7 +135,7 @@ def test_verify_invalid_macaroon(self, monkeypatch, user_service, macaroon_servi "fake location", user.id, "fake description", {"fake": "caveats"} ) - verifier_obj = pretend.stub(verify=pretend.raiser(services.InvalidMacaroon)) + verifier_obj = pretend.stub(verify=pretend.call_recorder(lambda k: False)) verifier_cls = pretend.call_recorder(lambda *a: verifier_obj) monkeypatch.setattr(services, "Verifier", verifier_cls) @@ -193,7 +203,7 @@ def test_verify_valid_macaroon(self, monkeypatch, macaroon_service): principals = pretend.stub() permissions = pretend.stub() - macaroon_service.verify(raw_macaroon, context, principals, permissions) + assert macaroon_service.verify(raw_macaroon, context, principals, permissions) assert verifier_cls.calls == [ pretend.call(mock.ANY, context, principals, permissions) ] @@ -228,50 +238,3 @@ def test_get_macaroon_by_description(self, macaroon_service): macaroon_service.get_macaroon_by_description(user.id, macaroon.description) == dm ) - - def test_check_if_macaroon_exists_unprefixed_macaroon( - self, macaroon_service, raw_macaroon - ): - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists(raw_macaroon) - - def test_check_if_macaroon_exists_no_macaroon(self, macaroon_service, raw_macaroon): - raw_macaroon = f"pypi-{raw_macaroon}" - - with pytest.raises(services.InvalidMacaroon): - 
macaroon_service.check_if_macaroon_exists(raw_macaroon) - - def test_check_if_macaroon_exists_invalid_macaroon( - self, monkeypatch, user_service, macaroon_service - ): - user = UserFactory.create() - raw_macaroon, _ = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", {"fake": "caveats"} - ) - - verifier_obj = pretend.stub( - verify_signature=pretend.raiser(services.InvalidMacaroon) - ) - verifier_cls = pretend.call_recorder(lambda *a, **k: verifier_obj) - monkeypatch.setattr(services, "Verifier", verifier_cls) - - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists(raw_macaroon) - - def test_check_if_macaroon_exists_malformed_macaroon(self, macaroon_service): - with pytest.raises(services.InvalidMacaroon): - macaroon_service.check_if_macaroon_exists("pypi-thiswillnotdeserialize") - - def test_check_if_macaroon_exists_valid_macaroon( - self, monkeypatch, macaroon_service - ): - user = UserFactory.create() - raw_macaroon, data_macaroon = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", {"fake": "caveats"} - ) - - verifier_obj = pretend.stub(verify_signature=lambda k: None) - verifier_cls = pretend.call_recorder(lambda *a, **k: verifier_obj) - monkeypatch.setattr(services, "Verifier", verifier_cls) - - assert macaroon_service.check_if_macaroon_exists(raw_macaroon) is data_macaroon
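For quick reference, the pattern the patch above leans on is that pymacaroons' `Verifier.verify()` returns `True` on success and raises on failure, so callers can branch on the return value. A minimal sketch with made-up key and caveat values:

```python
import pymacaroons

key = b"not-a-real-key"
m = pymacaroons.Macaroon(location="example.org", identifier="mid", key=key)
m.add_first_party_caveat('{"version": 1, "permissions": "user"}')

verifier = pymacaroons.Verifier()
verifier.satisfy_general(lambda predicate: True)  # stand-in for V1Caveat
assert verifier.verify(m, key)  # True here; raises if the signature or a caveat fails
```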
Invalid API Token: invalid macaroon!r for url
**Describe the bug**
Possibly related to #6259
When using the same token I generated on PyPI a couple of months ago, I now get this error:
```
HTTPError: 403 Client Error: Invalid API Token: invalid macaroon!r for url: https://upload.pypi.org/legacy/
```
I figured it was based on an old key and re-generated the token, but I get the same error. I verified that the version of `twine` did not change between the successful execution and the failure.
**Expected behavior**
No error on upload.
**To Reproduce**
TWINE_USERNAME=`__token__` TWINE_PASSWORD=`<token generated at pypi.org>` twine upload ...
**My Platform**
```
Build dist: xenial
Runtime kernel version: 4.15.0-1077-gcp
```
**Additional context**
https://travis-ci.com/github/pyproj4/pyproj-wheels/jobs/384840987
Hi @snowman2, this should be resolved by #8555 (and the error message improved by #8561), can you confirm? Thanks for the fix - restarted build: https://travis-ci.com/github/pyproj4/pyproj-wheels/builds/184304607 It worked, thanks :+1: Just got a similar error. Was working fine a week ago. https://travis-ci.org/github/twilio/twilio-python/jobs/729123230 We also just got a similar error: https://github.com/newrelic/newrelic-python-agent/runs/1146467559?check_suite_focus=true#step:9:14 Thanks, looks like https://github.com/pypa/warehouse/pull/8562 has re-introduced this (cc @ewjoachim).
2020-09-21T22:27:14Z
[]
[]
pypi/warehouse
8624
pypi__warehouse-8624
[ "8299" ]
cfef3488e94ff4b86cfd188b86fb7678d083354f
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -14,8 +14,6 @@ from email.headerregistry import Address -import attr - from celery.schedules import crontab from first import first @@ -69,7 +67,11 @@ def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=Fals request.task(send_email).delay( _compute_recipient(user, email.email), - attr.asdict(msg), + { + "subject": msg.subject, + "body_text": msg.body_text, + "body_html": msg.body_html, + }, { "tag": "account:email:sent", "user_id": user.id, diff --git a/warehouse/email/services.py b/warehouse/email/services.py --- a/warehouse/email/services.py +++ b/warehouse/email/services.py @@ -15,7 +15,6 @@ from email.utils import parseaddr from typing import Optional -import attr import premailer from jinja2.exceptions import TemplateNotFound @@ -33,12 +32,11 @@ def _format_sender(sitename, sender): return str(Address(sitename, addr_spec=sender)) [email protected](auto_attribs=True, frozen=True, slots=True) class EmailMessage: - - subject: str - body_text: str - body_html: Optional[str] = None + def __init__(self, subject: str, body_text: str, body_html: Optional[str] = None): + self.subject = subject + self.body_text = body_text + self.body_html = body_html @classmethod def from_template(cls, email_name, context, *, request):
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -12,7 +12,6 @@ import datetime -import attr import celery.exceptions import pretend import pytest @@ -276,7 +275,11 @@ def record_event(self, user_id, tag, ip_address, additional): task, request, "recipient", - attr.asdict(msg), + { + "subject": msg.subject, + "body_text": msg.body_text, + "body_html": msg.body_html, + }, { "tag": "account:email:sent", "user_id": user_id, @@ -339,7 +342,11 @@ def retry(exc): task, request, "recipient", - attr.asdict(msg), + { + "subject": msg.subject, + "body_text": msg.body_text, + "body_html": msg.body_html, + }, { "tag": "account:email:sent", "user_id": user_id, @@ -455,16 +462,14 @@ def test_send_password_reset_email( "name_value <" + (stub_user.email if email_addr is None else email_addr) + ">", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -548,16 +553,14 @@ def test_email_verification_email( assert send_email.delay.calls == [ pretend.call( stub_email.email, - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -621,16 +624,14 @@ def test_password_change_email(self, pyramid_request, pyramid_config, monkeypatc assert send_email.delay.calls == [ pretend.call( f"{stub_user.username} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -741,16 +742,14 @@ def test_password_compromised_email_hibp( assert send_email.delay.calls == [ pretend.call( f"{stub_user.username} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -814,16 +813,14 @@ def test_password_compromised_email( assert send_email.delay.calls == [ pretend.call( f"{stub_user.username} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { 
"tag": "account:email:sent", "user_id": stub_user.id, @@ -888,16 +885,14 @@ def test_account_deletion_email(self, pyramid_request, pyramid_config, monkeypat assert send_email.delay.calls == [ pretend.call( f"{stub_user.username} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -1016,16 +1011,14 @@ def test_primary_email_change_email( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -1176,16 +1169,14 @@ def test_collaborator_added_email( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -1200,16 +1191,14 @@ def test_collaborator_added_email( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -1302,16 +1291,14 @@ def test_collaborator_added_email_unverified( assert send_email.delay.calls == [ pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -1387,16 +1374,14 @@ def test_project_role_verification_email( assert send_email.delay.calls == [ pretend.call( f"{stub_user.name} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -1481,16 +1466,14 @@ def test_added_as_collaborator_email( assert 
send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -1630,16 +1613,14 @@ def test_collaborator_removed_email(self, db_request, pyramid_config, monkeypatc assert send_email.delay.calls == [ pretend.call( f"{ removed_user.name } <{ removed_user.primary_email.email }>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": removed_user.id, @@ -1654,16 +1635,14 @@ def test_collaborator_removed_email(self, db_request, pyramid_config, monkeypatc ), pretend.call( f"{ submitter_user.name } <{ submitter_user.primary_email.email }>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": submitter_user.id, @@ -1731,16 +1710,14 @@ def test_removed_as_collaborator_email( assert send_email.delay.calls == [ pretend.call( f"{ removed_user.name } <{ removed_user.primary_email.email }>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": removed_user.id, @@ -1817,16 +1794,14 @@ def test_role_changed_email(self, db_request, pyramid_config, monkeypatch): assert send_email.delay.calls == [ pretend.call( f"{ changed_user.name } <{ changed_user.primary_email.email }>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": changed_user.id, @@ -1841,16 +1816,14 @@ def test_role_changed_email(self, db_request, pyramid_config, monkeypatch): ), pretend.call( f"{ submitter_user.name } <{ submitter_user.primary_email.email }>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", 
"user_id": submitter_user.id, @@ -1922,16 +1895,14 @@ def test_role_changed_as_collaborator_email( assert send_email.delay.calls == [ pretend.call( f"{ changed_user.name } <{ changed_user.primary_email.email }>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": changed_user.id, @@ -2027,16 +1998,14 @@ def test_removed_project_email_to_maintainer( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2051,16 +2020,14 @@ def test_removed_project_email_to_maintainer( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -2154,16 +2121,14 @@ def test_removed_project_email_to_owner( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2178,16 +2143,14 @@ def test_removed_project_email_to_owner( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -2297,16 +2260,14 @@ def test_send_yanked_project_release_email_to_maintainer( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2321,16 +2282,14 @@ def test_send_yanked_project_release_email_to_maintainer( ), pretend.call( "submitterusername <[email 
protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -2438,16 +2397,14 @@ def test_send_yanked_project_release_email_to_owner( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2462,16 +2419,14 @@ def test_send_yanked_project_release_email_to_owner( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -2580,16 +2535,14 @@ def test_send_unyanked_project_release_email_to_maintainer( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2604,16 +2557,14 @@ def test_send_unyanked_project_release_email_to_maintainer( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -2720,16 +2671,14 @@ def test_send_unyanked_project_release_email_to_owner( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2744,16 +2693,14 @@ def test_send_unyanked_project_release_email_to_owner( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - 
"<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -2862,16 +2809,14 @@ def test_send_removed_project_release_email_to_maintainer( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -2886,16 +2831,14 @@ def test_send_removed_project_release_email_to_maintainer( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -3002,16 +2945,14 @@ def test_send_removed_project_release_email_to_owner( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -3026,16 +2967,14 @@ def test_send_removed_project_release_email_to_owner( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -3145,16 +3084,14 @@ def test_send_removed_project_release_file_email_to_owner( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -3169,16 +3106,14 @@ def test_send_removed_project_release_file_email_to_owner( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": 
( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -3286,16 +3221,14 @@ def test_send_removed_project_release_file_email_to_maintainer( assert send_email.delay.calls == [ pretend.call( "username <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" ), - ), + }, { "tag": "account:email:sent", "user_id": stub_user.id, @@ -3310,16 +3243,14 @@ def test_send_removed_project_release_file_email_to_maintainer( ), pretend.call( "submitterusername <[email protected]>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_submitter_user.id, @@ -3401,16 +3332,14 @@ def test_two_factor_email( assert send_email.delay.calls == [ pretend.call( f"{stub_user.username} <{stub_user.email}>", - attr.asdict( - EmailMessage( - subject="Email Subject", - body_text="Email Body", - body_html=( - "<html>\n<head></head>\n" - "<body><p>Email HTML Body</p></body>\n</html>\n" - ), - ) - ), + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, { "tag": "account:email:sent", "user_id": stub_user.id,
Using undeclared dependency attrs **Describe the bug** <!-- A clear and concise description of the bug --> From @di Regarding: https://github.com/pypa/warehouse/blob/b344984ee83126eb6fc541406d1f34719fad8b6b/warehouse/email/services.py#L36 > Currently attrs is not a top-level dependency of Warehouse, it's only available as a sub-dependency of automat. If we truly need to use attrs here, it should be added to https://github.com/pypa/warehouse/blob/master/requirements/main.in and those dependencies should be re-compiled. (source: https://github.com/pypa/warehouse/pull/7124#discussion_r453729298) **Expected behavior** <!-- A clear and concise description of what you expected to happen --> attrs should be added to main.in OR removed from the codebase.
I've added the "good first issue" label to this issue. I also said in that comment: > However I think I'd prefer to just use a regular class here. Ah, I didn't know if it applied here too! :)
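To make the suggestion above concrete, here is a minimal sketch (not warehouse's actual implementation) of what an `attrs`-free `EmailMessage` could look like; only the `subject`/`body_text`/`body_html` fields visible in the test diff are assumed:

```python
# Hypothetical plain-class stand-in for warehouse.email.services.EmailMessage;
# the asdict() helper replaces the old attr.asdict(msg) call without needing
# the attrs package at all.
class EmailMessage:
    def __init__(self, subject, body_text, body_html=None):
        self.subject = subject
        self.body_text = body_text
        self.body_html = body_html

    def asdict(self):
        return {
            "subject": self.subject,
            "body_text": self.body_text,
            "body_html": self.body_html,
        }


msg = EmailMessage(subject="Email Subject", body_text="Email Body")
assert msg.asdict() == {
    "subject": "Email Subject",
    "body_text": "Email Body",
    "body_html": None,
}
```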
2020-09-29T16:58:55Z
[]
[]
pypi/warehouse
8,697
pypi__warehouse-8697
[ "8431" ]
a8d415bd82fc9d2ac4cfed12d4ec892d73f67ae8
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -31,6 +31,20 @@ def _compute_recipient(user, email): return str(Address(first([user.name, user.username], default=""), addr_spec=email)) +def _redact_ip(request, email): + # We should only store/display IP address of an 'email sent' event if the user + # who triggered the email event is the one who receives the email. Else display + # 'Redacted' to prevent user privacy concerns. If we don't know the user who + # triggered the action, default to showing the IP of the source. + user_email = request.db.query(Email).filter(Email.email == email).one() + + if request.unauthenticated_userid: + return user_email.user_id != request.unauthenticated_userid + if request.user: + return user_email.user_id != request.user.id + return False + + @tasks.task(bind=True, ignore_result=True, acks_late=True) def send_email(task, request, recipient, msg, success_event): msg = EmailMessage(**msg) @@ -58,13 +72,6 @@ def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=Fals if email is None or not (email.verified or allow_unverified): return - # We should only store/display IP address of an 'email sent' event if the user - # who triggered the email event is the one who receives the email. Else display - # 'Redacted' to prevent user privacy concerns. If we don't know the user who - # triggered the action, default to showing the IP of the source. - user_email = request.db.query(Email).filter(Email.email == email.email).one() - redact_ip = user_email.user_id != request.user.id if request.user else False - request.task(send_email).delay( _compute_recipient(user, email.email), { @@ -80,7 +87,7 @@ def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=Fals "from_": request.registry.settings.get("mail.sender"), "to": email.email, "subject": msg.subject, - "redact_ip": redact_ip, + "redact_ip": _redact_ip(request, email.email), }, }, )
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -59,6 +59,31 @@ def test_compute_recipient(user, address, expected): assert email._compute_recipient(user, email_) == expected [email protected]( + ("unauthenticated_userid", "user", "expected"), + [ + ("the_users_id", None, False), + ("some_other_id", None, True), + (None, pretend.stub(id="the_users_id"), False), + (None, pretend.stub(id="some_other_id"), True), + (None, None, False), + ], +) +def test_redact_ip(unauthenticated_userid, user, expected): + user_email = pretend.stub(user_id="the_users_id") + + request = pretend.stub( + unauthenticated_userid=unauthenticated_userid, + user=user, + db=pretend.stub( + query=lambda a: pretend.stub( + filter=lambda a: pretend.stub(one=lambda: user_email) + ) + ), + ) + assert email._redact_ip(request, user_email) == expected + + class TestSendEmailToUser: @pytest.mark.parametrize( ("name", "username", "primary_email", "address", "expected"), @@ -81,7 +106,7 @@ class TestSendEmailToUser: ], ) def test_sends_to_user_with_verified( - self, name, username, primary_email, address, expected + self, name, username, primary_email, address, expected, pyramid_request ): user = pretend.stub( name=name, @@ -91,28 +116,25 @@ def test_sends_to_user_with_verified( ) task = pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) - request = pretend.stub( - task=pretend.call_recorder(lambda x: task), - db=pretend.stub( - query=lambda a: pretend.stub( - filter=lambda *a: pretend.stub( - one=lambda: pretend.stub(user_id=user.id) - ) - ), + pyramid_request.task = pretend.call_recorder(lambda x: task) + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=user.id) + ) ), - remote_addr="0.0.0.0", - user=user, - registry=pretend.stub(settings={"mail.sender": "[email protected]"}), ) + pyramid_request.user = user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} if address is not None: address = pretend.stub(email=address, verified=True) msg = EmailMessage(subject="My Subject", body_text="My Body") - email._send_email_to_user(request, user, msg, email=address) + email._send_email_to_user(pyramid_request, user, msg, email=address) - assert request.task.calls == [pretend.call(email.send_email)] + assert pyramid_request.task.calls == [pretend.call(email.send_email)] assert task.delay.calls == [ pretend.call( expected, @@ -120,7 +142,7 @@ def test_sends_to_user_with_verified( { "tag": "account:email:sent", "user_id": user.id, - "ip_address": request.remote_addr, + "ip_address": pyramid_request.remote_addr, "additional": { "from_": "[email protected]", "to": address.email if address else primary_email, @@ -171,7 +193,7 @@ def test_doesnt_send_with_unverified(self, primary_email, address): ], ) def test_sends_unverified_with_override( - self, username, primary_email, address, expected + self, username, primary_email, address, expected, pyramid_request ): user = pretend.stub( username=username, @@ -183,19 +205,16 @@ def test_sends_unverified_with_override( ) task = pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) - request = pretend.stub( - task=pretend.call_recorder(lambda x: task), - db=pretend.stub( - query=lambda a: pretend.stub( - filter=lambda *a: pretend.stub( - one=lambda: pretend.stub(user_id=user.id) - ) - ), + pyramid_request.task = pretend.call_recorder(lambda x: task) + 
pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=user.id) + ) ), - remote_addr="0.0.0.0", - user=user, - registry=pretend.stub(settings={"mail.sender": "[email protected]"}), ) + pyramid_request.user = user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} if address is not None: address = pretend.stub(email=address, verified=False) @@ -203,10 +222,10 @@ def test_sends_unverified_with_override( msg = EmailMessage(subject="My Subject", body_text="My Body") email._send_email_to_user( - request, user, msg, email=address, allow_unverified=True + pyramid_request, user, msg, email=address, allow_unverified=True ) - assert request.task.calls == [pretend.call(email.send_email)] + assert pyramid_request.task.calls == [pretend.call(email.send_email)] assert task.delay.calls == [ pretend.call( expected, @@ -214,7 +233,7 @@ def test_sends_unverified_with_override( { "tag": "account:email:sent", "user_id": user.id, - "ip_address": request.remote_addr, + "ip_address": pyramid_request.remote_addr, "additional": { "from_": "[email protected]", "to": address.email if address else primary_email,
500 error when uploading with breached password **Describe the bug** Uploading a package with twine fails with a 500 Internal Server Error. ``` e:\Git\ui-map-parser>twine upload dist\ui_map_parser-1.0.0-py3-none-any.whl --verbose Uploading distributions to https://upload.pypi.org/legacy/ dist\ui_map_parser-1.0.0-py3-none-any.whl (10.0 KB) Uploading ui_map_parser-1.0.0-py3-none-any.whl 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15.5k/15.5k [00:06<00:00, 2.54kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 1 of 5 Uploading ui_map_parser-1.0.0-py3-none-any.whl 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15.5k/15.5k [00:05<00:00, 2.93kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 2 of 5 Uploading ui_map_parser-1.0.0-py3-none-any.whl 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15.5k/15.5k [00:05<00:00, 2.73kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 3 of 5 Uploading ui_map_parser-1.0.0-py3-none-any.whl 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15.5k/15.5k [00:05<00:00, 2.93kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 4 of 5 Uploading ui_map_parser-1.0.0-py3-none-any.whl 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15.5k/15.5k [00:05<00:00, 2.81kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 5 of 5 Uploading ui_map_parser-1.0.0-py3-none-any.whl 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15.5k/15.5k [00:05<00:00, 2.81kB/s] Content received from server: <html> <head> <title>Internal Server Error</title> </head> <body> <h1><p>Internal Server Error</p></h1> </body> </html> HTTPError: 500 Internal Server Error from https://upload.pypi.org/legacy/ Internal Server Error ``` `twine check dist\*` output shows that all is ok: ``` e:\Git\ui-map-parser>twine check dist\* Checking dist\ui_map_parser-1.0.0-py3-none-any.whl: PASSED Checking dist\ui-map-parser-1.0.0.tar.gz: PASSED ``` The same happens when trying to upload the `ui-map-parser-1.0.0.tar.gz` file. **Expected behavior** The package should be uploaded successfully. **To Reproduce** Execute `twine upload ui-map-parser-1.0.0.tar.gz` **My Platform** Windows 10.0.17763 Python 3.7.3 Twine 3.2.0 **Additional context** Project repo: https://github.com/F1ashhimself/ui-map-parser [ui-map-parser-1.0.0.tar.gz](https://github.com/pypa/warehouse/files/5074613/ui-map-parser-1.0.0.tar.gz)
I can't reproduce your issue with the `ui-map-parser-1.0.0.tar.gz` file provided. Can you provide the wheel as well? Would it be possible for you to try this again and note the time so I can correlate this with the logs? I can't add a `.whl` file because this format is not supported, so I just added `.zip` to the end of the file. You can remove this extension and treat it like a `.whl` file. [ui_map_parser-1.0.0-py3-none-any.whl.zip](https://github.com/pypa/warehouse/files/5076401/ui_map_parser-1.0.0-py3-none-any.whl.zip) I can't reproduce with that file either, I'll need you to try again & let me know the time that you got the error. Here is a gif with all the steps, including creation of the wheel and tar.gz files. https://i.imgur.com/yAaRrZe.gif @di also I have reproduced it one more time for you at 09:10 UTC 17 Aug 2020 I can correlate this with our logs, but all I see is this (with no stack trace): ``` pypi-warehouse-web-uploads-cdb98bcb-gqgg5 web-uploads - - [17/Aug/2020:09:10:24 +0000] "POST /legacy/ HTTP/1.1" 500 0 "-" "-" pypi-warehouse-web-uploads-cdb98bcb-s49qv web-uploads - - [17/Aug/2020:09:10:18 +0000] "POST /legacy/ HTTP/1.1" 500 0 "-" "-" ``` And there's no corresponding issue in our alerting system, which is odd. @di, how can I help figure out the problem? @F1ashhimself Could you try to reproduce by uploading to https://test.pypi.org/? @di I have deleted the .pypirc file, created it again with a token instead of a password, and uploaded this package to the main PyPI successfully. Thank you for your help. @F1ashhimself That's interesting. Did your password contain any non-ASCII characters or anything else unusual? @di My password contained only letters and numbers, but it was set a long time ago, so when I try to log in on the site with this password I receive a message that the password does not meet requirements and should be changed. After changing it I tried only with the token. Maybe the problem was that old password that needed to be renewed. @F1ashhimself Was the message that your password was included in a breach? PyPI integrates with https://haveibeenpwned.com/ to prevent using accounts with breached passwords. It sounds like this might not be working 100% correctly. @di The message was something like "Your password is insecure, you should add one or two words to it...". Nothing about a breach, but that password had been compromised, though not on my account. @F1ashhimself Can you run the following script, changing the `password` variable to be the old password you were using, and let me know what it prints? This will use https://haveibeenpwned.com/API/v2#PwnedPasswords to determine if PyPI would have considered your password breached. Your password will be hashed before it's sent to the API. ```python import hashlib import requests password = "your password here" hash_object = hashlib.sha1(password.encode("utf-8")) digest = hash_object.hexdigest() result = requests.get("https://api.pwnedpasswords.com/range/" + digest[:5]).text print(digest[5:].upper() in result) ``` @di The script returns `True`, and as I said previously this password was leaked, but the message when logging in was not about a leaked password. Thanks for confirming! I've updated the issue title accordingly.
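The script from the thread above can be folded into a small reusable helper; this is a sketch using the same public range API (the function name and the use of `requests` are illustrative, not PyPI's code):

```python
import hashlib

import requests


def password_is_breached(password):
    """k-anonymity check against the Pwned Passwords range API: only the
    first five hex characters of the SHA-1 digest ever leave the machine."""
    digest = hashlib.sha1(password.encode("utf-8")).hexdigest().upper()
    prefix, suffix = digest[:5], digest[5:]
    response = requests.get(f"https://api.pwnedpasswords.com/range/{prefix}")
    response.raise_for_status()
    # Each response line is "<remaining 35 hex chars>:<breach count>".
    return any(line.split(":")[0] == suffix for line in response.text.splitlines())


print(password_is_breached("ilovedogs"))  # True: this password appears in breaches
```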
2020-10-12T22:53:21Z
[]
[]
pypi/warehouse
8,815
pypi__warehouse-8815
[ "8378" ]
c9277c501358df4d2192d02606b7207bdb14654d
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -647,7 +647,7 @@ def _is_valid_dist_file(filename, filetype): member = tar.next() if bad_tar: return False - except tarfile.ReadError: + except (tarfile.ReadError, EOFError): return False elif filename.endswith(".exe"): # The only valid filetype for a .exe file is "bdist_wininst".
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -1853,6 +1853,50 @@ def test_upload_fails_with_invalid_file(self, pyramid_config, db_request): assert resp.status_code == 400 assert resp.status == "400 Invalid distribution file." + def test_upload_fails_end_of_file_error(self, pyramid_config, db_request, metrics): + pyramid_config.testing_securitypolicy(userid=1) + + user = UserFactory.create() + EmailFactory.create(user=user) + project = ProjectFactory.create(name="Package-Name") + RoleFactory.create(user=user, project=project) + + # Malformed tar.gz, triggers EOF error + file_contents = b"\x8b\x08\x00\x00\x00\x00\x00\x00\xff" + + db_request.user = user + db_request.user_agent = "warehouse-tests/6.6.6" + db_request.POST = MultiDict( + { + "metadata_version": "1.1", + "name": "malformed", + "version": "1.1", + "summary": "This is my summary!", + "filetype": "sdist", + "md5_digest": hashlib.md5(file_contents).hexdigest(), + "content": pretend.stub( + filename="malformed-1.1.tar.gz", + file=io.BytesIO(file_contents), + type="application/tar", + ), + } + ) + + storage_service = pretend.stub(store=lambda path, filepath, meta: None) + db_request.find_service = lambda svc, name=None, context=None: { + IFileStorage: storage_service, + IMetricsService: metrics, + }.get(svc) + db_request.user_agent = "warehouse-tests/6.6.6" + + with pytest.raises(HTTPBadRequest) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == "400 Invalid distribution file." + def test_upload_fails_with_too_large_file(self, pyramid_config, db_request): pyramid_config.testing_securitypolicy(userid=1)
test.pypi.org returns 500 internal server error if a user tries to upload a malformed package (tar.gz) Hi, thanks for your work. I found a bug when creating a package. Here is how: I was creating (actually generating) a Python package. It got malformed (I need to learn to close files properly, always, :facepalm:). I tried to upload it to [test pypi](https://test.pypi.org) via the [legacy api](https://warehouse.readthedocs.io/api-reference/legacy/#upload-api). The server responded with a 500 internal server error. Response body: ``` <html> <head> <title>Internal Server Error</title> </head> <body> <h1><p>Internal Server Error</p></h1> </body> </html> ``` **Expected behavior** A 400 with some helpful error message would be nice :slightly_smiling_face: **To Reproduce** To reproduce this, one needs to call the API directly. Twine will fail before sending the request to the server. I'm attaching the malformed package. btw, it really looks like an ok archive: ``` $ file foo47-0.0.0.tar.gz foo47-0.0.0.tar.gz: gzip compressed data ``` **Additional context** malformed package: [foo47-0.0.0.tar.gz](https://github.com/pypa/warehouse/files/5028545/foo47-0.0.0.tar.gz) --- **Good First Issue**: This issue is good for first time contributors. If you've already contributed to Warehouse, work on [another issue without this label](https://github.com/pypa/warehouse/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://warehouse.pypa.io/development/getting-started/).
Indeed! Here's the exception it generated on our backend for completeness: https://sentry.io/share/issue/5c077b6c90b74b88b420028dc0493088/ (Marking this as "good first issue") Is this still valid? The Sentry link seems to have expired. It's still valid, the Sentry exceptions for TestPyPI seem to expire after 3 months. I tried to upload the provided distribution but twine couldn't even handle the file (see https://github.com/pypa/twine/issues/712). @matusf, what version of Python / Twine did you use originally? I think I found the error; does anyone recall if it was an EOFError? I was able to trigger it via unit tests. I have a patch ready if so. I haven't used twine. I've made the request directly to the legacy endpoint in Go. I may try to crash it one more time to show the exception in Sentry :smiley: @matusf Please do!
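For anyone reproducing this locally: building a valid `.tar.gz` in memory and then cutting the gzip stream short shows why the eventual fix catches `EOFError` alongside `tarfile.ReadError`. A sketch follows; exactly which of the two exceptions is raised depends on where the stream is truncated:

```python
import io
import tarfile

# Build a small but valid .tar.gz entirely in memory.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w:gz") as tar:
    payload = b"Metadata-Version: 1.0\n" * 200
    info = tarfile.TarInfo(name="foo47-0.0.0/PKG-INFO")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))

# Truncate the gzip stream, simulating the malformed attachment above.
data = buf.getvalue()
truncated = io.BytesIO(data[: len(data) // 2])

try:
    with tarfile.open(fileobj=truncated, mode="r:gz") as tar:
        while tar.next() is not None:  # walk the members, as the upload check does
            pass
except (tarfile.ReadError, EOFError) as exc:  # EOFError is the case the fix adds
    print(f"invalid distribution file: {exc!r}")
```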
2020-11-11T21:24:14Z
[]
[]
pypi/warehouse
8,881
pypi__warehouse-8881
[ "8870" ]
78c417fca31b55790903412e74fe35f7fad8d1df
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -124,18 +124,25 @@ def namespace_stdlib_list(module_list): "linux_armv7l", } # macosx is a little more complicated: -_macosx_platform_re = re.compile(r"macosx_10_(\d+)_(?P<arch>.*)") +_macosx_platform_re = re.compile(r"macosx_(?P<major>\d+)_(\d+)_(?P<arch>.*)") _macosx_arches = { "ppc", "ppc64", "i386", "x86_64", + "arm64", "intel", "fat", "fat32", "fat64", "universal", + "universal2", } +_macosx_major_versions = { + "10", + "11", +} + # manylinux pep600 is a little more complicated: _manylinux_platform_re = re.compile(r"manylinux_(\d+)_(\d+)_(?P<arch>.*)") _manylinux_arches = { @@ -154,7 +161,11 @@ def _valid_platform_tag(platform_tag): if platform_tag in _allowed_platforms: return True m = _macosx_platform_re.match(platform_tag) - if m and m.group("arch") in _macosx_arches: + if ( + m + and m.group("major") in _macosx_major_versions + and m.group("arch") in _macosx_arches + ): return True m = _manylinux_platform_re.match(platform_tag) if m and m.group("arch") in _manylinux_arches:
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -2566,6 +2566,9 @@ def test_upload_fails_without_permission(self, pyramid_config, db_request): "manylinux_3_0_s390x", "macosx_10_6_intel", "macosx_10_13_x86_64", + "macosx_11_0_x86_64", + "macosx_10_15_arm64", + "macosx_11_10_universal2", # A real tag used by e.g. some numpy wheels ( "macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64." @@ -2790,7 +2793,16 @@ def storage_service_store(path, file_path, *, meta): ) ] - @pytest.mark.parametrize("plat", ["linux_x86_64", "linux_x86_64.win32"]) + @pytest.mark.parametrize( + "plat", + [ + "linux_x86_64", + "linux_x86_64.win32", + "macosx_9_2_x86_64", + "macosx_12_2_arm64", + "macosx_10_15_amd64", + ], + ) def test_upload_fails_with_unsupported_wheel_plat( self, monkeypatch, pyramid_config, db_request, plat ):
macOS 11 wheel upload is not allowed <!-- NOTE: This issue should be for problems with PyPI itself, including: * pypi.org * test.pypi.org * files.pythonhosted.org This issue should NOT be for a project installed from PyPI. If you are having an issue with a specific package, you should reach out to the maintainers of that project directly instead. Furthermore, this issue should NOT be for any non-PyPI properties (like python.org, docs.python.org, etc.) --> **Describe the bug** <!-- A clear and concise description of the bug --> Upload of wheels targeting the latest stable macOS (11.0) is rejected due to an unsupported platform tag: ``` HTTPError: 400 Bad Request from https://upload.pypi.org/legacy/ Binary wheel 'pyzmq-20.0.0-cp38-cp38-macosx_11_0_x86_64.whl' has an unsupported platform tag 'macosx_11_0_x86_64'. ``` **Expected behavior** <!-- A clear and concise description of what you expected to happen --> Upload of wheels built for the current stable macOS (11.0) should be allowed. **To Reproduce** <!-- Steps to reproduce the bug, or a link to PyPI where the bug is visible --> Build a wheel with the tag `macosx_11_0_x86_64` and try to upload it with `twine`. The simplest path is probably to `brew install python` on macOS 11.0 and build a wheel with that. I'm using pyzmq-20.0.0, but anything should do: ```console brew install [email protected] /usr/local/opt/[email protected]/bin/python3 -m venv ./env source ./env/bin/activate pip wheel pyzmq twine upload ... ``` Folks who know more about wheels could probably hand-craft an os-tagged wheel on another platform without needing to install the latest macOS. **My Platform** <!-- Any details about your specific platform: * If the problem is in the browser, what browser, version, and OS? * If the problem is with a command-line tool, what version of that tool? * If the problem is with connecting to PyPI, include some details about your network, including SSL/TLS implementation in use, internet service provider, and if there are any firewalls or proxies in use. --> macOS 11.0.1, Python 3.8, 3.9 from homebrew. Only homebrew Python targets the latest OS as the minimum version, but probably pyenv or similar would as well, as would any Python built from source, I think. Pythons with an earlier target OS (conda, Python.org) do not have this issue. **Additional context** <!-- Add any other context, links, etc. about the feature here. --> This is related to https://github.com/pypa/packaging/pull/319. I don't know where warehouse gets its supported tag list, and it may be that that PR to packaging is actually the only fix necessary, but as of today upload of valid tags is not allowed.
We'll need to make an update here: https://github.com/pypa/warehouse/blob/b9316cb12d0e0d91bf52f6a88f612db40ebf137a/warehouse/forklift/legacy.py#L126-L138
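The fix that landed (see the patch above) widens that check to accept macOS major versions 10 and 11 plus the new arm64/universal2 architectures. A condensed standalone version of the updated matching logic; the wrapper function is mine, while the regex and the two sets mirror the patch:

```python
import re

_macosx_platform_re = re.compile(r"macosx_(?P<major>\d+)_(\d+)_(?P<arch>.*)")
_macosx_major_versions = {"10", "11"}
_macosx_arches = {
    "ppc", "ppc64", "i386", "x86_64", "arm64", "intel",
    "fat", "fat32", "fat64", "universal", "universal2",
}


def is_valid_macosx_tag(tag):
    # A tag is accepted when both the major version and the arch are known.
    m = _macosx_platform_re.match(tag)
    return bool(
        m
        and m.group("major") in _macosx_major_versions
        and m.group("arch") in _macosx_arches
    )


assert is_valid_macosx_tag("macosx_11_0_x86_64")  # the tag from the report
assert is_valid_macosx_tag("macosx_11_0_arm64")
assert not is_valid_macosx_tag("macosx_9_2_x86_64")
```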
2020-12-01T11:05:40Z
[]
[]
pypi/warehouse
9,149
pypi__warehouse-9149
[ "8843", "9144" ]
60afeb136c55574b0deb6c36a2a416c951203633
diff --git a/warehouse/legacy/api/pypi.py b/warehouse/legacy/api/pypi.py --- a/warehouse/legacy/api/pypi.py +++ b/warehouse/legacy/api/pypi.py @@ -13,7 +13,7 @@ from pyramid.httpexceptions import HTTPGone, HTTPMovedPermanently, HTTPNotFound from pyramid.response import Response from pyramid.view import forbidden_view_config, view_config -from trove_classifiers import classifiers +from trove_classifiers import sorted_classifiers from warehouse.classifiers.models import Classifier @@ -76,7 +76,7 @@ def forbidden_legacy(exc, request): @view_config(route_name="legacy.api.pypi.list_classifiers") def list_classifiers(request): return Response( - text="\n".join(sorted(classifiers)), content_type="text/plain; charset=utf-8" + text="\n".join(sorted_classifiers), content_type="text/plain; charset=utf-8" ) diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -45,6 +45,8 @@ from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import validates from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound +from sqlalchemy.sql import expression +from trove_classifiers import sorted_classifiers from warehouse import db from warehouse.accounts.models import User @@ -398,7 +400,10 @@ def __table_args__(cls): # noqa Classifier, backref="project_releases", secondary=lambda: release_classifiers, - order_by=Classifier.classifier, + order_by=expression.case( + {c: i for i, c in enumerate(sorted_classifiers)}, + value=Classifier.classifier, + ), passive_deletes=True, ) classifiers = association_proxy("_classifiers", "classifier") diff --git a/warehouse/views.py b/warehouse/views.py --- a/warehouse/views.py +++ b/warehouse/views.py @@ -37,8 +37,8 @@ ) from sqlalchemy import func from sqlalchemy.orm import aliased, joinedload -from sqlalchemy.sql import exists -from trove_classifiers import classifiers, deprecated_classifiers +from sqlalchemy.sql import exists, expression +from trove_classifiers import deprecated_classifiers, sorted_classifiers from warehouse.accounts import REDIRECT_FIELD_NAME from warehouse.accounts.models import User @@ -261,7 +261,7 @@ def locale(request): route_name="classifiers", renderer="pages/classifiers.html", has_translations=True ) def list_classifiers(request): - return {"classifiers": sorted(classifiers)} + return {"classifiers": sorted_classifiers} @view_config( @@ -311,7 +311,12 @@ def search(request): ), Classifier.classifier.notin_(deprecated_classifiers.keys()), ) - .order_by(Classifier.classifier) + .order_by( + expression.case( + {c: i for i, c in enumerate(sorted_classifiers)}, + value=Classifier.classifier, + ) + ) ) for cls in classifiers_q:
diff --git a/tests/unit/legacy/api/test_pypi.py b/tests/unit/legacy/api/test_pypi.py --- a/tests/unit/legacy/api/test_pypi.py +++ b/tests/unit/legacy/api/test_pypi.py @@ -14,7 +14,7 @@ import pytest from pyramid.httpexceptions import HTTPBadRequest, HTTPMovedPermanently, HTTPNotFound -from trove_classifiers import classifiers +from trove_classifiers import sorted_classifiers from warehouse.legacy.api import pypi @@ -71,7 +71,7 @@ def test_list_classifiers(db_request): resp = pypi.list_classifiers(db_request) assert resp.status_code == 200 - assert resp.text == "\n".join(sorted(classifiers)) + assert resp.text == "\n".join(sorted_classifiers) def test_search(): diff --git a/tests/unit/test_views.py b/tests/unit/test_views.py --- a/tests/unit/test_views.py +++ b/tests/unit/test_views.py @@ -22,7 +22,7 @@ HTTPSeeOther, HTTPServiceUnavailable, ) -from trove_classifiers import classifiers +from trove_classifiers import sorted_classifiers from webob.multidict import MultiDict from warehouse import views @@ -425,7 +425,7 @@ def raiser(*args, **kwargs): def test_classifiers(db_request): - assert list_classifiers(db_request) == {"classifiers": sorted(classifiers)} + assert list_classifiers(db_request) == {"classifiers": sorted_classifiers} def test_stats(db_request):
Classifiers: Python version sort order <!-- NOTE: This issue should be for problems with PyPI itself, including: * pypi.org * test.pypi.org * files.pythonhosted.org This issue should NOT be for a project installed from PyPI. If you are having an issue with a specific package, you should reach out to the maintainers of that project directly instead. Furthermore, this issue should NOT be for any non-PyPI properties (like python.org, docs.python.org, etc.) --> **Describe the bug** <!-- A clear and concise description of the bug --> The classifiers "Programming Language :: Python :: 3.X" aren't sorted in the right order on https://pypi.org or on https://test.pypi.org. I'm defining the classifiers like this in the `setup.py` file. ``` classifiers=[ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10" ] ``` In the navigation bar on pypi.org it will then appear like this: ![image](https://user-images.githubusercontent.com/70264417/99465712-42797f00-293b-11eb-8f1a-dced842f433f.png) With Python 3.10 at the top instead of at the bottom (after Python 3.9). To give the visitors of pypi.org a better and faster overview of a project, it would be great if the Python classifiers were sorted by the Python versions. **Expected behavior** <!-- A clear and concise description of what you expected to happen --> Classifiers sorted by Python versions. Python :: 3 Python :: 3.6 Python :: 3.7 Python :: 3.8 Python :: 3.9 Python :: 3.10 Python :: 3.11 Python :: 3.12 etc. **To Reproduce** <!-- Steps to reproduce the bug, or a link to PyPI where the bug is visible --> It can be seen for example here: https://pypi.org/project/officeextractor/ Bump trove-classifiers from 2021.1.14 to 2021.2.28 Bumps [trove-classifiers](https://github.com/pypa/trove-classifiers) from 2021.1.14 to 2021.2.28. <details> <summary>Commits</summary> <ul> <li>See full diff in <a href="https://github.com/pypa/trove-classifiers/commits">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://api.dependabot.com/badges/compatibility_score?dependency-name=trove-classifiers&package-manager=pip&previous-version=2021.1.14&new-version=2021.2.28)](https://dependabot.com/compatibility-score/?dependency-name=trove-classifiers&package-manager=pip&previous-version=2021.1.14&new-version=2021.2.28) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Thanks for filing an issue. PyPI is technically sorting these "correctly", since the original classifier order is not preserved in the project metadata, classifiers are just strings, and the classifiers are being sorted lexicographically. This can be seen in a number of other places as well, like https://pypi.org/classifiers/. I agree that it does feel unintuitive for humans, though. I'd be open to accepting a change that takes the integer/float value of any segment of a classifier into account when sorting. However, this should probably be computed once and pre-built into https://github.com/pypa/trove-classifiers instead of being done on the fly everywhere by PyPI. For instances like this, where we'd have to do it on the fly, `trove-classifiers` should also provide a sorting function as well. I've filed https://github.com/pypa/trove-classifiers/issues/56 to capture that and marked this issue as blocked until it's implemented. This is also an issue in the list of filters in the sidebar of the search results page, e.g. https://pypi.org/search/?q=beautifulsoup ![Screenshot from 2021-01-19 23-43-05](https://user-images.githubusercontent.com/5687998/105128436-08e70080-5ab1-11eb-9f12-05cf9390b9f5.png) It is also an issue in https://pypi.org/pypi?%3Aaction=list_classifiers I assume this has been fixed now by https://github.com/pypa/trove-classifiers/pull/57 👍🏼
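With the bumped trove-classifiers release (2021.2.28, per the dependabot PR above) shipping a precomputed ordering, the difference is easy to demonstrate; a sketch assuming that release's `sorted_classifiers` export:

```python
from trove_classifiers import sorted_classifiers

tags = [
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.9",
]

# Plain lexicographic sorting is the reported bug: "3.10" < "3.6" as strings.
print(sorted(tags))

# Ranking by position in the precomputed list keeps 3.10 after 3.9, which is
# what the patch wires into warehouse's ORDER BY clauses.
rank = {c: i for i, c in enumerate(sorted_classifiers)}
print(sorted(tags, key=lambda c: rank.get(c, len(rank))))
```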
2021-03-01T04:19:42Z
[]
[]
pypi/warehouse
9,341
pypi__warehouse-9341
[ "9321" ]
535ecd8854e20c2041acac6814150ff4a1775d5f
diff --git a/warehouse/admin/views/users.py b/warehouse/admin/views/users.py --- a/warehouse/admin/views/users.py +++ b/warehouse/admin/views/users.py @@ -14,6 +14,7 @@ import wtforms import wtforms.fields.html5 +import wtforms.validators from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther @@ -95,6 +96,14 @@ class UserForm(forms.Form): emails = wtforms.fields.FieldList(wtforms.fields.FormField(EmailForm)) + def validate_emails(self, field): + # If there's no email on the account, it's ok. Otherwise, ensure + # we have 1 primary email. + if field.data and len([1 for email in field.data if email["primary"]]) != 1: + raise wtforms.validators.ValidationError( + "There must be exactly one primary email" + ) + @view_config( route_name="admin.user.detail", @@ -134,6 +143,7 @@ def user_detail(request): if request.method == "POST" and form.validate(): form.populate_obj(user) + request.session.flash(f"User {user.username!r} updated", queue="success") return HTTPSeeOther(location=request.current_route_path()) return {"user": user, "form": form, "roles": roles, "add_email_form": EmailForm()} @@ -151,6 +161,11 @@ def user_add_email(request): form = EmailForm(request.POST) if form.validate(): + + if form.primary.data: + for other in user.emails: + other.primary = False + email = Email( email=form.email.data, user=user,
diff --git a/tests/unit/admin/views/test_users.py b/tests/unit/admin/views/test_users.py --- a/tests/unit/admin/views/test_users.py +++ b/tests/unit/admin/views/test_users.py @@ -147,10 +147,55 @@ def test_updates_user(self, db_request): assert resp.location == "/admin/users/{}/".format(user.id) assert user.name == "Jane Doe" + def test_updates_user_no_primary_email(self, db_request): + email = EmailFactory.create(primary=True) + user = UserFactory.create(emails=[email]) + db_request.matchdict["user_id"] = str(user.id) + db_request.method = "POST" + db_request.POST["name"] = "Jane Doe" + db_request.POST["emails-0-email"] = email.email + # No primary = checkbox unchecked + + db_request.POST = MultiDict(db_request.POST) + db_request.current_route_path = pretend.call_recorder( + lambda: "/admin/users/{}/".format(user.id) + ) + + resp = views.user_detail(db_request) + + assert resp["form"].errors == { + "emails": ["There must be exactly one primary email"] + } + + def test_updates_user_multiple_primary_emails(self, db_request): + email1 = EmailFactory.create(primary=True) + email2 = EmailFactory.create(primary=True) + user = UserFactory.create(emails=[email1, email2]) + db_request.matchdict["user_id"] = str(user.id) + db_request.method = "POST" + db_request.POST["name"] = "Jane Doe" + db_request.POST["emails-0-email"] = email1.email + db_request.POST["emails-0-primary"] = "true" + db_request.POST["emails-1-email"] = email2.email + db_request.POST["emails-1-primary"] = "true" + # No primary = checkbox unchecked + + db_request.POST = MultiDict(db_request.POST) + db_request.current_route_path = pretend.call_recorder( + lambda: "/admin/users/{}/".format(user.id) + ) + + resp = views.user_detail(db_request) + + assert resp["form"].errors == { + "emails": ["There must be exactly one primary email"] + } + class TestUserAddEmail: - def test_add_email(self, db_request): - user = UserFactory.create(emails=[]) + def test_add_primary_email(self, db_request): + old_email = EmailFactory.create(email="[email protected]", primary=True) + user = UserFactory.create(emails=[old_email]) db_request.matchdict["user_id"] = str(user.id) db_request.method = "POST" db_request.POST["email"] = "[email protected]" @@ -167,13 +212,39 @@ def test_add_email(self, db_request): assert resp.status_code == 303 assert resp.location == "/admin/users/{}/".format(user.id) - assert len(user.emails) == 1 + assert len(user.emails) == 2 + + emails = {e.email: e for e in user.emails} + + assert not emails["[email protected]"].primary + assert emails["[email protected]"].primary + assert emails["[email protected]"].verified + + def test_add_non_primary_email(self, db_request): + old_email = EmailFactory.create(email="[email protected]", primary=True) + user = UserFactory.create(emails=[old_email]) + db_request.matchdict["user_id"] = str(user.id) + db_request.method = "POST" + db_request.POST["email"] = "[email protected]" + # No "primary" field + db_request.POST["verified"] = True + db_request.POST = MultiDict(db_request.POST) + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/admin/users/{}/".format(user.id) + ) + + resp = views.user_add_email(db_request) + + db_request.db.flush() + + assert resp.status_code == 303 + assert resp.location == "/admin/users/{}/".format(user.id) + assert len(user.emails) == 2 - email = user.emails[0] + emails = {e.email: e for e in user.emails} - assert email.email == "[email protected]" - assert email.primary - assert email.verified + assert emails["[email protected]"].primary + assert 
not emails["[email protected]"].primary def test_add_invalid(self, db_request): user = UserFactory.create(emails=[])
Reset password doesn't work if user doesn't have verified/primary email address. **Describe the bug** Password reset is not working. I get the email, follow the link, enter the password twice, submit the form, and get back a page with no error messages. But the page title is "Error processing form -- Log in". **Expected behavior** I expected a positive confirmation that my password was reset, and to be able to log in with the new password. Neither one happens. **To Reproduce** 1. Start at `https://pypi.org/account/login/` 2. Click the "Forgot password?" link 3. Enter the email address associated with my PyPI account (gerg.ward at gmail dot com) 4. Check my inbox and the email is already there. 5. Click the link: `https://pypi.org/account/reset-password/?token=.eJw1...` 6. Generate a new password outside my browser. 7. Copy/paste the new password into both password fields; the text changes to "Passwords match" and the "Reset password" button is enabled. 8. Click "Reset password" 9. Receive the same "Reset password" form again, with no positive confirmation and no error messages. Only the page title gives a clue: "Error processing form – Reset your password · PyPI". When I try to log in with the new password (again, copy/pasted), it does not work. I'll attach the HTML and screenshots. **My Platform** Firefox 87.0 on Ubuntu 20.04.2. Same behaviour with Chromium. No HTTP proxy. **Additional context**
Screenshot 1: right before I submit the form: ![pypi-password-reset-fail-1](https://user-images.githubusercontent.com/51437/113213624-6f716300-9246-11eb-8c47-72e7416b1746.png) Screenshot 2: the result of submitting the form: ![pypi-password-reset-fail-2](https://user-images.githubusercontent.com/51437/113213668-7d26e880-9246-11eb-817e-c381c1d6ef20.png) gzipped HTML of the error page, with tokens censored: [pypi-password-reset-fail.html.gz](https://github.com/pypa/warehouse/files/6239682/pypi-password-reset-fail.html.gz) I just went through this flow with a new account and was able to successfully reset my password. Are you able to open Chrome/Firefox devtools and check for JavaScript/console/network errors? Are you able to try a different platform entirely? Also, can you share your PyPI username with us? PyPI username is `gward`. This account was recently recovered with a [support ticket](https://github.com/pypa/pypi-support/issues/993). I hope that's not relevant, but you never know! When I load the page initially (e.g. by following the reset link in my email), Firefox's JS console logs this: ``` Navigated to https://pypi.org/account/reset-password/?token=.eJw1... Content Security Policy: The page’s settings blocked the loading of a resource at inline (“script-src”). onloadwff.js:71:794067 Loading failed for the <script> with source “https://www.googletagmanager.com/gtag/js?id=UA-55961911-1”. reset-password:73:1 Loading failed for the <script> with source “https://www.fastly-insights.com/insights.js?k=6a52360a-f306-421e-8ed5-7417d0d4a4e9&dnt=true”. reset-password:74:1 ``` (with token censored manually) When I submit the form, it seems to log exactly the same thing again: ``` Navigated to https://pypi.org/account/reset-password/?token=.eJw1... Content Security Policy: The page’s settings blocked the loading of a resource at inline (“script-src”). onloadwff.js:71:794067 Loading failed for the <script> with source “https://www.googletagmanager.com/gtag/js?id=UA-55961911-1”. reset-password:73:1 Loading failed for the <script> with source “https://www.fastly-insights.com/insights.js?k=6a52360a-f306-421e-8ed5-7417d0d4a4e9&dnt=true”. reset-password:74:1 ``` Oh, I forgot to mention: no obvious errors in the network log. Specifically, the form submit request looks like: ``` { "POST": { "scheme": "https", "host": "pypi.org", "filename": "/account/reset-password/", "query": { "token": ".eJw1jU..." }, "remote": { "Address": "151.101.192.223:443" } } } ``` (That JSON is generated by Firefox "Copy All" in the network log.) The response: ``` { "Status": "200OK", "Version": "HTTP/2", "Transferred": "6.36 kB (22.29 kB size)", "Referrer Policy": "origin-when-cross-origin" } ``` Response body: ``` <!DOCTYPE html> <html lang="en" dir="ltr"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="defaultLanguage" content="en"> <meta name="availableLanguages" content="en, es, fr, ja, pt_BR, uk, el, de, zh_Hans, ru, he, eo"> <title> Error processing form – Reset your password · PyPI</title> <meta name="description" content="The Python Package Index (PyPI) is a repository of software for the Python programming language."> [...truncated...]
``` I tried it on my Android phone a couple of ways: * Chrome embedded in my email client: same result * standalone Android Chrome: same result * DuckDuckGo Android browser: same result The ways that this form can fail include: * No data provided * Password strength isn't sufficient * Password fields don't match * Password has appeared in a breach (via https://haveibeenpwned.com/) Can you confirm that you're using a new, strong, randomly generated password? Are you generating a different password each time? I also confirmed that attempting to use a breached password will raise a corresponding error: ![image](https://user-images.githubusercontent.com/294415/113217159-1f44d180-9243-11eb-9fe1-744a59e9f194.png) Are you able to attempt to reset with the breached password `ilovedogs` to confirm you can see such an error message? Yes, I am using a new, strong, randomly generated password. Sometimes I use Firefox's built-in password generator, sometimes I use the generator in keepassxc, and sometimes I use a command-line tool called `makepasswd`. Either way, they're long and random and full of a delightful mix of lowercase, uppercase, and numbers. I've avoided punctuation _just in case_ that was causing problems. However, I have NOT used a different password each time. I switched to a shorter password (12 chars instead of 20) for testing on my phone, but I used the same random password for all tests on my phone. I can certainly try again with a unique password each time. Oh yeah, I also confirmed that the "known weak password" feature works for me. I tried with "rush2112" and it said I need more words. Tried with "ilovedogs" and I got the same result as you did. Oh, another guilty admission: I have used the same reset token many times. You seem to have some rate limiting on password reset requests (good!!), which means testing with a new token each time would be annoying. I can do that too, if it helps. OK just tried it with a new reset token and a new random password: same error. Still with Firefox 87.0 on Ubuntu 20.04.2. OK, can you try again? I noticed that the email address on your account was not marked as verified or primary, which likely happened when you went through the account recovery process. Ahhhhh, thank you! I just tried it on Chrome on a Mac and it suddenly worked. I'm guessing it's because you tweaked my verification flag, rather than because of the change of browser and platform. ;-) Thank you!! (And, also, thank you for your work on PyPI. It's a fantastic piece of infrastructure and you are making the world a better place.) Wait, I probably should not close this. My problem is solved, but the error reporting could definitely be better. ;-) Yep, I'd consider this a bug. Will update the title accordingly.
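The eventual fix (see the patch above) also hardens the admin form so that an account with any email addresses must have exactly one marked primary. The core of that check, as a standalone sketch outside the WTForms machinery:

```python
def check_primary_emails(emails):
    # Mirrors the validate_emails check in the patch: no emails at all is
    # fine, but otherwise exactly one must be flagged primary.
    if emails and len([1 for e in emails if e["primary"]]) != 1:
        raise ValueError("There must be exactly one primary email")


check_primary_emails([])  # ok: account with no addresses
check_primary_emails([{"email": "[email protected]", "primary": True}])  # ok
try:
    check_primary_emails(
        [
            {"email": "[email protected]", "primary": True},
            {"email": "[email protected]", "primary": True},
        ]
    )
except ValueError as exc:
    print(exc)  # -> There must be exactly one primary email
```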
2021-04-03T16:18:03Z
[]
[]
pypi/warehouse
9,411
pypi__warehouse-9411
[ "8966" ]
5d15bfe92a086f3d7ed9072caaf659d20abe995f
diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -65,8 +65,12 @@ def json_project(project, request): try: release = ( request.db.query(Release) - .filter(Release.project == project, Release.yanked.is_(False)) - .order_by(Release.is_prerelease.nullslast(), Release._pypi_ordering.desc()) + .filter(Release.project == project) + .order_by( + Release.yanked.asc(), + Release.is_prerelease.nullslast(), + Release._pypi_ordering.desc(), + ) .limit(1) .one() )
diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -118,6 +118,76 @@ def test_only_prereleases(self, monkeypatch, db_request): assert resp is response assert json_release.calls == [pretend.call(release, db_request)] + def test_all_releases_yanked(self, monkeypatch, db_request): + """ + If all releases are yanked, the endpoint should return the same release as + if none of the releases are yanked. + """ + + project = ProjectFactory.create() + + ReleaseFactory.create(project=project, version="1.0", yanked=True) + ReleaseFactory.create(project=project, version="2.0", yanked=True) + ReleaseFactory.create(project=project, version="4.0.dev0", yanked=True) + + release = ReleaseFactory.create(project=project, version="3.0", yanked=True) + + response = pretend.stub() + json_release = pretend.call_recorder(lambda ctx, request: response) + monkeypatch.setattr(json, "json_release", json_release) + + resp = json.json_project(project, db_request) + + assert resp is response + assert json_release.calls == [pretend.call(release, db_request)] + + def test_latest_release_yanked(self, monkeypatch, db_request): + """ + If the latest version is yanked, the endpoint should fall back on the + latest non-prerelease version that is not yanked, if one is available. + """ + + project = ProjectFactory.create() + + ReleaseFactory.create(project=project, version="1.0") + ReleaseFactory.create(project=project, version="3.0", yanked=True) + ReleaseFactory.create(project=project, version="3.0.dev0") + + release = ReleaseFactory.create(project=project, version="2.0") + + response = pretend.stub() + json_release = pretend.call_recorder(lambda ctx, request: response) + monkeypatch.setattr(json, "json_release", json_release) + + resp = json.json_project(project, db_request) + + assert resp is response + assert json_release.calls == [pretend.call(release, db_request)] + + def test_all_non_prereleases_yanked(self, monkeypatch, db_request): + """ + If all non-prerelease versions are yanked, the endpoint should return the + latest prerelease version that is not yanked. + """ + + project = ProjectFactory.create() + + ReleaseFactory.create(project=project, version="1.0", yanked=True) + ReleaseFactory.create(project=project, version="2.0", yanked=True) + ReleaseFactory.create(project=project, version="3.0", yanked=True) + ReleaseFactory.create(project=project, version="3.0.dev0", yanked=True) + + release = ReleaseFactory.create(project=project, version="2.0.dev0") + + response = pretend.stub() + json_release = pretend.call_recorder(lambda ctx, request: response) + monkeypatch.setattr(json, "json_release", json_release) + + resp = json.json_project(project, db_request) + + assert resp is response + assert json_release.calls == [pretend.call(release, db_request)] + class TestJSONProjectSlash: def test_normalizing_redirects(self, db_request):
JSON endpoint for project is 404 if only yanked releases are available From https://github.com/pypa/warehouse/issues/3709#issuecomment-754973958, it appears that the JSON API for a project returns a 404 if the only releases available are yanked releases. E.g. https://pypi.org/pypi/django-twilio2/json should probably look like https://pypi.org/pypi/django-twilio2/0.9.0/json instead of being a 404.
@di Do you know if anyone is working on this? I'm interested in taking a crack at it. I'm working through the getting-started and submitting-patches guides, just didn't want to duplicate any efforts. It does not appear that anyone is working on this at the moment!
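For reference, the fix that landed (see the patch above) keeps yanked releases in the query but sorts them last, behind the stable-before-prerelease and newest-first rules. A pure-Python analogue of that ORDER BY, using made-up release dicts where "ordering" stands in for `Release._pypi_ordering`:

```python
releases = [
    {"version": "1.0", "yanked": False, "is_prerelease": False, "ordering": 1},
    {"version": "3.0", "yanked": True, "is_prerelease": False, "ordering": 3},
    {"version": "3.0.dev0", "yanked": False, "is_prerelease": True, "ordering": 4},
    {"version": "2.0", "yanked": False, "is_prerelease": False, "ordering": 2},
]

# Non-yanked first, stable before pre-release, then newest first.
latest = min(
    releases,
    key=lambda r: (r["yanked"], r["is_prerelease"], -r["ordering"]),
)
print(latest["version"])  # -> "2.0", matching the test_latest_release_yanked case
```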
2021-04-22T04:47:43Z
[]
[]
pypi/warehouse
9,656
pypi__warehouse-9656
[ "9525" ]
21a6432ad9e6fac1ba0cac3bd4797a1ad4180e93
diff --git a/warehouse/admin/routes.py b/warehouse/admin/routes.py --- a/warehouse/admin/routes.py +++ b/warehouse/admin/routes.py @@ -175,3 +175,16 @@ def includeme(config): config.add_route( "admin.sponsor.edit", "/admin/sponsors/{sponsor_id}/", domain=warehouse ) + + # Banner related Admin pages + config.add_route("admin.banner.list", "/admin/banners/", domain=warehouse) + config.add_route("admin.banner.create", "/admin/banners/create/", domain=warehouse) + config.add_route( + "admin.banner.delete", "/admin/banners/{banner_id}/delete/", domain=warehouse + ) + config.add_route( + "admin.banner.preview", "/admin/banners/{banner_id}/preview/", domain=warehouse + ) + config.add_route( + "admin.banner.edit", "/admin/banners/{banner_id}/", domain=warehouse + ) diff --git a/warehouse/admin/views/banners.py b/warehouse/admin/views/banners.py new file mode 100644 --- /dev/null +++ b/warehouse/admin/views/banners.py @@ -0,0 +1,182 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import wtforms + +from pyramid.httpexceptions import HTTPNotFound, HTTPSeeOther +from pyramid.view import view_config +from sqlalchemy.orm.exc import NoResultFound + +from warehouse.banners.models import Banner +from warehouse.forms import Form, URIValidator + + +@view_config( + route_name="admin.banner.list", + renderer="admin/banners/list.html", + permission="admin_dashboard_access", + request_method="GET", + uses_session=True, +) +def banner_list(request): + banners = request.db.query(Banner).all() + return {"banners": banners} + + +@view_config( + route_name="admin.banner.edit", + renderer="admin/banners/edit.html", + permission="admin_dashboard_access", + request_method="GET", + uses_session=True, + require_csrf=True, + require_methods=False, +) +@view_config( + route_name="admin.banner.edit", + renderer="admin/banners/edit.html", + permission="psf_staff", + request_method="POST", + uses_session=True, + require_csrf=True, + require_methods=False, +) +def edit_banner(request): + id_ = request.matchdict["banner_id"] + try: + banner = request.db.query(Banner).filter(Banner.id == id_).one() + except NoResultFound: + raise HTTPNotFound + + form = BannerForm(request.POST if request.method == "POST" else None, banner) + + if request.method == "POST" and form.validate(): + form.populate_obj(banner) + request.session.flash("Banner updated", queue="success") + return HTTPSeeOther(location=request.current_route_path()) + + return {"banner": banner, "form": form} + + +@view_config( + route_name="admin.banner.create", + renderer="admin/banners/edit.html", + permission="admin_dashboard_access", + request_method="GET", + uses_session=True, + require_csrf=True, + require_methods=False, +) +@view_config( + route_name="admin.banner.create", + renderer="admin/banners/edit.html", + permission="psf_staff", + request_method="POST", + uses_session=True, + require_csrf=True, + require_methods=False, +) +def create_banner(request): + form = BannerForm(request.POST if request.method == "POST" else None) + + if request.method == "POST" and 
form.validate(): + banner = Banner(**form.data) + request.db.add(banner) + request.session.flash( + f"Added new banner '{banner.name}'", + queue="success", + ) + redirect_url = request.route_url("admin.banner.list") + return HTTPSeeOther(location=redirect_url) + + return {"form": form} + + +@view_config( + route_name="admin.banner.delete", + require_methods=["POST"], + permission="psf_staff", + uses_session=True, + require_csrf=True, +) +def delete_banner(request): + id_ = request.matchdict["banner_id"] + try: + banner = request.db.query(Banner).filter(Banner.id == id_).one() + except NoResultFound: + raise HTTPNotFound + + # Safeguard check on banner name + if banner.name != request.params.get("banner"): + request.session.flash("Wrong confirmation input", queue="error") + return HTTPSeeOther(request.route_url("admin.banner.edit", banner_id=banner.id)) + + # Delete the banner + request.db.delete(banner) + request.session.flash(f"Deleted banner {banner.name}", queue="success") + return HTTPSeeOther(request.route_url("admin.banner.list")) + + +@view_config( + route_name="admin.banner.preview", + require_methods=["GET"], + permission="moderator", + uses_session=True, + require_csrf=True, + has_translations=True, + renderer="admin/banners/preview.html", +) +def preview_banner(request): + id_ = request.matchdict["banner_id"] + try: + banner = request.db.query(Banner).filter(Banner.id == id_).one() + return {"banner": banner} + except NoResultFound: + raise HTTPNotFound + + +class BannerForm(Form): + name = wtforms.fields.StringField( + validators=[ + wtforms.validators.Length(max=100), + wtforms.validators.DataRequired(), + ], + ) + text = wtforms.fields.StringField( + validators=[ + wtforms.validators.Length(max=280), + wtforms.validators.DataRequired(), + ], + ) + link_url = wtforms.fields.StringField( + validators=[ + wtforms.validators.DataRequired(), + URIValidator(), + ] + ) + link_label = wtforms.fields.StringField( + validators=[ + wtforms.validators.Optional(), + ], + default=Banner.DEFAULT_BTN_LABEL, + ) + fa_icon = wtforms.fields.StringField( + validators=[ + wtforms.validators.Length(max=20), + wtforms.validators.Optional(), + ], + default=Banner.DEFAULT_FA_ICON, + ) + active = wtforms.fields.BooleanField( + validators=[wtforms.validators.Optional()], default=False + ) + end = wtforms.fields.DateField(validators=[wtforms.validators.DataRequired()]) diff --git a/warehouse/banners/__init__.py b/warehouse/banners/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/banners/__init__.py @@ -0,0 +1,22 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def includeme(config): + warehouse = config.get_settings().get("warehouse.domain") + + # route to async render banner messages + config.add_route( + "includes.db-banners", + "/_includes/notification-banners/", + domain=warehouse, + ) diff --git a/warehouse/banners/models.py b/warehouse/banners/models.py new file mode 100644 --- /dev/null +++ b/warehouse/banners/models.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from datetime import date + +from sqlalchemy import Boolean, Column, Date, String, Text +from sqlalchemy_utils.types.url import URLType + +from warehouse import db +from warehouse.utils.attrs import make_repr + + +class Banner(db.Model): + __tablename__ = "banners" + __repr__ = make_repr("text") + DEFAULT_FA_ICON = "fa-comment-alt" + DEFAULT_BTN_LABEL = "See more" + + # internal name + name = Column(String, nullable=False) + + # banner display configuration + text = Column(Text, nullable=False) + link_url = Column(URLType, nullable=False) + link_label = Column(String, nullable=False, default=DEFAULT_BTN_LABEL) + fa_icon = Column(String, nullable=False, default=DEFAULT_FA_ICON) + + # visibility control + active = Column(Boolean, nullable=False, default=False) + end = Column(Date, nullable=False) + + @property + def is_live(self): + # date.today is using the server timezone which is UTC + return self.active and date.today() <= self.end diff --git a/warehouse/banners/views.py b/warehouse/banners/views.py new file mode 100644 --- /dev/null +++ b/warehouse/banners/views.py @@ -0,0 +1,37 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime + +from pyramid.view import view_config + +from warehouse.banners.models import Banner + + +@view_config( + route_name="includes.db-banners", + renderer="includes/banner-messages.html", + uses_session=True, + has_translations=True, +) +def list_banner_messages(request): + # used to preview specific banner + banner_id = request.params.get("single_banner") + if banner_id: + query = request.db.query(Banner).filter((Banner.id == banner_id)) + else: + today = str(datetime.date.today()) + query = request.db.query(Banner).filter( + (Banner.active == True) & (Banner.end >= today) # noqa + ) + + return {"banners": query.all()} diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -442,6 +442,9 @@ def configure(settings=None): # Allow the sponsors app to list sponsors config.include(".sponsors") + # Allow the banners app to list banners + config.include(".banners") + # Include our admin application config.include(".admin") diff --git a/warehouse/migrations/versions/10825786b3df_create_banner_model.py b/warehouse/migrations/versions/10825786b3df_create_banner_model.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/10825786b3df_create_banner_model.py @@ -0,0 +1,64 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Create banner model + +Revision ID: 10825786b3df +Revises: 590c513f1c74 +Create Date: 2021-06-22 18:16:50.425481 +""" + +import sqlalchemy as sa +import sqlalchemy_utils + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "10825786b3df" +down_revision = "590c513f1c74" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "banners", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("name", sa.String(), nullable=False), + sa.Column("text", sa.Text(), nullable=False), + sa.Column("link_url", sqlalchemy_utils.types.url.URLType(), nullable=False), + sa.Column("link_label", sa.String(), nullable=False), + sa.Column("fa_icon", sa.String(), nullable=False), + sa.Column("active", sa.Boolean(), nullable=False), + sa.Column("end", sa.Date(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("banners") + # ### end Alembic commands ###
diff --git a/tests/common/db/banners.py b/tests/common/db/banners.py new file mode 100644 --- /dev/null +++ b/tests/common/db/banners.py @@ -0,0 +1,32 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import date, timedelta + +from factory import fuzzy + +from warehouse.banners.models import Banner + +from .base import FuzzyUrl, WarehouseFactory + + +class BannerFactory(WarehouseFactory): + class Meta: + model = Banner + + name = fuzzy.FuzzyText(length=12) + text = fuzzy.FuzzyText(length=30) + link_url = FuzzyUrl() + link_label = fuzzy.FuzzyText(length=10) + + active = True + end = date.today() + timedelta(days=2) diff --git a/tests/unit/admin/test_routes.py b/tests/unit/admin/test_routes.py --- a/tests/unit/admin/test_routes.py +++ b/tests/unit/admin/test_routes.py @@ -180,4 +180,29 @@ def test_includeme(): "/admin/sponsors/{sponsor_id}/", domain=warehouse, ), + pretend.call( + "admin.banner.list", + "/admin/banners/", + domain=warehouse, + ), + pretend.call( + "admin.banner.create", + "/admin/banners/create/", + domain=warehouse, + ), + pretend.call( + "admin.banner.delete", + "/admin/banners/{banner_id}/delete/", + domain=warehouse, + ), + pretend.call( + "admin.banner.preview", + "/admin/banners/{banner_id}/preview/", + domain=warehouse, + ), + pretend.call( + "admin.banner.edit", + "/admin/banners/{banner_id}/", + domain=warehouse, + ), ] diff --git a/tests/unit/admin/views/test_banners.py b/tests/unit/admin/views/test_banners.py new file mode 100644 --- /dev/null +++ b/tests/unit/admin/views/test_banners.py @@ -0,0 +1,234 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import uuid + +import pretend +import pytest + +from pyramid.httpexceptions import HTTPNotFound +from sqlalchemy.orm.exc import NoResultFound +from webob.multidict import MultiDict + +from warehouse.admin.views import banners as views +from warehouse.banners.models import Banner + +from ....common.db.banners import BannerFactory + + [email protected] +def banner_data(): + """Fixture with minimal required data to create a banner""" + return { + "name": "Sample Banner", + "text": "This should be the correct text", + "link_url": "https://samplebanner.com", + "end": "2021-07-30", + } + + +class TestBannerList: + def test_list_all_banners(self, db_request): + BannerFactory.create_batch(5) + banners = db_request.db.query(Banner).all() + + result = views.banner_list(db_request) + + assert result == {"banners": banners} + + +class TestCreateBanner: + def test_serialize_form_to_create_banner(self, db_request): + result = views.create_banner(db_request) + + assert len(result) == 1 + assert isinstance(result["form"], views.BannerForm) + + def test_serialize_form_errors_if_invalid_post(self, db_request): + db_request.method = "POST" + db_request.POST["name"] = "" + db_request.POST["link_url"] = "" + db_request.POST = MultiDict(db_request.POST) + + result = views.create_banner(db_request) + + assert len(result) == 1 + assert isinstance(result["form"], views.BannerForm) + assert result["form"].errors + + def test_create_banner(self, db_request, banner_data): + db_request.method = "POST" + db_request.POST = MultiDict(banner_data) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_url = pretend.call_recorder(lambda r: "/admin/banners/") + + assert db_request.db.query(Banner).count() == 0 + resp = views.create_banner(db_request) + assert db_request.db.query(Banner).count() == 1 + + assert resp.status_code == 303 + assert resp.location == "/admin/banners/" + assert db_request.session.flash.calls == [ + pretend.call("Added new banner 'Sample Banner'", queue="success") + ] + assert db_request.route_url.calls == [pretend.call("admin.banner.list")] + + +class TestEditBanner: + def test_serialize_form_and_banner(self, db_request): + banner = BannerFactory.create() + db_request.matchdict["banner_id"] = banner.id + + result = views.edit_banner(db_request) + + assert len(result) == 2 + assert isinstance(result["form"], views.BannerForm) + assert result["form"].data["name"] == banner.name + assert result["banner"] == banner + + def test_404_if_banner_does_not_exist(self, db_request): + db_request.matchdict["banner_id"] = str(uuid.uuid4()) + + with pytest.raises(HTTPNotFound): + views.edit_banner(db_request) + + def test_update_banner(self, db_request, banner_data): + banner = BannerFactory.create(fa_icon="custom", **banner_data) + assert banner.is_live + form = views.BannerForm(MultiDict({}), banner) + data = form.data.copy() + data["name"] = "New Name" + data["end"] = str(data["end"]) + data.pop("fa_icon") # do not send fa icon within post data + db_request.matchdict["banner_id"] = banner.id + db_request.method = "POST" + db_request.POST = MultiDict(data) + db_request.current_route_path = pretend.call_recorder( + lambda: f"/admin/banners/{banner.id}/" + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + resp = views.edit_banner(db_request) + db_banner = db_request.db.query(Banner).filter(Banner.id == banner.id).one() + + assert resp.status_code == 303 + assert resp.location == 
f"/admin/banners/{banner.id}/" + assert db_banner.name == "New Name" + assert db_banner.fa_icon == "custom" # keep previous value + assert db_request.session.flash.calls == [ + pretend.call("Banner updated", queue="success") + ] + + def test_form_errors_if_invalid_post_data(self, db_request): + banner = BannerFactory.create() + form = views.BannerForm(MultiDict({}), banner) + data = form.data.copy() + data["name"] = "New name" + data["end"] = "" # date is required + db_request.matchdict["banner_id"] = banner.id + db_request.method = "POST" + db_request.POST = MultiDict(data) + + result = views.edit_banner(db_request) + + assert "end" in result["form"].errors + assert "New name" == result["form"].data["name"] + + +class TestDeleteBanner: + def test_404_if_banner_does_not_exist(self, db_request): + db_request.matchdict["banner_id"] = str(uuid.uuid4()) + + with pytest.raises(HTTPNotFound): + views.delete_banner(db_request) + + def test_delete_banner(self, db_request): + banner = BannerFactory.create() + db_request.matchdict["banner_id"] = banner.id + db_request.params = {"banner": banner.name} + db_request.method = "POST" + db_request.route_url = pretend.call_recorder(lambda s: "/admin/banners/") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + resp = views.delete_banner(db_request) + with pytest.raises(NoResultFound): + db_request.db.query(Banner).filter(Banner.id == banner.id).one() + + assert resp.status_code == 303 + assert resp.location == "/admin/banners/" + assert db_request.session.flash.calls == [ + pretend.call(f"Deleted banner {banner.name}", queue="success") + ] + assert db_request.route_url.calls == [pretend.call("admin.banner.list")] + + def test_do_not_delete_banner_if_invalid_confirmation_param(self, db_request): + banner = BannerFactory.create() + db_request.matchdict["banner_id"] = banner.id + db_request.params = {"banner": "not the banner name"} + db_request.method = "POST" + db_request.route_url = pretend.call_recorder( + lambda s, banner_id: f"/admin/banners/{banner_id}" + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + resp = views.delete_banner(db_request) + banner = db_request.db.query(Banner).filter(Banner.id == banner.id).one() + + assert resp.status_code == 303 + assert resp.location == f"/admin/banners/{banner.id}" + assert db_request.session.flash.calls == [ + pretend.call("Wrong confirmation input", queue="error") + ] + assert db_request.route_url.calls == [ + pretend.call("admin.banner.edit", banner_id=banner.id) + ] + + +class TestPreviewBanner: + def test_404_if_banner_does_not_exist(self, db_request): + db_request.matchdict["banner_id"] = str(uuid.uuid4()) + + with pytest.raises(HTTPNotFound): + views.preview_banner(db_request) + + def test_preview_banner(self, db_request): + banner = BannerFactory.create() + db_request.matchdict["banner_id"] = str(banner.id) + + resp = views.preview_banner(db_request) + assert {"banner": banner} == resp + + +class TestBannerForm: + def test_required_fields(self, banner_data): + form = views.BannerForm(data={}) + + assert form.validate() is False + assert set(form.errors) == set(banner_data) + + def test_valid_data(self, banner_data): + form = views.BannerForm(data=banner_data) + assert form.validate() is True + data = form.data + defaults = { + "fa_icon": Banner.DEFAULT_FA_ICON, + "active": False, + "link_label": Banner.DEFAULT_BTN_LABEL, + } + assert data == {**banner_data, **defaults} diff --git 
a/tests/unit/banners/__init__.py b/tests/unit/banners/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/banners/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/unit/banners/test_init.py b/tests/unit/banners/test_init.py new file mode 100644 --- /dev/null +++ b/tests/unit/banners/test_init.py @@ -0,0 +1,32 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse import banners + + +def test_includeme(): + config = pretend.stub( + get_settings=lambda: {"warehouse.domain": "pypi"}, + add_route=pretend.call_recorder(lambda name, route, domain: None), + ) + + banners.includeme(config) + + assert config.add_route.calls == [ + pretend.call( + "includes.db-banners", + "/_includes/notification-banners/", + domain="pypi", + ), + ] diff --git a/tests/unit/banners/test_models.py b/tests/unit/banners/test_models.py new file mode 100644 --- /dev/null +++ b/tests/unit/banners/test_models.py @@ -0,0 +1,32 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from datetime import date, timedelta + +import pytest + +from warehouse.banners.models import Banner + + [email protected]( + ("active", "end_diff", "expected"), + [ + (False, -10, False), # past inactive banner (ended 10 days ago) + (True, -10, False), # past active banner using end date as safeguard + (False, 20, False), # future inactive banner (ends in 20 days) + (True, 20, True), # future active banner (ends in 20 days) + ], +) +def test_banner_is_live_property(db_request, active, end_diff, expected): + banner = Banner() + banner.active = active + banner.end = date.today() + timedelta(days=end_diff) + assert banner.is_live is expected diff --git a/tests/unit/banners/test_views.py b/tests/unit/banners/test_views.py new file mode 100644 --- /dev/null +++ b/tests/unit/banners/test_views.py @@ -0,0 +1,38 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.banners import views + +from ...common.db.banners import BannerFactory + + +def test_list_active_banners(db_request): + active_banner = BannerFactory.create() + assert active_banner.is_live + inactive_banner = BannerFactory.create(active=False) + assert inactive_banner.is_live is False + + result = views.list_banner_messages(db_request) + + assert result["banners"] == [active_banner] + + +def test_list_specific_banner_for_preview(db_request): + active_banner = BannerFactory.create() + assert active_banner.is_live + inactive_banner = BannerFactory.create(active=False) + assert inactive_banner.is_live is False + + db_request.params = {"single_banner": str(inactive_banner.id)} + result = views.list_banner_messages(db_request) + + assert result["banners"] == [inactive_banner] diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -330,6 +330,7 @@ def __init__(self): pretend.call(".redirects"), pretend.call(".routes"), pretend.call(".sponsors"), + pretend.call(".banners"), pretend.call(".admin"), pretend.call(".forklift"), pretend.call(".sentry"),
Define banners in the DB **What's the problem this feature will solve?** [Setting up and removing occasional community banners](https://github.com/pypa/warehouse/pulls?q=is%3Apr+sort%3Aupdated-desc+banner+is%3Aclosed) seems quite painful. **Describe the solution you'd like** There's an ongoing effort to put the Sponsors in the DB (#9512); riding the momentum of cleaning up the sponsors part, we could also take the opportunity to define those banners in the DB too. I'm even wondering if it would make sense to have the same sponsor roles also be able to set banners. **Additional context** I imagine we'd need: - A model with: - A text (potentially with links) that we want to display. XSS may be an interesting problem. - A `begin` and `end` date; this would allow being more precise about event start and end, and relying less on @ewdurbin's ability to deploy at specific times - Maybe a color if there are choices. - An admin page to [CRUD](https://en.wikipedia.org/wiki/Create,_read,_update_and_delete) the banners There's the question of translations, but as of today, banners are usually up for less time than the interval between weblate PR merges, so I guess most banners would go untranslated anyway.
> I'm even wondering if it would make sense to have the same sponsor roles also be able to set banners. Abso-freakin-lutely. Hey @ewjoachim, thanks for bringing this up! I'd be happy to help with that. I was thinking about how to implement this and it seems simple enough to work on. How to input links, and thus the potential for XSS attacks, is definitely something to think about. About the XSS, if the admin expects the text input to be markdown, we're safe because the `readme_renderer` project takes care of sanitizing it before displaying the final HTML, as I've explained [in this comment](https://github.com/pypa/warehouse/pull/9512#discussion_r644889217). If we agree on the markdown strategy, there's probably one thing we'll have to update in how we're using `readme_renderer`, which is to enable it to consider the `class` attribute for `<a>` tags. Currently, it [only accepts `href` and `title` attributes](https://github.com/pypa/readme_renderer/blob/main/readme_renderer/clean.py#L38) by default. And, even though the lib's [`clean` method](https://github.com/pypa/readme_renderer/blob/main/readme_renderer/clean.py#L63) accepts custom allowed attributes, its [`render`](https://github.com/pypa/readme_renderer/blob/main/readme_renderer/markdown.py#L54) one doesn't, and neither does warehouse's [`utils.render`](https://github.com/pypa/warehouse/blob/main/warehouse/utils/readme.py#L31). So we'll probably have to release a new version of `readme_renderer` with a more extensible parameters API. The same probably holds for the `target="_blank"` and `rel="noopener"` attributes. This whole `readme_renderer` question can also be avoided with @ewjoachim's suggestion of having a way to pick colors. But I'll go further and suggest picking "banner templates" instead of just colors. These templates would be several CSS classes defining the banner color and **how a link should be displayed**. The classes can be all the `notification-bar--*` ones. That way, we avoid having to define styles within the link content. What do you think? Also, another question: where should this code live? Is it ok to create a new `banners` module, or is that overkill since it should probably end up with a simple `models.py` defining this new model? > I was thinking about how to implement this and it seems simple enough to work on. How to input links, and thus the potential for XSS attacks, is definitely something to think about. About the XSS, if the admin expects the text input to be markdown, we're safe because the `readme_renderer` project takes care of sanitizing it before displaying the final HTML, as I've explained [in this comment](https://github.com/pypa/warehouse/pull/9512#discussion_r644889217). We should minimize the featureset here to match the need, at least initially. All historic banners have been a single string describing the banner and a text/link to construct a call-to-action button. Thus, I don't know that we need much free-form concern around Markdown/rendering. > This whole `readme_renderer` question can also be avoided with @ewjoachim's suggestion of having a way to pick colors. But I'll go further and suggest picking "banner templates" instead of just colors. These templates would be several CSS classes defining the banner color and **how a link should be displayed**. The classes can be all the `notification-bar--*` ones. That way, we avoid having to define styles within the link content. What do you think?
While I think having flexible banners for other uses (important notifications from moderators/admins) is an _excellent feature_, I'm not aware of a use-case for this that we've had in the past that isn't covered by our statuspage. For the initial iteration, reducing the scope would be ideal, since we would _not_ want the `psf_staff` role to be able to apply banners that aren't just the regular yellow. > Also, another question: where should this code live? Is it ok to create a new `banners` module, or is that overkill since it should probably end up with a simple `models.py` defining this new model? `banners` makes sense to me! I don't think we're averse to creating new modules. Ha, perfect. I was planning on waiting for your PR to be merged because this one was likely going to copy large chunks of it, but you offering to take care of it is perfect. I can promise you a very thorough review in exchange, for what it's worth :) (seriously, that's awesome, thank you) (Also, +1 Ee on all those points! Start small, iterate) (❤️ to you all) Thanks for all your comments! I'm starting to work on it now and hope to get back with good news as soon as possible :+1: And for @ewjoachim's information, and to document it here, @ewdurbin and I talked about having a way to preview the banner after it gets created. That way moderators/PSF staff/admin users won't have to wait until the banner's dates to see the results.
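The markdown route discussed above was ultimately not taken: the merged patch stores structured fields (`text`, `link_url`, `link_label`, `fa_icon`) and renders them through a template, which sidesteps sanitization entirely. For the curious, a minimal sketch of the sanitization approach follows, assuming `bleach` (the library `readme_renderer`'s `clean` module was built on at the time); the tag/attribute allowlist here is illustrative, not warehouse's actual configuration:

```python
import bleach

# Hypothetical allowlist: readme_renderer-style defaults, extended so banner
# links may carry the extra attributes discussed above.
ALLOWED_TAGS = ["a", "p", "em", "strong"]
ALLOWED_ATTRIBUTES = {"a": ["href", "title", "class", "rel", "target"]}

def sanitize_banner_html(html):
    # Anything not on the allowlist is stripped or escaped, so admin-supplied
    # markup cannot smuggle scripts or event handlers into the page.
    return bleach.clean(html, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)

print(sanitize_banner_html('<a href="/help/" onclick="alert(1)">See more</a>'))
# -> <a href="/help/">See more</a>
```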
2021-06-11T14:48:09Z
[]
[]
pypi/warehouse
9983
pypi__warehouse-9983
[ "9913" ]
3802138b4eed60a1b8218168919e6de3792e9105
diff --git a/warehouse/integrations/vulnerabilities/utils.py b/warehouse/integrations/vulnerabilities/utils.py --- a/warehouse/integrations/vulnerabilities/utils.py +++ b/warehouse/integrations/vulnerabilities/utils.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. - +from pyramid.httpexceptions import HTTPBadRequest from sqlalchemy import func, orm from sqlalchemy.orm.exc import NoResultFound @@ -101,19 +101,26 @@ def _analyze_vulnerability(request, vulnerability_report, origin, metrics): ) raise + found_releases = False # by now, we don't have any release found + for version in report.versions: try: release = _get_release(request, project, version) + found_releases = True # at least one release found except NoResultFound: metrics.increment( "warehouse.vulnerabilities.error.release_not_found", tags=[f"origin:{origin}"], ) - raise + continue # skip that release if release not in vulnerability_record.releases: vulnerability_record.releases.append(release) + if not found_releases: + # no releases found, then raise an exception + raise HTTPBadRequest("None of the releases were found") + # Unassociate any releases that no longer apply. for release in list(vulnerability_record.releases): if release.version not in report.versions: @@ -136,7 +143,11 @@ def analyze_vulnerability(request, vulnerability_report, origin, metrics): metrics.increment( "warehouse.vulnerabilities.processed", tags=[f"origin:{origin}"] ) - except (vulnerabilities.InvalidVulnerabilityReportRequest, NoResultFound): + except ( + vulnerabilities.InvalidVulnerabilityReportRequest, + NoResultFound, + HTTPBadRequest, + ): raise except Exception: metrics.increment(
diff --git a/tests/unit/integration/vulnerabilities/test_utils.py b/tests/unit/integration/vulnerabilities/test_utils.py --- a/tests/unit/integration/vulnerabilities/test_utils.py +++ b/tests/unit/integration/vulnerabilities/test_utils.py @@ -16,6 +16,7 @@ import pretend import pytest +from pyramid.httpexceptions import HTTPBadRequest from sqlalchemy.orm.exc import NoResultFound from tests.common.db.packaging import ProjectFactory, ReleaseFactory @@ -289,7 +290,7 @@ def metrics_increment(key, tags): metrics = pretend.stub(increment=metrics_increment, timed=metrics.timed) - with pytest.raises(NoResultFound): + with pytest.raises(HTTPBadRequest): utils.analyze_vulnerability( request=db_request, vulnerability_report={ @@ -309,7 +310,7 @@ def metrics_increment(key, tags): ( "warehouse.vulnerabilities.error.release_not_found", ("origin:test_report_source",), - ): 1, + ): 2, }
Vulnerability reporting task is not resilient to deleted releases Currently attempting to publish a vulnerability report for a notification that includes release versions that have been deleted (or never existed) causes an error and prevents the entire report from being stored. Example: https://sentry.io/share/issue/69362937025d4524be01918a80f830d1/ If the release is not found, we should just skip that version and continue storing the vulnerability report. If *none* of the versions are found, we should error.
Hi @di, seems like no one is working on this issue. I'm interested; can you tell me what I have to do? Sure. The `analyze_vulnerability` function handles a report of a vulnerability for multiple release versions. Currently it fails if one of the releases is missing: https://github.com/pypa/warehouse/blob/b0da6add8783535879057d011ee5300330a154f8/warehouse/integrations/vulnerabilities/utils.py#L105-L112 (This might happen due to a typo, or more likely if the release has been removed since the vulnerability was discovered.) If one or more of the versions in the report don't correspond to a release, we should just skip those versions and continue storing the vulnerability report. If none of the versions in the report are found, we should error. Thanks. I'll work on it later.
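Distilled from the merged patch above, the control flow of the fix looks roughly like this; a sketch, not the exact code, with `get_release` standing in for the patch's `_get_release` helper:

```python
from pyramid.httpexceptions import HTTPBadRequest
from sqlalchemy.orm.exc import NoResultFound

def resolve_report_releases(report_versions, get_release):
    """Resolve report versions to releases, skipping ones that don't exist.

    ``get_release`` is expected to raise ``NoResultFound`` for unknown
    versions, mirroring the patch's ``_get_release`` helper.
    """
    found = []
    for version in report_versions:
        try:
            found.append(get_release(version))
        except NoResultFound:
            continue  # deleted or never-existed release: skip just this one
    if not found:
        # Error only when *no* version in the report could be resolved.
        raise HTTPBadRequest("None of the releases were found")
    return found
```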
2021-08-31T19:32:03Z
[]
[]
pypi/warehouse
10002
pypi__warehouse-10002
[ "9420" ]
ed936ebc4dbb03473859edf29a01a73ede6c04be
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -410,7 +410,11 @@ def configure(settings=None): # We need to enable our Client Side Include extension config.get_settings().setdefault( - "jinja2.extensions", ["warehouse.utils.html.ClientSideIncludeExtension"] + "jinja2.extensions", + [ + "warehouse.utils.html.ClientSideIncludeExtension", + "warehouse.i18n.extensions.TrimmedTranslatableTagsExtension", + ], ) # We'll want to configure some filters for Jinja2 as well. diff --git a/warehouse/i18n/extensions.py b/warehouse/i18n/extensions.py new file mode 100644 --- /dev/null +++ b/warehouse/i18n/extensions.py @@ -0,0 +1,22 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from jinja2.ext import Extension + + +class TrimmedTranslatableTagsExtension(Extension): + """ + This extension ensures all {% trans %} tags are trimmed by default. + """ + + def __init__(self, environment): + environment.policies["ext.i18n.trimmed"] = True
diff --git a/tests/functional/test_templates.py b/tests/functional/test_templates.py --- a/tests/functional/test_templates.py +++ b/tests/functional/test_templates.py @@ -48,6 +48,7 @@ def test_templates_for_empty_titles(): extensions=[ "jinja2.ext.i18n", "warehouse.utils.html.ClientSideIncludeExtension", + "warehouse.i18n.extensions.TrimmedTranslatableTagsExtension", ], cache_size=0, ) @@ -85,6 +86,7 @@ def test_render_templates(): extensions=[ "jinja2.ext.i18n", "warehouse.utils.html.ClientSideIncludeExtension", + "warehouse.i18n.extensions.TrimmedTranslatableTagsExtension", ], cache_size=0, ) diff --git a/tests/unit/i18n/test_extensions.py b/tests/unit/i18n/test_extensions.py new file mode 100644 --- /dev/null +++ b/tests/unit/i18n/test_extensions.py @@ -0,0 +1,40 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from jinja2 import Environment + + [email protected]( + "ext, result", + [ + # Just a sanity check: test that when we do nothing, text is not trimmed. + ([], " hey "), + # Now test that with our extension, text is trimmed. + (["warehouse.i18n.extensions.TrimmedTranslatableTagsExtension"], "hey"), + ], +) +def test_trim_trans_tags(ext, result): + env = Environment( + extensions=["jinja2.ext.i18n"] + ext, + ) + + class Faketext: + # Every method is identity + def __getattribute__(self, _: str): + return lambda x: x + + env.install_gettext_translations(Faketext()) + + # Result is trimmed + assert env.from_string("{% trans %} hey {% endtrans %}").render() == result
All trans tags should be trimmed by default Leaving translation strings untrimmed always results in messy whitespace in the strings translators see. We can approach this in 2 ways: - The simple but inelegant way: add `trimmed` to all existing trans tags, and a CI check to ensure it's added to future tags too - The elegant way: tell jinja2/gettext to trim by default and subsequently remove all `trimmed` parameters. The problem with solution number 2 is that it requires a way to access the [policies on the Jinja2 environment](https://jinja.palletsprojects.com/en/2.11.x/api/#ext-i18n-trimmed), and I haven't found how to do that using `pyramid_jinja2`. If anyone finds how to do so...
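For context, the policy in question is a one-liner on a plain Jinja2 environment; a minimal sketch follows (the merged patch above wraps exactly this line in a custom extension, which is how it becomes reachable through `pyramid_jinja2`'s `jinja2.extensions` setting):

```python
from jinja2 import Environment

env = Environment(extensions=["jinja2.ext.i18n"])
env.install_null_translations()          # identity translations, just for the demo
env.policies["ext.i18n.trimmed"] = True  # trim every {% trans %} block by default

template = env.from_string("{% trans %}\n  Hello\n  world!\n{% endtrans %}")
print(template.render())  # "Hello world!" -- surrounding whitespace collapsed
```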
2021-09-03T21:26:51Z
[]
[]
pypi/warehouse
10019
pypi__warehouse-10019
[ "8843" ]
6581b0225686270140415b35dfdaa4a00da15674
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -315,6 +315,7 @@ def configure(settings=None): # We'll want to configure some filters for Jinja2 as well. filters = config.get_settings().setdefault("jinja2.filters", {}) + filters.setdefault("format_classifiers", "warehouse.filters:format_classifiers") filters.setdefault("classifier_id", "warehouse.filters:classifier_id") filters.setdefault("format_tags", "warehouse.filters:format_tags") filters.setdefault("json", "warehouse.filters:tojson") diff --git a/warehouse/filters.py b/warehouse/filters.py --- a/warehouse/filters.py +++ b/warehouse/filters.py @@ -11,6 +11,7 @@ # limitations under the License. import binascii +import collections import enum import hmac import json @@ -122,6 +123,20 @@ def format_tags(tags): return formatted_tags +def format_classifiers(classifiers): + structured = collections.OrderedDict() + + # Split up our classifiers into our data structure + for classifier in classifiers: + key, *value = classifier.split(" :: ", 1) + if value: + if key not in structured: + structured[key] = [] + structured[key].append(value[0]) + + return structured + + def classifier_id(classifier): return classifier.replace(" ", "_").replace("::", ".")
diff --git a/tests/functional/test_templates.py b/tests/functional/test_templates.py --- a/tests/functional/test_templates.py +++ b/tests/functional/test_templates.py @@ -21,6 +21,7 @@ "format_datetime": "warehouse.i18n.filters:format_datetime", "format_rfc822_datetime": "warehouse.i18n.filters:format_rfc822_datetime", "format_number": "warehouse.i18n.filters:format_number", + "format_classifiers": "warehouse.filters:format_classifiers", "classifier_id": "warehouse.filters:classifier_id", "format_tags": "warehouse.filters:format_tags", "json": "warehouse.filters:tojson", diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py --- a/tests/unit/test_filters.py +++ b/tests/unit/test_filters.py @@ -135,6 +135,23 @@ def test_format_tags(inp, expected): assert filters.format_tags(inp) == expected [email protected]( + ("inp", "expected"), + [ + ( + ["Foo :: Bar :: Baz", "Foo :: Bar :: Qux", "Vleep"], + [("Foo", ["Bar :: Baz", "Bar :: Qux"])], + ), + ( + ["Foo :: Bar :: Baz", "Vleep :: Foo", "Foo :: Bar :: Qux"], + [("Foo", ["Bar :: Baz", "Bar :: Qux"]), ("Vleep", ["Foo"])], + ), + ], +) +def test_format_classifiers(inp, expected): + assert list(filters.format_classifiers(inp).items()) == expected + + @pytest.mark.parametrize( ("inp", "expected"), [("Foo", "Foo"), ("Foo :: Foo", "Foo_._Foo")] )
Classifiers: Python version sort order **Describe the bug** The classifiers "Programming Language :: Python :: 3.X" aren't sorted in the right order on https://pypi.org or on https://test.pypi.org. I'm defining the classifiers like this in the `setup.py` file: ``` classifiers=[ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10" ] ``` In the navigation bar on pypi.org they will then appear like this: ![image](https://user-images.githubusercontent.com/70264417/99465712-42797f00-293b-11eb-8f1a-dced842f433f.png) With Python 3.10 at the top instead of at the bottom (after Python 3.9). To give the visitors of pypi.org a better and faster overview of a project, it would be great if the Python classifiers were sorted by Python version. **Expected behavior** Classifiers sorted by Python version. Python :: 3 Python :: 3.6 Python :: 3.7 Python :: 3.8 Python :: 3.9 Python :: 3.10 Python :: 3.11 Python :: 3.12 etc. **To Reproduce** It can be seen for example here: https://pypi.org/project/officeextractor/
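The misordering is easy to reproduce in isolation, since Python's default `sorted()` compares the classifier strings character by character:

```python
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
]
for c in sorted(classifiers):
    print(c)
# ... :: 3
# ... :: 3.10   <- "3.10" sorts before "3.6" because "1" < "6" as characters
# ... :: 3.6
# ... :: 3.9
```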
Thanks for filing an issue. PyPI is technically sorting these "correctly", since the original classifier order is not preserved in the project metadata, classifiers are just strings, and the classifiers are being sorted lexicographically. This can be seen in a number of other places as well, like https://pypi.org/classifiers/. I agree that it does feel unintuitive for humans, though. I'd be open to accepting a change that takes the integer/float value of any segment of a classifier into account when sorting. However, this should probably be computed once and pre-built into https://github.com/pypa/trove-classifiers instead of being done on the fly everywhere by PyPI. For instances like this, where we'd have to do it on the fly, `trove-classifiers` should provide a sorting function as well. I've filed https://github.com/pypa/trove-classifiers/issues/56 to capture that and marked this issue as blocked until it's implemented. This is also an issue in the list of filters in the sidebar of the search results page, e.g. https://pypi.org/search/?q=beautifulsoup ![Screenshot from 2021-01-19 23-43-05](https://user-images.githubusercontent.com/5687998/105128436-08e70080-5ab1-11eb-9f12-05cf9390b9f5.png) Also an issue in https://pypi.org/pypi?%3Aaction=list_classifiers I assume this has been fixed now by https://github.com/pypa/trove-classifiers/pull/57 👍🏼 Indeed! We still need to pull in the new version of `trove-classifiers` here and update our usage of it to use the `sorted_classifiers` list. Is this actually fixed? Looking at https://pypi.org/project/officeextractor/ the ordering is still wrong. Looks like we need to update this function, which does some secondary sorting that may no longer be necessary: https://github.com/pypa/warehouse/blob/7fc3ce5bd7ecc93ef54c1652787fb5e7757fe6f2/warehouse/filters.py#L126-L144 which is used here: https://github.com/pypa/warehouse/blob/20c947a4cd4a9667bf0f31722e77b87964ae195c/warehouse/templates/includes/packaging/project-data.html#L128 Re-opening... Please see PR https://github.com/pypa/warehouse/pull/10006.
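One way to consume the pre-sorted list mentioned above (`sorted_classifiers` from `trove-classifiers`) is to rank a project's classifiers against it; a minimal sketch, not warehouse's actual code:

```python
from trove_classifiers import sorted_classifiers

# Rank of each canonical classifier in the naturally sorted list, built once.
RANK = {classifier: i for i, classifier in enumerate(sorted_classifiers)}

def sort_classifiers(classifiers):
    # Unknown classifiers sort last instead of raising KeyError.
    return sorted(classifiers, key=lambda c: RANK.get(c, len(RANK)))

print(sort_classifiers([
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.6",
]))
# ['Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.10']
```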
2021-09-08T15:48:20Z
[]
[]
pypi/warehouse
10116
pypi__warehouse-10116
[ "10087" ]
aecd05dbbf709d54d4e7bc277546bc2d9260b746
diff --git a/warehouse/utils/webauthn.py b/warehouse/utils/webauthn.py --- a/warehouse/utils/webauthn.py +++ b/warehouse/utils/webauthn.py @@ -87,6 +87,7 @@ def get_credential_options(user, *, challenge, rp_name, rp_id): user.username, user.name or user.username, None, + attestation=None, user_verification="discouraged", ) @@ -119,7 +120,12 @@ def verify_registration_response(response, challenge, *, rp_id, origin): # for the individual challenge. encoded_challenge = _webauthn_b64encode(challenge.encode()).decode() response = pywebauthn.WebAuthnRegistrationResponse( - rp_id, origin, response, encoded_challenge, self_attestation_permitted=True + rp_id, + origin, + response, + encoded_challenge, + self_attestation_permitted=True, + none_attestation_permitted=True, ) try: return response.verify()
diff --git a/tests/unit/utils/test_webauthn.py b/tests/unit/utils/test_webauthn.py --- a/tests/unit/utils/test_webauthn.py +++ b/tests/unit/utils/test_webauthn.py @@ -47,6 +47,7 @@ def test_verify_registration_response(monkeypatch): {}, webauthn._webauthn_b64encode("not_a_real_challenge".encode()).decode(), self_attestation_permitted=True, + none_attestation_permitted=True, ) ] assert resp == "not a real object"
TFA with Fido2 privacy intrusion by requiring attestation **Describe the bug** Can't register TFA with a FIDO key without device attestation, making this feature unusable for privacy-conscious developers. **Expected behavior** Respect users' privacy when registering FIDO keys by not requiring attestation. Most sites work without attestation; PyPI should work without it as well. **To Reproduce** - With any Yubikey supporting Fido2 - With Firefox (92.0 (64-Bit)) go to https://pypi.org/manage/account/webauthn-provision - Try registering a Fido2 device - Wait for Firefox to bring up its privacy warning - Select "anonymize anyway" and continue - PyPI says "Registration rejected. Error: Authenticator attestation is required." ![Firefox fido2 pip](https://user-images.githubusercontent.com/82288872/134816676-9feb37a0-0519-46a8-a1d9-3acb068ca760.png) **My Platform** macOS 11.6 Firefox 92.0 (64-Bit) Yubikey blue USB-A with NFC
While we do not store the attestation information, we do currently attempt to verify it in our provisioning. We also currently allow self attestation, so I'm not sure _precisely_ what requiring it at all gains us. It seems that attestation is most crucial in environments that have strict requirements on hardware/provenance or to prevent MITM attacks... But I'm not sure what the _correct_ approach is. @woodruffw do you have an opinion here? The change necessary (excluding tests) to avoid this popover/warning is as follows: ```diff diff --git a/warehouse/utils/webauthn.py b/warehouse/utils/webauthn.py index 8507d434..be5d12a1 100644 --- a/warehouse/utils/webauthn.py +++ b/warehouse/utils/webauthn.py @@ -87,6 +87,7 @@ def get_credential_options(user, *, challenge, rp_name, rp_id): user.username, user.name or user.username, None, + attestation=None, user_verification="discouraged", ) @@ -119,7 +120,7 @@ def verify_registration_response(response, challenge, *, rp_id, origin): # for the individual challenge. encoded_challenge = _webauthn_b64encode(challenge.encode()).decode() response = pywebauthn.WebAuthnRegistrationResponse( - rp_id, origin, response, encoded_challenge, self_attestation_permitted=True + rp_id, origin, response, encoded_challenge, self_attestation_permitted=True, none_attestation_permitted=True ) try: return response.verify() ``` Just to chime in, I agree that attestation isn't required here, since the provenance of the hardware authenticator itself shouldn't matter (i.e. PyPI shouldn't need to know that a certain key was made by a specific company). Here's [some additional explanation](https://fidoalliance.org/fido-technotes-the-truth-about-attestation/). Yep, we shouldn't need attestation at all (it is indeed for provenance, and we already have other mechanisms like WebAuthn's counter for MITM/skip attacks). The only "downside" is that we aren't able to associate signatures with physical/trustworthy devices, but that's not something we're doing anyway and is outside of PyPI's threat model. IIRC, we have self-attestation enabled because WebKit/Safari's implementation of attestation with Touch ID originally only provided self-attested signatures. I'm not sure if that's changed, but WebKit *does* [support some kind of anonymized direct attestation](https://webkit.org/blog/11312/meet-face-id-and-touch-id-for-the-web/) that looks similar (identical?) to the [standardized anonymous attestation CA technique](https://www.w3.org/TR/webauthn-2/#anonymization-ca). Thanks @woodruffw and @skorokithakis! It makes sense that we move to a "No Attestation" model and keep tabs on the client library; if at some point it supports additional attestation formats, including the anonymization format (currently only [these formats](https://github.com/duo-labs/py_webauthn/blob/4a0f8cd1db3b7635a1951a933d5a690beedf7c50/webauthn/webauthn.py#L48-L50) are supported), we could consider those. I'll open a PR shortly.
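For readers unfamiliar with the mechanics: the Firefox popover appears because the server-supplied creation options ask for attestation, so the fix is to stop asking. Roughly, the options object the browser receives ends up shaped like this (field names follow the WebAuthn spec; the values are illustrative, not warehouse's exact payload):

```python
# Illustrative PublicKeyCredentialCreationOptions, per the WebAuthn spec.
creation_options = {
    "rp": {"id": "pypi.org", "name": "PyPI"},
    "user": {"id": "<user handle>", "name": "user", "displayName": "user"},
    "challenge": "<random bytes, base64url-encoded>",
    "pubKeyCredParams": [{"type": "public-key", "alg": -7}],  # ES256
    "attestation": "none",  # no attestation requested, so Firefox never
                            # needs to show its anonymization prompt
}
```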
2021-09-30T16:47:54Z
[]
[]
pypi/warehouse
10438
pypi__warehouse-10438
[ "10262" ]
edd39ff97e24acfff1d26d16591462d7020667d6
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py --- a/warehouse/packaging/views.py +++ b/warehouse/packaging/views.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from natsort import natsorted from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound from pyramid.view import view_config from sqlalchemy.orm.exc import NoResultFound @@ -125,7 +126,8 @@ def release_detail(release, request): "project": project, "release": release, "description": description, - "files": release.files.all(), + # We cannot easily sort naturally in SQL, sort here and pass to template + "files": natsorted(release.files.all(), reverse=True, key=lambda f: f.filename), "latest_version": project.latest_version, "all_versions": project.all_versions, "maintainers": maintainers,
diff --git a/tests/unit/packaging/test_views.py b/tests/unit/packaging/test_views.py --- a/tests/unit/packaging/test_views.py +++ b/tests/unit/packaging/test_views.py @@ -13,6 +13,7 @@ import pretend import pytest +from natsort import natsorted from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound from warehouse.packaging import views @@ -271,6 +272,27 @@ def test_detail_renders(self, monkeypatch, db_request): pretend.call("unrendered description", "text/plain") ] + def test_detail_renders_files_natural_sort(self, db_request): + """Tests that when a release has multiple versions of Python, + the sort order is most recent Python version first.""" + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="3.0") + files = [ + FileFactory.create( + release=release, + filename="{}-{}-{}.tar.gz".format( + project.name, release.version, py_ver + ), + python_version="source", + ) + for py_ver in ["cp27", "cp310", "cp39"] # intentionally out of order + ] + sorted_files = natsorted(files, reverse=True, key=lambda f: f.filename) + + result = views.release_detail(release, db_request) + + assert result["files"] == sorted_files + def test_license_from_classifier(self, db_request): """A license label is added when a license classifier exists.""" other_classifier = ClassifierFactory.create(
Use natural sort order for file listings **What's the problem this feature will solve?** Currently on https://pypi.org/project/lxml/4.6.3/#files, the files are listed as: - lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl - lxml-4.6.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl - lxml-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl This is because, as strings, the filenames sort with 27 < 310 < 35. **Describe the solution you'd like** Use natural sort order for filenames, similar to what we did for https://github.com/pypa/trove-classifiers/issues/56. This _may_ also make sense for the simple pages, where it would be a nice-to-have when a human looks at the page.
I added the 'good first issue' label to this issue. I would recommend using https://pypi.org/project/natsort/ for this. How about writing all the filenames in fixed N digits, for example 027 for 27 if using 3 digits? That isn't permitted. The filenames come from files uploaded by the users/package authors, and need to be presented without modification. Alright, then I'll get to using [natsort](https://pypi.org/project/natsort/) for the file listings. I looked at this yesterday; it seems like the ordering comes from the view definition here: https://github.com/pypa/warehouse/blob/9029bb58bd2273efcfcf6906b6bda2145f028a44/warehouse/packaging/models.py#L412-L419 It looks like the sqlalchemy method `order_by` takes a clause to order the results by. I'm not sure whether this can just be a Python function so we could wrap this with `natsorted()` and call it a day, or if it would have to be a SQL implementation of natsorting similar to this: http://www.rhodiumtoad.org.uk/junk/naturalsort.sql I'm having issues adding `natsort` as a dependency; when I go to make the `requirements.txt` I get the following: ```bash $ make requirements/main.txt /bin/sh: .state/env/bin/python: No such file or directory /Users/crow/warehouse/.state/env/bin/pip-compile --allow-unsafe --generate-hashes --output-file=requirements/main.txt requirements/main.in make: /Users/crow/warehouse/.state/env/bin/pip-compile: No such file or directory ``` @Benjscho Sorry about that, that should be resolved by https://github.com/pypa/warehouse/pull/10278 @di Thanks! I'll try again now You'd want to order using natsort.natsort_keygen in the sqlalchemy order_by lambda: https://natsort.readthedocs.io/en/master/api.html#natsort.natsort_keygen Still struggling to get `make requirements` working; now getting an error relating to `pg_config` not found ```bash Error: pg_config executable not found. pg_config is required to build psycopg2 from source. Please add the directory containing pg_config to the $PATH or specify the full executable path with the option: python setup.py build_ext --pg-config /path/to/pg_config build ... or with the pg_config option in 'setup.cfg'. ``` This may be because I have incompatible Python versions running, but I'm not sure how to disable them. Should otherwise be able to test the change here, which I believe should work: https://github.com/Benjscho/warehouse/commit/38a3b5434fb65ad082889905d68e6e1126cb8faf re `pg_config` not found: you need to apt-get install either postgresql-common or libpq-dev (making some assumptions about the build env). If you're using macOS, what you may want to do is ``` brew install libpq ``` The output will mention that libpq is not automatically put in the path, so you may want to either edit your path for the current shell or link the libpq executables to your path with the `brew link --force` command that will be described in the output.
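The merged patch above takes the in-Python route rather than SQL-side sorting. The core behavior is easy to try in isolation (filenames abbreviated from the lxml example; the real view sorts `File` objects with `key=lambda f: f.filename`):

```python
from natsort import natsorted

filenames = [
    "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl",
    "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl",
    "lxml-4.6.3-cp310-cp310-manylinux2014_x86_64.whl",
]
# reverse=True mirrors the merged view: newest Python tag listed first.
for name in natsorted(filenames, reverse=True):
    print(name)
# cp310, then cp35, then cp27 -- "310" now compares as the number 310
```

natsort also provides `natsort_keygen()`, which produces a `key=` callable for plain `sorted()` and similar APIs, as suggested above.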
2021-12-03T20:03:49Z
[]
[]
pypi/warehouse
10459
pypi__warehouse-10459
[ "10458" ]
5c78122a57257a2e76fe883d78500416784d6111
diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -25,6 +25,7 @@ TOTPValueMixin, WebAuthnCredentialMixin, ) +from warehouse.i18n import localize as _ class RoleNameMixin: @@ -65,7 +66,17 @@ class SaveAccountForm(forms.Form): __params__ = ["name", "public_email"] - name = wtforms.StringField() + name = wtforms.StringField( + validators=[ + wtforms.validators.Length( + max=100, + message=_( + "The name is too long. " + "Choose a name with 100 characters or less." + ), + ) + ] + ) public_email = wtforms.SelectField(choices=[("", "Not displayed")]) def __init__(self, *args, user_service, user_id, **kwargs):
diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -506,7 +506,7 @@ def test_public_email_verified(self): email = pretend.stub(verified=True, public=False, email="[email protected]") user = pretend.stub(id=1, username=pretend.stub(), emails=[email]) form = forms.SaveAccountForm( - name=pretend.stub(), + name="some name", public_email=email.email, user_service=pretend.stub(get_user=lambda _: user), user_id=user.id, @@ -517,10 +517,26 @@ def test_public_email_unverified(self): email = pretend.stub(verified=False, public=False, email=pretend.stub()) user = pretend.stub(id=1, username=pretend.stub(), emails=[email]) form = forms.SaveAccountForm( - name=pretend.stub(), + name="some name", public_email=email.email, user_service=pretend.stub(get_user=lambda _: user), user_id=user.id, ) assert not form.validate() assert "is not a verified email for" in form.public_email.errors.pop() + + def test_name_too_long(self, pyramid_config): + email = pretend.stub(verified=True, public=False, email="[email protected]") + user = pretend.stub(id=1, username=pretend.stub(), emails=[email]) + form = forms.SaveAccountForm( + name="x" * 101, + public_email=email.email, + user_service=pretend.stub(get_user=lambda _: user), + user_id=user.id, + ) + + assert not form.validate() + assert ( + str(form.name.errors.pop()) + == "The name is too long. Choose a name with 100 characters or less." + )
DataError: No validator for user display name max length https://sentry.io/organizations/python-software-foundation/issues/2847760761/?referrer=github_plugin ``` StringDataRightTruncation: value too long for type character varying(100) File "sqlalchemy/engine/base.py", line 1802, in _execute_context self.dialect.do_execute( File "sqlalchemy/engine/default.py", line 719, in do_execute cursor.execute(statement, parameters) DataError: (raised as a result of Query-invoked autoflush; consider using a session.no_autoflush block if this flush is occurring prematurely) (psycopg2.errors.StringDataRightTruncation) value too long for type character varying(100) [SQL: UPDATE users SET name=%(name)s WHERE users.id = %(users_id)s] [parameters: {'name': 'James is a marketing executive who excels in the development and execution of integrated marketing campaigns to drive brand awareness and drive customer loyalty. Links - https://www.amenify.com/... (54 additional frame(s) were not displayed) ... File "sqlalchemy/engine/base.py", line 1845, in _execute_context self._handle_dbapi_exception( File "sqlalchemy/engine/base.py", line 2026, in _handle_dbapi_exception util.raise_( File "sqlalchemy/util/compat.py", line 207, in raise_ raise exception File "sqlalchemy/engine/base.py", line 1802, in _execute_context self.dialect.do_execute( File "sqlalchemy/engine/default.py", line 719, in do_execute cursor.execute(statement, parameters) ```
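The merged patch above closes the gap with a WTForms `Length` validator, so an overlong name is rejected before it ever reaches the `varchar(100)` column. A self-contained sketch of that behavior, using a plain `wtforms.Form` instead of warehouse's base form and localization helpers:

```python
import wtforms

class SaveAccountForm(wtforms.Form):
    # Cap the display name at the column's varchar(100) limit.
    name = wtforms.StringField(
        validators=[
            wtforms.validators.Length(
                max=100,
                message="The name is too long. Choose a name with 100 characters or less.",
            )
        ]
    )

form = SaveAccountForm(name="x" * 101)
assert not form.validate()
print(form.name.errors)
# ['The name is too long. Choose a name with 100 characters or less.']
```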
2021-12-09T16:00:23Z
[]
[]
pypi/warehouse
10,482
pypi__warehouse-10482
[ "10202" ]
ee06d1dc33f255488e03504c6e4efc6f8eacba37
diff --git a/warehouse/accounts/forms.py b/warehouse/accounts/forms.py --- a/warehouse/accounts/forms.py +++ b/warehouse/accounts/forms.py @@ -336,7 +336,7 @@ def __init__(self, *args, challenge, origin, rp_id, **kwargs): def validate_credential(self, field): try: - assertion_dict = json.loads(field.data.encode("utf8")) + json.loads(field.data.encode("utf8")) except json.JSONDecodeError: raise wtforms.validators.ValidationError( _("Invalid WebAuthn assertion: Bad payload") @@ -345,7 +345,7 @@ def validate_credential(self, field): try: validated_credential = self.user_service.verify_webauthn_assertion( self.user_id, - assertion_dict, + field.data.encode("utf8"), challenge=self.challenge, origin=self.origin, rp_id=self.rp_id, diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -23,6 +23,7 @@ from passlib.context import CryptContext from sqlalchemy.orm import joinedload from sqlalchemy.orm.exc import NoResultFound +from webauthn.helpers import bytes_to_base64url from zope.interface import implementer import warehouse.utils.otp as otp @@ -419,7 +420,9 @@ def verify_webauthn_credential(self, credential, *, challenge, rp_id, origin): webauthn_cred = ( self.db.query(WebAuthn) - .filter_by(credential_id=validated_credential.credential_id.decode()) + .filter_by( + credential_id=bytes_to_base64url(validated_credential.credential_id) + ) .first() ) diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -24,6 +24,7 @@ from pyramid.security import forget, remember from pyramid.view import view_config from sqlalchemy.orm.exc import NoResultFound +from webauthn.helpers import bytes_to_base64url from warehouse.accounts import REDIRECT_FIELD_NAME from warehouse.accounts.forms import ( @@ -353,9 +354,10 @@ def webauthn_authentication_validate(request): request.session.clear_webauthn_challenge() if form.validate(): - credential_id, sign_count = form.validated_credential - webauthn = user_service.get_webauthn_by_credential_id(userid, credential_id) - webauthn.sign_count = sign_count + webauthn = user_service.get_webauthn_by_credential_id( + userid, bytes_to_base64url(form.validated_credential.credential_id) + ) + webauthn.sign_count = form.validated_credential.new_sign_count _login_user( request, diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -195,7 +195,7 @@ def __init__( def validate_credential(self, field): try: - credential_dict = json.loads(field.data.encode("utf8")) + json.loads(field.data.encode("utf-8")) except json.JSONDecodeError: raise wtforms.validators.ValidationError( "Invalid WebAuthn credential: Bad payload" @@ -203,7 +203,7 @@ def validate_credential(self, field): try: validated_credential = self.user_service.verify_webauthn_credential( - credential_dict, + field.data.encode("utf-8"), challenge=self.challenge, rp_id=self.rp_id, origin=self.origin, diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -22,6 +22,7 @@ from sqlalchemy import func from sqlalchemy.orm import Load, joinedload from sqlalchemy.orm.exc import NoResultFound +from webauthn.helpers import bytes_to_base64url import warehouse.utils.otp as otp @@ -612,8 +613,12 @@ def validate_webauthn_provision(self): self.user_service.add_webauthn( self.request.user.id, 
label=form.label.data, - credential_id=form.validated_credential.credential_id.decode(), - public_key=form.validated_credential.public_key.decode(), + credential_id=bytes_to_base64url( + form.validated_credential.credential_id + ), + public_key=bytes_to_base64url( + form.validated_credential.credential_public_key + ), sign_count=form.validated_credential.sign_count, ) self.user_service.record_event( diff --git a/warehouse/utils/webauthn.py b/warehouse/utils/webauthn.py --- a/warehouse/utils/webauthn.py +++ b/warehouse/utils/webauthn.py @@ -11,13 +11,23 @@ # limitations under the License. import base64 -import os import webauthn as pywebauthn -from webauthn.webauthn import ( - AuthenticationRejectedException as _AuthenticationRejectedError, - RegistrationRejectedException as _RegistrationRejectedError, +from webauthn.helpers import base64url_to_bytes, generate_challenge +from webauthn.helpers.exceptions import ( + InvalidAuthenticationResponse, + InvalidRegistrationResponse, +) +from webauthn.helpers.options_to_json import converter +from webauthn.helpers.structs import ( + AttestationConveyancePreference, + AuthenticationCredential, + AuthenticatorSelectionCriteria, + AuthenticatorTransport, + PublicKeyCredentialDescriptor, + RegistrationCredential, + UserVerificationRequirement, ) @@ -29,33 +39,34 @@ class RegistrationRejectedError(Exception): pass -WebAuthnCredential = pywebauthn.WebAuthnCredential - - -def _get_webauthn_users(user, *, rp_id): +def _get_webauthn_user_public_key_credential_descriptors(user, *, rp_id): """ Returns a webauthn.WebAuthnUser instance corresponding to the given user model, with properties suitable for usage within the webauthn API. """ return [ - pywebauthn.WebAuthnUser( - str(user.id), - user.username, - user.name or user.username, - None, - credential.credential_id, - credential.public_key, - credential.sign_count, - rp_id, + PublicKeyCredentialDescriptor( + id=base64url_to_bytes(credential.credential_id), + transports=[ + AuthenticatorTransport.USB, + AuthenticatorTransport.NFC, + AuthenticatorTransport.BLE, + AuthenticatorTransport.INTERNAL, + ], ) for credential in user.webauthn ] -def _webauthn_b64decode(encoded): - padding = "=" * (len(encoded) % 4) - return base64.urlsafe_b64decode(encoded + padding) +def _get_webauthn_user_public_keys(user, *, rp_id): + return [ + ( + base64url_to_bytes(credential.public_key), + credential.sign_count, + ) + for credential in user.webauthn + ] def _webauthn_b64encode(source): @@ -69,9 +80,7 @@ def generate_webauthn_challenge(): See: https://w3c.github.io/webauthn/#cryptographic-challenges """ - # NOTE: Webauthn recommends at least 16 bytes of entropy, - # we go with 32 because it doesn't cost us anything. - return _webauthn_b64encode(os.urandom(32)).decode() + return generate_challenge() def get_credential_options(user, *, challenge, rp_name, rp_id): @@ -79,19 +88,19 @@ def get_credential_options(user, *, challenge, rp_name, rp_id): Returns a dictionary of options for credential creation on the client side. 
""" - options = pywebauthn.WebAuthnMakeCredentialOptions( - challenge, - rp_name, - rp_id, - str(user.id), - user.username, - user.name or user.username, - None, - attestation=None, - user_verification="discouraged", + _authenticator_selection = AuthenticatorSelectionCriteria() + _authenticator_selection.user_verification = UserVerificationRequirement.DISCOURAGED + options = pywebauthn.generate_registration_options( + rp_id=rp_id, + rp_name=rp_name, + user_id=str(user.id), + user_name=user.username, + user_display_name=user.name or user.username, + challenge=challenge, + attestation=AttestationConveyancePreference.NONE, + authenticator_selection=_authenticator_selection, ) - - return options.registration_dict + return converter.unstructure(options) def get_assertion_options(user, *, challenge, rp_id): @@ -99,11 +108,16 @@ def get_assertion_options(user, *, challenge, rp_id): Returns a dictionary of options for assertion retrieval on the client side. """ - options = pywebauthn.WebAuthnAssertionOptions( - _get_webauthn_users(user, rp_id=rp_id), challenge + options = pywebauthn.generate_authentication_options( + rp_id=rp_id, + challenge=challenge, + allow_credentials=_get_webauthn_user_public_key_credential_descriptors( + user, rp_id=rp_id + ), + user_verification=UserVerificationRequirement.DISCOURAGED, ) - return options.assertion_dict + return converter.unstructure(options) def verify_registration_response(response, challenge, *, rp_id, origin): @@ -118,18 +132,17 @@ def verify_registration_response(response, challenge, *, rp_id, origin): # response's clientData.challenge is encoded twice: # first for the entire clientData payload, and then again # for the individual challenge. - encoded_challenge = _webauthn_b64encode(challenge.encode()).decode() - response = pywebauthn.WebAuthnRegistrationResponse( - rp_id, - origin, - response, - encoded_challenge, - self_attestation_permitted=True, - none_attestation_permitted=True, - ) + encoded_challenge = _webauthn_b64encode(challenge) try: - return response.verify() - except _RegistrationRejectedError as e: + _credential = RegistrationCredential.parse_raw(response) + return pywebauthn.verify_registration_response( + credential=_credential, + expected_challenge=encoded_challenge, + expected_rp_id=rp_id, + expected_origin=origin, + require_user_verification=False, + ) + except InvalidRegistrationResponse as e: raise RegistrationRejectedError(str(e)) @@ -141,21 +154,26 @@ def verify_assertion_response(assertion, *, challenge, user, origin, rp_id): Returns an updated signage count on success. Raises AuthenticationRejectedError on failure. """ - webauthn_users = _get_webauthn_users(user, rp_id=rp_id) - cred_ids = [cred.credential_id for cred in webauthn_users] - encoded_challenge = _webauthn_b64encode(challenge.encode()).decode() - - for webauthn_user in webauthn_users: - response = pywebauthn.WebAuthnAssertionResponse( - webauthn_user, - assertion, - encoded_challenge, - origin, - allow_credentials=cred_ids, - ) + # NOTE: We re-encode the challenge below, because our + # response's clientData.challenge is encoded twice: + # first for the entire clientData payload, and then again + # for the individual challenge. 
+ encoded_challenge = _webauthn_b64encode(challenge) + webauthn_user_public_keys = _get_webauthn_user_public_keys(user, rp_id=rp_id) + + for public_key, current_sign_count in webauthn_user_public_keys: try: - return (webauthn_user.credential_id, response.verify()) - except _AuthenticationRejectedError: + _credential = AuthenticationCredential.parse_raw(assertion) + return pywebauthn.verify_authentication_response( + credential=_credential, + expected_challenge=encoded_challenge, + expected_rp_id=rp_id, + expected_origin=origin, + credential_public_key=public_key, + credential_current_sign_count=current_sign_count, + require_user_verification=False, + ) + except InvalidAuthenticationResponse: pass # If we exit the loop, then we've failed to verify the assertion against
diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py --- a/tests/unit/accounts/test_services.py +++ b/tests/unit/accounts/test_services.py @@ -19,6 +19,9 @@ import pytest import requests +from webauthn.helpers import bytes_to_base64url +from webauthn.helpers.structs import AttestationFormat, PublicKeyCredentialType +from webauthn.registration.verify_registration_response import VerifiedRegistration from zope.interface.verify import verifyClass import warehouse.utils.otp as otp @@ -554,24 +557,21 @@ def test_check_totp_value_invalid_totp(self, user_service, monkeypatch): assert not valid assert limiter.hit.calls == [pretend.call(user.id), pretend.call()] - @pytest.mark.parametrize( - ("challenge", "rp_name", "rp_id"), - (["fake_challenge", "fake_rp_name", "fake_rp_id"], [None, None, None]), - ) - def test_get_webauthn_credential_options( - self, user_service, challenge, rp_name, rp_id - ): + def test_get_webauthn_credential_options(self, user_service): user = UserFactory.create() options = user_service.get_webauthn_credential_options( - user.id, challenge=challenge, rp_name=rp_name, rp_id=rp_id + user.id, + challenge=b"fake_challenge", + rp_name="fake_rp_name", + rp_id="fake_rp_id", ) - assert options["user"]["id"] == str(user.id) + assert options["user"]["id"] == bytes_to_base64url(str(user.id).encode()) assert options["user"]["name"] == user.username assert options["user"]["displayName"] == user.name - assert options["challenge"] == challenge - assert options["rp"]["name"] == rp_name - assert options["rp"]["id"] == rp_id + assert options["challenge"] == bytes_to_base64url(b"fake_challenge") + assert options["rp"]["name"] == "fake_rp_name" + assert options["rp"]["id"] == "fake_rp_id" assert "icon" not in options["user"] def test_get_webauthn_credential_options_for_blank_name(self, user_service): @@ -579,7 +579,7 @@ def test_get_webauthn_credential_options_for_blank_name(self, user_service): options = user_service.get_webauthn_credential_options( user.id, - challenge="fake_challenge", + challenge=b"fake_challenge", rp_name="fake_rp_name", rp_id="fake_rp_id", ) @@ -598,10 +598,10 @@ def test_get_webauthn_assertion_options(self, user_service): ) options = user_service.get_webauthn_assertion_options( - user.id, challenge="fake_challenge", rp_id="fake_rp_id" + user.id, challenge=b"fake_challenge", rp_id="fake_rp_id" ) - assert options["challenge"] == "fake_challenge" + assert options["challenge"] == bytes_to_base64url(b"fake_challenge") assert options["rpId"] == "fake_rp_id" assert options["allowCredentials"][0]["id"] == user.webauthn[0].credential_id @@ -637,12 +637,21 @@ def test_verify_webauthn_credential_already_in_use(self, user_service, monkeypat user_service.add_webauthn( user.id, label="test_label", - credential_id="foo", - public_key="bar", + credential_id=bytes_to_base64url(b"foo"), + public_key=b"bar", sign_count=1, ) - fake_validated_credential = pretend.stub(credential_id=b"foo") + fake_validated_credential = VerifiedRegistration( + credential_id=b"foo", + credential_public_key=b"bar", + sign_count=0, + aaguid="wutang", + fmt=AttestationFormat.NONE, + credential_type=PublicKeyCredentialType.PUBLIC_KEY, + user_verified=False, + attestation_object=b"foobar", + ) verify_registration_response = pretend.call_recorder( lambda *a, **kw: fake_validated_credential ) diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -20,6 +20,9 @@ from 
pyramid.httpexceptions import HTTPMovedPermanently, HTTPSeeOther from sqlalchemy.orm.exc import NoResultFound +from webauthn.authentication.verify_authentication_response import ( + VerifiedAuthentication, +) from warehouse.accounts import views from warehouse.accounts.interfaces import ( @@ -862,7 +865,9 @@ def test_webauthn_validate(self, monkeypatch, pyramid_request): form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), credential=pretend.stub(errors=["Fake validation failure"]), - validated_credential=(pretend.stub(), pretend.stub()), + validated_credential=VerifiedAuthentication( + credential_id=b"", new_sign_count=1 + ), ) form_class = pretend.call_recorder(lambda *a, **kw: form_obj) monkeypatch.setattr(views, "WebAuthnAuthenticationForm", form_class) diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -22,6 +22,7 @@ from pyramid.response import Response from sqlalchemy.orm import joinedload from sqlalchemy.orm.exc import NoResultFound +from webauthn.helpers import bytes_to_base64url from webob.multidict import MultiDict import warehouse.utils.otp as otp @@ -1338,7 +1339,7 @@ def test_validate_webauthn_provision(self, monkeypatch): validate=lambda: True, validated_credential=pretend.stub( credential_id=b"fake_credential_id", - public_key=b"fake_public_key", + credential_public_key=b"fake_public_key", sign_count=1, ), label=pretend.stub(data="fake_label"), @@ -1360,8 +1361,8 @@ def test_validate_webauthn_provision(self, monkeypatch): pretend.call( 1234, label="fake_label", - credential_id="fake_credential_id", - public_key="fake_public_key", + credential_id=bytes_to_base64url(b"fake_credential_id"), + public_key=bytes_to_base64url(b"fake_public_key"), sign_count=1, ) ] diff --git a/tests/unit/utils/test_webauthn.py b/tests/unit/utils/test_webauthn.py --- a/tests/unit/utils/test_webauthn.py +++ b/tests/unit/utils/test_webauthn.py @@ -14,107 +14,174 @@ import pytest import webauthn as pywebauthn +from webauthn.authentication.verify_authentication_response import ( + VerifiedAuthentication, +) +from webauthn.helpers import base64url_to_bytes, bytes_to_base64url +from webauthn.helpers.structs import ( + AttestationFormat, + AuthenticationCredential, + AuthenticatorAssertionResponse, + AuthenticatorAttestationResponse, + PublicKeyCredentialType, + RegistrationCredential, +) +from webauthn.registration.verify_registration_response import VerifiedRegistration + import warehouse.utils.webauthn as webauthn def test_generate_webauthn_challenge(): challenge = webauthn.generate_webauthn_challenge() - assert isinstance(challenge, str) - assert ( - challenge - == webauthn._webauthn_b64encode( - webauthn._webauthn_b64decode(challenge) - ).decode() - ) + assert isinstance(challenge, bytes) + assert challenge == base64url_to_bytes(bytes_to_base64url(challenge)) def test_verify_registration_response(monkeypatch): - response_obj = pretend.stub( - verify=pretend.call_recorder(lambda: "not a real object") + fake_verified_registration = VerifiedRegistration( + credential_id=b"foo", + credential_public_key=b"bar", + sign_count=0, + aaguid="wutang", + fmt=AttestationFormat.NONE, + credential_type=PublicKeyCredentialType.PUBLIC_KEY, + user_verified=False, + attestation_object=b"foobar", + ) + mock_verify_registration_response = pretend.call_recorder( + lambda *a, **kw: fake_verified_registration + ) + monkeypatch.setattr( + pywebauthn, "verify_registration_response", 
mock_verify_registration_response ) - response_cls = pretend.call_recorder(lambda *a, **kw: response_obj) - monkeypatch.setattr(pywebauthn, "WebAuthnRegistrationResponse", response_cls) resp = webauthn.verify_registration_response( - {}, "not_a_real_challenge", rp_id="fake_rp_id", origin="fake_origin" + ( + '{"id": "foo", "rawId": "foo", "response": ' + '{"attestationObject": "foo", "clientDataJSON": "bar"}}' + ), + b"not_a_real_challenge", + rp_id="fake_rp_id", + origin="fake_origin", ) - assert response_cls.calls == [ + assert mock_verify_registration_response.calls == [ pretend.call( - "fake_rp_id", - "fake_origin", - {}, - webauthn._webauthn_b64encode("not_a_real_challenge".encode()).decode(), - self_attestation_permitted=True, - none_attestation_permitted=True, + credential=RegistrationCredential( + id="foo", + raw_id=b"~\x8a", + response=AuthenticatorAttestationResponse( + client_data_json=b"m\xaa", attestation_object=b"~\x8a" + ), + transports=None, + type=PublicKeyCredentialType.PUBLIC_KEY, + ), + expected_challenge=bytes_to_base64url(b"not_a_real_challenge").encode(), + expected_rp_id="fake_rp_id", + expected_origin="fake_origin", + require_user_verification=False, ) ] - assert resp == "not a real object" + assert resp == fake_verified_registration def test_verify_registration_response_failure(monkeypatch): - response_obj = pretend.stub( - verify=pretend.raiser(pywebauthn.webauthn.RegistrationRejectedException) + monkeypatch.setattr( + pywebauthn, + "verify_registration_response", + pretend.raiser(pywebauthn.helpers.exceptions.InvalidRegistrationResponse), ) - response_cls = pretend.call_recorder(lambda *a, **kw: response_obj) - monkeypatch.setattr(pywebauthn, "WebAuthnRegistrationResponse", response_cls) with pytest.raises(webauthn.RegistrationRejectedError): webauthn.verify_registration_response( - {}, "not_a_real_challenge", rp_id="fake_rp_id", origin="fake_origin" + ( + '{"id": "foo", "rawId": "foo", "response": ' + '{"attestationObject": "foo", "clientDataJSON": "bar"}}' + ), + b"not_a_real_challenge", + rp_id="fake_rp_id", + origin="fake_origin", ) def test_verify_assertion_response(monkeypatch): - assertion_obj = pretend.stub(verify=pretend.call_recorder(lambda: 1234)) - assertion_cls = pretend.call_recorder(lambda *a, **kw: assertion_obj) - monkeypatch.setattr(pywebauthn, "WebAuthnAssertionResponse", assertion_cls) - - not_a_real_user = pretend.stub(credential_id="not_a_real_credential") - get_webauthn_users = pretend.call_recorder(lambda *a, **kw: [not_a_real_user]) - monkeypatch.setattr(webauthn, "_get_webauthn_users", get_webauthn_users) + fake_verified_authentication = VerifiedAuthentication( + credential_id=b"a credential id", + new_sign_count=69, + ) + mock_verify_authentication_response = pretend.call_recorder( + lambda *a, **kw: fake_verified_authentication + ) + monkeypatch.setattr( + pywebauthn, + "verify_authentication_response", + mock_verify_authentication_response, + ) - not_a_real_assertion = object() + not_a_real_user = pretend.stub( + webauthn=[ + pretend.stub( + public_key=bytes_to_base64url(b"fake public key"), sign_count=68 + ) + ] + ) resp = webauthn.verify_assertion_response( - not_a_real_assertion, - challenge="not_a_real_challenge", + ( + '{"id": "foo", "rawId": "foo", "response": ' + '{"authenticatorData": "foo", "clientDataJSON": "bar", ' + '"signature": "wutang"}}' + ), + challenge=b"not_a_real_challenge", user=not_a_real_user, origin="fake_origin", rp_id="fake_rp_id", ) - assert get_webauthn_users.calls == [ - pretend.call(not_a_real_user, 
rp_id="fake_rp_id") - ] - assert assertion_cls.calls == [ + assert mock_verify_authentication_response.calls == [ pretend.call( - not_a_real_user, - not_a_real_assertion, - webauthn._webauthn_b64encode("not_a_real_challenge".encode()).decode(), - "fake_origin", - allow_credentials=["not_a_real_credential"], + credential=AuthenticationCredential( + id="foo", + raw_id=b"~\x8a", + response=AuthenticatorAssertionResponse( + client_data_json=b"m\xaa", + authenticator_data=b"~\x8a", + signature=b"\xc2\xebZ\x9e", + user_handle=None, + ), + type=PublicKeyCredentialType.PUBLIC_KEY, + ), + expected_challenge=b"bm90X2FfcmVhbF9jaGFsbGVuZ2U", + expected_rp_id="fake_rp_id", + expected_origin="fake_origin", + credential_public_key=b"fake public key", + credential_current_sign_count=68, + require_user_verification=False, ) ] - assert resp == ("not_a_real_credential", 1234) + assert resp == fake_verified_authentication def test_verify_assertion_response_failure(monkeypatch): - assertion_obj = pretend.stub( - verify=pretend.raiser(pywebauthn.webauthn.AuthenticationRejectedException) + monkeypatch.setattr( + pywebauthn, + "verify_authentication_response", + pretend.raiser(pywebauthn.helpers.exceptions.InvalidAuthenticationResponse), ) - assertion_cls = pretend.call_recorder(lambda *a, **kw: assertion_obj) - monkeypatch.setattr(pywebauthn, "WebAuthnAssertionResponse", assertion_cls) get_webauthn_users = pretend.call_recorder( - lambda *a, **kw: [pretend.stub(credential_id=pretend.stub())] + lambda *a, **kw: [(b"not a public key", 0)] ) - monkeypatch.setattr(webauthn, "_get_webauthn_users", get_webauthn_users) + monkeypatch.setattr(webauthn, "_get_webauthn_user_public_keys", get_webauthn_users) with pytest.raises(webauthn.AuthenticationRejectedError): webauthn.verify_assertion_response( - pretend.stub(), - challenge="not_a_real_challenge", + ( + '{"id": "foo", "rawId": "foo", "response": ' + '{"authenticatorData": "foo", "clientDataJSON": "bar", ' + '"signature": "wutang"}}' + ), + challenge=b"not_a_real_challenge", user=pretend.stub(), origin="fake_origin", rp_id="fake_rp_id",
Bump webauthn from 0.4.7 to 1.0.0 Bumps [webauthn](https://github.com/duo-labs/py_webauthn) from 0.4.7 to 1.0.0. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/duo-labs/py_webauthn/releases">webauthn's releases</a>.</em></p> <blockquote> <h2>v1.0.0-beta2</h2> <p>No release notes provided.</p> <h2>v1.0.0-beta1</h2> <p>Preview release of the revitalized py_webauthn library. See PR <a href="https://github-redirect.dependabot.com/duo-labs/py_webauthn/issues/95">#95</a></p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/duo-labs/py_webauthn/commit/970c29270a445aa77dfde5e1b4de47b8baf65295"><code>970c292</code></a> Merge pull request <a href="https://github-redirect.dependabot.com/duo-labs/py_webauthn/issues/98">#98</a> from duo-labs/prepare-v1.0.0</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/2e1171cd7510eff61213a4159c39e9d6d2970ac1"><code>2e1171c</code></a> Add mention of Pydantic for structs</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/a7a3a97c2a6573cc39e02b0201a4854090a50ec3"><code>a7a3a97</code></a> Drop beta tag</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/2331c7647a6562290b655884002f1963b9b4e940"><code>2331c76</code></a> Add mention of <code>webauthn.helpers.structs</code></li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/3649f68acfe4071b790c44d76303db3f8c954ee4"><code>3649f68</code></a> Merge pull request <a href="https://github-redirect.dependabot.com/duo-labs/py_webauthn/issues/97">#97</a> from duo-labs/fix-setup-py</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/65f158b4ee27dbf59275478d92c8aaae7c07cdbe"><code>65f158b</code></a> Update version to 1.0.0-beta2</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/2c12de983dbfcc26907631401a0169f2a31bc9fd"><code>2c12de9</code></a> Update setup.py to use find_packages</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/00e5dc886b1ad6246134bbcf7950b8f93641eec9"><code>00e5dc8</code></a> Merge pull request <a href="https://github-redirect.dependabot.com/duo-labs/py_webauthn/issues/95">#95</a> from duo-labs/lib-refresh</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/8f22b048c4c0d28a9d8794dd38e8f0a03685c09d"><code>8f22b04</code></a> Indicate Python 3.8 as minimum version</li> <li><a href="https://github.com/duo-labs/py_webauthn/commit/b78e0f3af1f4ffeaa66a2b6e0a3e86bb70129ab0"><code>b78e0f3</code></a> Wrap python versions in strings and use v2</li> <li>Additional commits viewable in <a href="https://github.com/duo-labs/py_webauthn/compare/v0.4.7...v1.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=webauthn&package-manager=pip&previous-version=0.4.7&new-version=1.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details>
Looks like there are quite a few breaking changes we need to accommodate here: https://github.com/duo-labs/py_webauthn/releases/tag/v1.0.0 A newer version of webauthn exists, but since this PR has been edited by someone other than Dependabot I haven't updated it. You'll get a PR for the updated version as normal once this PR is merged.
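Since the hint above is terse, here is a sketch of the 1.x registration-verification call shape, condensed from this record's patch (the wrapper function itself is illustrative, not Warehouse code): py_webauthn 1.0 drops the `WebAuthnRegistrationResponse` object in favor of module-level functions that take a parsed credential struct and explicit `expected_*` keyword arguments.
```
# Sketch of the py_webauthn 1.x verification flow, condensed from the patch
# above; the wrapper function is illustrative, not part of Warehouse.
import webauthn
from webauthn.helpers.exceptions import InvalidRegistrationResponse
from webauthn.helpers.structs import RegistrationCredential


def verify(raw_json: bytes, challenge: bytes, rp_id: str, origin: str):
    # 1.x first parses the browser's JSON payload into a typed struct...
    credential = RegistrationCredential.parse_raw(raw_json)
    try:
        # ...then verifies it with explicit expected_* keyword arguments,
        # raising InvalidRegistrationResponse on failure (0.4.x instead
        # raised RegistrationRejectedException from a response object).
        return webauthn.verify_registration_response(
            credential=credential,
            expected_challenge=challenge,
            expected_rp_id=rp_id,
            expected_origin=origin,
            require_user_verification=False,
        )
    except InvalidRegistrationResponse as e:
        raise ValueError(f"registration rejected: {e}")
```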
2021-12-14T02:44:41Z
[]
[]
pypi/warehouse
10,508
pypi__warehouse-10508
[ "10501" ]
439fd6047f088d432af773c331872b8838173edc
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -869,7 +869,12 @@ def file_upload(request): if field.description and isinstance(field, wtforms.StringField): error_message = ( "{value!r} is an invalid value for {field}. ".format( - value=field.data, field=field.description + value=( + field.data[:30] + "..." + field.data[-30:] + if len(field.data) > 60 + else field.data + ), + field=field.description, ) + "Error: {} ".format(form.errors[field_name][0]) + "See "
diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -943,7 +943,11 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): "md5_digest": "a fake md5 digest", "summary": "A" * 513, }, - "'" + "A" * 513 + "' is an invalid value for Summary. " + "'" + + "A" * 30 + + "..." + + "A" * 30 + + "' is an invalid value for Summary. " "Error: Field cannot be longer than 512 characters. " "See " "https://packaging.python.org/specifications/core-metadata"
Test PyPI upload fails with 502 Bad Gateway error **Describe the bug** During a GitHub Action to release the latest version of Aurora, the package fails to upload to the PyPI test server with a 502 Bad Gateway error. **Expected behavior** The Action has worked fine in the past. **To Reproduce** https://github.com/fsciortino/Aurora/runs/4558019561?check_suite_focus=true **My Platform** GitHub Actions Your help is greatly appreciated. Mention @fsciortino.
Thanks for the report. This is fixed in #10497. Your quick help is much appreciated. 🎉 @di unfortunately it seems that there's still some issue: we still get the same error when trying to upload the `aurorafusion` package. https://github.com/fsciortino/Aurora/runs/4582247971?check_suite_focus=true I tried to increase the package version number and create a new release via github, but the error is still `HTTPError: 502 Bad Gateway from https://test.pypi.org/legacy/`. Could you please have a look? @fsciortino Unfortunately I'm not seeing the corresponding exception in our error tracker. Can you share the actual distribution file attempting to be uploaded with us here so I can debug? @fsciortino Looks like this started failing more than 10 days ago, so it's not likely due to this particular issue: https://github.com/fsciortino/Aurora/runs/4468671514?check_suite_focus=true @di thank you very much for looking into this. Yes, I've been seeing the same error for more than a week, and only 3 days ago @smithsp opened this issue because we couldn't see what could be going wrong on our side. We're trying to upload the package via https://github.com/fsciortino/Aurora/blob/master/.github/workflows/python-publish.yml How can I get a "distribution file" and share it with you? I would recommend building your project locally (`python setup.py sdist`) and trying to upload it manually (`twine upload -r testpypi dist/aurorafusion-2.0.4.tar.gz`) to confirm that it also fails, then attach it here or host it elsewhere and link to it, whatever is easiest. I confirm that `testpypi` also fails when doing a manual upload. Here's the dist file: [aurorafusion-2.0.4.tar.gz](https://github.com/pypa/warehouse/files/7747164/aurorafusion-2.0.4.tar.gz) I might be wrong, and it's certainly not a reason for crashing but it looks like you set a very long description, and no `long_description`. Did you mixup the 2 fields by any chance ? I see this error locally: ``` $ twine upload --repository-url http://localhost/legacy/ aurorafusion-2.0.4.tar.gz Uploading distributions to http://localhost/legacy/ Enter your username: di Enter your password: Uploading aurorafusion-2.0.4.tar.gz 100%|███████████████████████████████████████████| 11.5M/11.5M [00:05<00:00, 2.33MB/s] NOTE: Try --verbose to see response content. HTTPError: 400 Bad Request from http://localhost/legacy/ ".. image:: https://badge.fury.io/py/aurorafusion.svg :target: https://badge.fury.io/py/aurorafusion .. image:: https://anaconda.org/conda-forge/aurorafusion/badges/version.svg :target: https://anaconda.org/conda-forge/aurorafusion .. image:: https://anaconda.org/conda-forge/aurorafusion/badges/latest_release_date.svg :target: https://anaconda.org/conda-forge/aurorafusion .. image:: https://anaconda.org/conda-forge/aurorafusion/badges/platforms.svg :target: https://anaconda.org/conda-forge/aurorafusion .. image:: https://anaconda.org/conda-forge/aurorafusion/badges/license.svg :target: https://anaconda.org/conda-forge/aurorafusion .. image:: https://anaconda.org/conda-forge/aurorafusion/badges/downloads.svg :target: https://anaconda.org/conda-forge/aurorafusion Aurora is a package to simulate heavy-ion transportm neutrals and radiation in magnetically-confined plasmas. It includes a 1.5D impurity transport forward model, thoroughly benchmarked with the widely-adopted STRAHL code. It also offers routines to analyze neutral states of hydrogen isotopes, both from the edge of fusion plasmas and from neutral beam injection. 
A simple interface to atomic data for fusion plasmas makes it a convenient tool for spectroscopy and integrated modeling. Aurora's code is mostly written in Python 3 and Fortran 90. An experimental Julia interface has also been added. Documentation is available at https://aurora-fusion.readthedocs.io. Development ----------- The code is developed and maintained by F. Sciortino (MPI-IPP) in collaboration with T. Odstrcil (GA), D. Fajardo (MPI-IPP), A. Cavallaro (MIT) and R. Reksoatmodjo (W&M), with support from O. Linder (MPI-IPP), C. Johnson (U. Auburn), D. Stanczak (IPPLM) and S. Smith (GA). The STRAHL documentation provided by R.Dux (MPI-IPP) was extremely helpful to guide the initial development of Aurora. New contributors are more than welcome! Please get in touch at francesco.sciortino-at-ipp.mpg.de or open a pull-request via Github. Generally, we would appreciate if you could work with us to merge your features back into the main Aurora distribution if there is any chance that the changes that you made could be useful to others. Installation ------------ Aurora can be installed from PyPI using pip install aurorafusion --user You can omit the `--user` flag if you have write-access to the default package directory on your system and wish to install there. Installing via conda is also possible using conda install -c conda-forge aurorafusion Both the PyPI and conda installation are automatically updated at every package release. Note that the conda installation does not currently install dependencies on `omfit_classes`, which users may need to install via `pip` (see the `PyPI repo <https://pypi.org/project/omfit-classes/>`_). To look at the code and contribute to the Aurora repository, it is recommended to install from source, by git-cloning the `Aurora repo <https://github.com/fsciortino/aurora>`_ from Github. This will ensure that you can access the latest version of the tools. For compilation after git-cloning, users can make use of the `setup.py` file, e.g. using python setup.py -e . or use the makefile in the package directory to build the Fortran code using make clean; make Note that the makefile will not install any of the dependencies, listed in the `requirements.txt` file in the main directory. You can use this file to quickly install dependencies within a Python virtual environment, or install each dependency one at a time. The Julia version of the code is not built by default. If you have Julia installed on your system, you can do make julia from the main package directory. This will build a Julia `sysimage` to speed up access of Julia source code from Python, but it is not strictly necessary. See the documentation to read about interfacing Python and Julia. Atomic data ----------- Aurora offers a simple interface to download, read, process and plot atomic data from the Atomic Data and Structure Analysis (ADAS) database, particularly through the OPEN-ADAS website: www.open-adas.ac.uk . ADAS data files can be fetched remotely and stored within the Aurora distribution directory, or users may choose to fetch ADAS files from a chosen, pre-existing directory by setting export AURORA_ADAS_DIR=my_adas_directory within their Linux environment (or analogous). If an ADAS files that is not available in AURORA_ADAS_DIR is requested by a user, Aurora attempts to download it and store it there. 
If you are using a public installation of Aurora and you do not have write-access to the directory where Aurora is installed, make sure to set AURORA_ADAS_DIR to a directory where you do have write-access before starting. Several ADAS formats can currently be managed -- please see the docs. Please contact the authors to request and/or suggest expansions of current capabilities. License ------- Aurora is distributed under the MIT License. The package is made open-source with the hope that this will speed up research on fusion energy and make further code development easier. However, we kindly ask that all users communicate to us their purposes, difficulties and successes with Aurora, so that we may support users as much as possible and grow the code further. Citing Aurora ------------- Please see the `User Agreement <https://github.com/fsciortino/Aurora/blob/master/USER_AGREEMENT.txt>`_." is an invalid value for Summary. Error: Field cannot be longer than 512 characters. See https://packaging.python.org/specifications/core-metadata for more information. ``` Which seems like @ewjoachim is correct, but not sure why this results in a 502 from PyPI instead of a 400. Will reopen this to investigate (since this isn't due to #10497), but short-term changing `description` to `long_description` should fix this. I think shortening/truncating the bad value in the error message here would probably help. hmmm, from logs it looks like the backend _is_ returning a 400... ``` [MY.IP.ADDR.ESS] - ewdurbin [20/Dec/2021:18:33:27 +0000] "POST /legacy/ HTTP/1.1" 400 17342 "-" "twine/3.4.2 importlib_metadata/4.8.1 pkginfo/1.7.1 requests/2.26.0 requests-toolbelt/0.9.1 tqdm/4.62.3 CPython/3.9.6" ``` I'll try to determine what's turning that into a 503... Definitely looks like it's the fact that we're trying to shovel so much data into the HTTP Response Status line, which is a header. I'm guessing some part of our stack is choking on that, so @di's suggestion is a great start.
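The fix in this record's patch caps how much of the offending value gets echoed into the 400 status line; a sketch of that truncation rule in isolation (the helper name is invented for illustration):
```
# Sketch of the truncation rule from the patch above: keep the first and
# last 30 characters of an overlong metadata value so the 400 status line
# stays small enough for the proxy layer to pass through unmangled.
def elide(value: str) -> str:
    return value[:30] + "..." + value[-30:] if len(value) > 60 else value


print(elide("A" * 513))       # 'AAA...AAA', 63 characters instead of 513
print(elide("short value"))   # short values pass through unchanged
```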
2021-12-20T19:32:24Z
[]
[]
pypi/warehouse
10,628
pypi__warehouse-10628
[ "10620" ]
d0e599547c66f1eaa613c88a3c9c23054adcfae8
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -178,6 +178,7 @@ def configure(settings=None): maybe_set(settings, "celery.broker_url", "BROKER_URL") maybe_set(settings, "celery.result_url", "REDIS_URL") maybe_set(settings, "celery.scheduler_url", "REDIS_URL") + maybe_set(settings, "oidc.jwk_cache_url", "REDIS_URL") maybe_set(settings, "database.url", "DATABASE_URL") maybe_set(settings, "elasticsearch.url", "ELASTICSEARCH_URL") maybe_set(settings, "elasticsearch.url", "ELASTICSEARCH_SIX_URL") @@ -459,6 +460,9 @@ def configure(settings=None): # Register support for Macaroon based authentication config.include(".macaroons") + # Register support for OIDC provider based authentication + config.include(".oidc") + # Register support for malware checks config.include(".malware") diff --git a/warehouse/oidc/__init__.py b/warehouse/oidc/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/__init__.py @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.oidc.interfaces import IOIDCProviderService +from warehouse.oidc.services import OIDCProviderServiceFactory + + +def includeme(config): + config.register_service_factory( + OIDCProviderServiceFactory( + provider="github", issuer_url="https://token.actions.githubusercontent.com" + ), + IOIDCProviderService, + name="github", + ) diff --git a/warehouse/oidc/interfaces.py b/warehouse/oidc/interfaces.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/interfaces.py @@ -0,0 +1,32 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from zope.interface import Interface + + +class IOIDCProviderService(Interface): + def get_key(key_id): + """ + Return the JWK identified by the given KID, + fetching it if not already cached locally. + + Returns None if the JWK does not exist or the access pattern is + invalid (i.e., exceeds our internal limit on JWK requests to + each provider). + """ + pass + + def verify(token): + """ + Verify the given JWT. + """ diff --git a/warehouse/oidc/services.py b/warehouse/oidc/services.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/services.py @@ -0,0 +1,177 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +import redis +import requests +import sentry_sdk + +from jwt import PyJWK +from zope.interface import implementer + +from warehouse.metrics.interfaces import IMetricsService +from warehouse.oidc.interfaces import IOIDCProviderService + + +@implementer(IOIDCProviderService) +class OIDCProviderService: + def __init__(self, provider, issuer_url, cache_url, metrics): + self.provider = provider + self.issuer_url = issuer_url + self.cache_url = cache_url + self.metrics = metrics + + self._provider_jwk_key = f"/warehouse/oidc/jwks/{self.provider}" + self._provider_timeout_key = f"{self._provider_jwk_key}/timeout" + + def _store_keyset(self, keys): + """ + Store the given keyset for the given provider, setting the timeout key + in the process. + """ + + with redis.StrictRedis.from_url(self.cache_url) as r: + r.set(self._provider_jwk_key, json.dumps(keys)) + r.setex(self._provider_timeout_key, 60, "placeholder") + + def _get_keyset(self): + """ + Return the cached keyset for the given provider, or an empty + keyset if no keys are currently cached. + """ + + with redis.StrictRedis.from_url(self.cache_url) as r: + keys = r.get(self._provider_jwk_key) + timeout = bool(r.exists(self._provider_timeout_key)) + if keys is not None: + return (json.loads(keys), timeout) + else: + return ({}, timeout) + + def _refresh_keyset(self): + """ + Attempt to refresh the keyset from the OIDC provider, assuming no + timeout is in effect. + + Returns the refreshed keyset, or the cached keyset if a timeout is + in effect. + + Returns the cached keyset on any provider access or format errors. + """ + + # Fast path: we're in a cooldown from a previous refresh. + keys, timeout = self._get_keyset() + if timeout: + self.metrics.increment( + "warehouse.oidc.refresh_keyset.timeout", + tags=[f"provider:{self.provider}"], + ) + return keys + + oidc_url = f"{self.issuer_url}/.well-known/openid-configuration" + + resp = requests.get(oidc_url) + + # For whatever reason, an OIDC provider's configuration URL might be + # offline. We don't want to completely explode here, since other + # providers might still be online (and need updating), so we spit + # out an error and return None instead of raising. + if not resp.ok: + sentry_sdk.capture_message( + f"OIDC provider {self.provider} failed to return configuration: " + f"{oidc_url}" + ) + return keys + + oidc_conf = resp.json() + jwks_url = oidc_conf.get("jwks_uri") + + # A valid OIDC configuration MUST have a `jwks_uri`, but we + # defend against its absence anyways. + if jwks_url is None: + sentry_sdk.capture_message( + f"OIDC provider {self.provider} is returning malformed " + "configuration (no jwks_uri)" + ) + return keys + + resp = requests.get(jwks_url) + + # Same reasoning as above. + if not resp.ok: + sentry_sdk.capture_message( + f"OIDC provider {self.provider} failed to return JWKS JSON: " + f"{jwks_url}" + ) + return keys + + jwks_conf = resp.json() + new_keys = jwks_conf.get("keys") + + # Another sanity test: an OIDC provider should never return an empty + # keyset, but there's nothing stopping them from doing so. 
We don't + # want to cache an empty keyset just in case it's a short-lived error, + # so we check here, error, and return the current cache instead. + if not new_keys: + sentry_sdk.capture_message( + f"OIDC provider {self.provider} returned JWKS JSON but no keys" + ) + return keys + + keys = {key["kid"]: key for key in new_keys} + self._store_keyset(keys) + + return keys + + def get_key(self, key_id): + """ + Return a JWK for the given key ID, or None if the key can't be found + in this provider's keyset. + """ + + keyset, _ = self._get_keyset() + if key_id not in keyset: + keyset = self._refresh_keyset() + if key_id not in keyset: + self.metrics.increment( + "warehouse.oidc.get_key.error", + tags=[f"provider:{self.provider}", f"key_id:{key_id}"], + ) + return None + return PyJWK(keyset[key_id]) + + def verify(self, token): + return NotImplemented + + +class OIDCProviderServiceFactory: + def __init__(self, provider, issuer_url, service_class=OIDCProviderService): + self.provider = provider + self.issuer_url = issuer_url + self.service_class = service_class + + def __call__(self, _context, request): + cache_url = request.registry.settings["oidc.jwk_cache_url"] + metrics = request.find_service(IMetricsService, context=None) + + return self.service_class(self.provider, self.issuer_url, cache_url, metrics) + + def __eq__(self, other): + if not isinstance(other, OIDCProviderServiceFactory): + return NotImplemented + + return (self.provider, self.issuer_url, self.service_class) == ( + other.provider, + other.issuer_url, + other.service_class, + )
diff --git a/tests/unit/oidc/test_services.py b/tests/unit/oidc/test_services.py new file mode 100644 --- /dev/null +++ b/tests/unit/oidc/test_services.py @@ -0,0 +1,418 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools + +import fakeredis +import pretend + +from jwt import PyJWK +from zope.interface.verify import verifyClass + +from warehouse.oidc import interfaces, services + + +def test_oidc_provider_service_factory(): + factory = services.OIDCProviderServiceFactory( + provider="example", issuer_url="https://example.com" + ) + + assert factory.provider == "example" + assert factory.issuer_url == "https://example.com" + assert verifyClass(interfaces.IOIDCProviderService, factory.service_class) + + metrics = pretend.stub() + request = pretend.stub( + registry=pretend.stub( + settings={"oidc.jwk_cache_url": "https://another.example.com"} + ), + find_service=lambda *a, **kw: metrics, + ) + service = factory(pretend.stub(), request) + + assert isinstance(service, factory.service_class) + assert service.provider == factory.provider + assert service.issuer_url == factory.issuer_url + assert service.cache_url == "https://another.example.com" + assert service.metrics == metrics + + assert factory != object() + assert factory != services.OIDCProviderServiceFactory( + provider="another", issuer_url="https://foo.example.com" + ) + + +class TestOIDCProviderService: + def test_verify(self): + service = services.OIDCProviderService( + provider=pretend.stub(), + issuer_url=pretend.stub(), + cache_url=pretend.stub(), + metrics=pretend.stub(), + ) + assert service.verify(pretend.stub()) == NotImplemented + + def test_get_keyset_not_cached(self, monkeypatch): + service = services.OIDCProviderService( + provider="example", + issuer_url=pretend.stub(), + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + monkeypatch.setattr(services.redis, "StrictRedis", fakeredis.FakeStrictRedis) + keys, timeout = service._get_keyset() + + assert not keys + assert timeout is False + + def test_get_keyset_cached(self, monkeypatch): + service = services.OIDCProviderService( + provider="example", + issuer_url=pretend.stub(), + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + # Create a fake server to provide persistent state through each + # StrictRedis.from_url context manager. 
+ server = fakeredis.FakeServer() + from_url = functools.partial(fakeredis.FakeStrictRedis.from_url, server=server) + monkeypatch.setattr(services.redis.StrictRedis, "from_url", from_url) + + keyset = {"fake-key-id": {"foo": "bar"}} + service._store_keyset(keyset) + keys, timeout = service._get_keyset() + + assert keys == keyset + assert timeout is True + + def test_refresh_keyset_timeout(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + # Create a fake server to provide persistent state through each + # StrictRedis.from_url context manager. + server = fakeredis.FakeServer() + from_url = functools.partial(fakeredis.FakeStrictRedis.from_url, server=server) + monkeypatch.setattr(services.redis.StrictRedis, "from_url", from_url) + + keyset = {"fake-key-id": {"foo": "bar"}} + service._store_keyset(keyset) + + keys = service._refresh_keyset() + assert keys == keyset + assert metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.refresh_keyset.timeout", tags=["provider:example"] + ) + ] + + def test_refresh_keyset_oidc_config_fails(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + monkeypatch.setattr(services.redis, "StrictRedis", fakeredis.FakeStrictRedis) + + requests = pretend.stub( + get=pretend.call_recorder(lambda url: pretend.stub(ok=False)) + ) + sentry_sdk = pretend.stub( + capture_message=pretend.call_recorder(lambda msg: pretend.stub()) + ) + monkeypatch.setattr(services, "requests", requests) + monkeypatch.setattr(services, "sentry_sdk", sentry_sdk) + + keys = service._refresh_keyset() + + assert keys == {} + assert metrics.increment.calls == [] + assert requests.get.calls == [ + pretend.call("https://example.com/.well-known/openid-configuration") + ] + assert sentry_sdk.capture_message.calls == [ + pretend.call( + "OIDC provider example failed to return configuration: " + "https://example.com/.well-known/openid-configuration" + ) + ] + + def test_refresh_keyset_oidc_config_no_jwks_uri(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + monkeypatch.setattr(services.redis, "StrictRedis", fakeredis.FakeStrictRedis) + + requests = pretend.stub( + get=pretend.call_recorder( + lambda url: pretend.stub(ok=True, json=lambda: {}) + ) + ) + sentry_sdk = pretend.stub( + capture_message=pretend.call_recorder(lambda msg: pretend.stub()) + ) + monkeypatch.setattr(services, "requests", requests) + monkeypatch.setattr(services, "sentry_sdk", sentry_sdk) + + keys = service._refresh_keyset() + + assert keys == {} + assert metrics.increment.calls == [] + assert requests.get.calls == [ + pretend.call("https://example.com/.well-known/openid-configuration") + ] + assert sentry_sdk.capture_message.calls == [ + pretend.call( + "OIDC provider example is returning malformed configuration " + "(no jwks_uri)" + ) + ] + + def test_refresh_keyset_oidc_config_no_jwks_json(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, 
**kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + monkeypatch.setattr(services.redis, "StrictRedis", fakeredis.FakeStrictRedis) + + openid_resp = pretend.stub( + ok=True, + json=lambda: { + "jwks_uri": "https://example.com/.well-known/jwks.json", + }, + ) + jwks_resp = pretend.stub(ok=False) + + def get(url): + if url == "https://example.com/.well-known/jwks.json": + return jwks_resp + else: + return openid_resp + + requests = pretend.stub(get=pretend.call_recorder(get)) + sentry_sdk = pretend.stub( + capture_message=pretend.call_recorder(lambda msg: pretend.stub()) + ) + monkeypatch.setattr(services, "requests", requests) + monkeypatch.setattr(services, "sentry_sdk", sentry_sdk) + + keys = service._refresh_keyset() + + assert keys == {} + assert metrics.increment.calls == [] + assert requests.get.calls == [ + pretend.call("https://example.com/.well-known/openid-configuration"), + pretend.call("https://example.com/.well-known/jwks.json"), + ] + assert sentry_sdk.capture_message.calls == [ + pretend.call( + "OIDC provider example failed to return JWKS JSON: " + "https://example.com/.well-known/jwks.json" + ) + ] + + def test_refresh_keyset_oidc_config_no_jwks_keys(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + monkeypatch.setattr(services.redis, "StrictRedis", fakeredis.FakeStrictRedis) + + openid_resp = pretend.stub( + ok=True, + json=lambda: { + "jwks_uri": "https://example.com/.well-known/jwks.json", + }, + ) + jwks_resp = pretend.stub(ok=True, json=lambda: {}) + + def get(url): + if url == "https://example.com/.well-known/jwks.json": + return jwks_resp + else: + return openid_resp + + requests = pretend.stub(get=pretend.call_recorder(get)) + sentry_sdk = pretend.stub( + capture_message=pretend.call_recorder(lambda msg: pretend.stub()) + ) + monkeypatch.setattr(services, "requests", requests) + monkeypatch.setattr(services, "sentry_sdk", sentry_sdk) + + keys = service._refresh_keyset() + + assert keys == {} + assert metrics.increment.calls == [] + assert requests.get.calls == [ + pretend.call("https://example.com/.well-known/openid-configuration"), + pretend.call("https://example.com/.well-known/jwks.json"), + ] + assert sentry_sdk.capture_message.calls == [ + pretend.call("OIDC provider example returned JWKS JSON but no keys") + ] + + def test_refresh_keyset_successful(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + # Create a fake server to provide persistent state through each + # StrictRedis.from_url context manager. 
+ server = fakeredis.FakeServer() + from_url = functools.partial(fakeredis.FakeStrictRedis.from_url, server=server) + monkeypatch.setattr(services.redis.StrictRedis, "from_url", from_url) + + openid_resp = pretend.stub( + ok=True, + json=lambda: { + "jwks_uri": "https://example.com/.well-known/jwks.json", + }, + ) + jwks_resp = pretend.stub( + ok=True, json=lambda: {"keys": [{"kid": "fake-key-id", "foo": "bar"}]} + ) + + def get(url): + if url == "https://example.com/.well-known/jwks.json": + return jwks_resp + else: + return openid_resp + + requests = pretend.stub(get=pretend.call_recorder(get)) + sentry_sdk = pretend.stub( + capture_message=pretend.call_recorder(lambda msg: pretend.stub()) + ) + monkeypatch.setattr(services, "requests", requests) + monkeypatch.setattr(services, "sentry_sdk", sentry_sdk) + + keys = service._refresh_keyset() + + assert keys == {"fake-key-id": {"kid": "fake-key-id", "foo": "bar"}} + assert metrics.increment.calls == [] + assert requests.get.calls == [ + pretend.call("https://example.com/.well-known/openid-configuration"), + pretend.call("https://example.com/.well-known/jwks.json"), + ] + assert sentry_sdk.capture_message.calls == [] + + # Ensure that we also cached the updated keyset as part of refreshing. + keys, timeout = service._get_keyset() + assert keys == {"fake-key-id": {"kid": "fake-key-id", "foo": "bar"}} + assert timeout is True + + def test_get_key_cached(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + keyset = { + "fake-key-id": { + "kid": "fake-key-id", + "n": "ZHVtbXkK", + "kty": "RSA", + "alg": "RS256", + "e": "AQAB", + "use": "sig", + "x5c": ["dummy"], + "x5t": "dummy", + } + } + monkeypatch.setattr(service, "_get_keyset", lambda: (keyset, True)) + + key = service.get_key("fake-key-id") + assert isinstance(key, PyJWK) + assert key.key_id == "fake-key-id" + + assert metrics.increment.calls == [] + + def test_get_key_uncached(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + keyset = { + "fake-key-id": { + "kid": "fake-key-id", + "n": "ZHVtbXkK", + "kty": "RSA", + "alg": "RS256", + "e": "AQAB", + "use": "sig", + "x5c": ["dummy"], + "x5t": "dummy", + } + } + monkeypatch.setattr(service, "_get_keyset", lambda: ({}, False)) + monkeypatch.setattr(service, "_refresh_keyset", lambda: keyset) + + key = service.get_key("fake-key-id") + assert isinstance(key, PyJWK) + assert key.key_id == "fake-key-id" + + assert metrics.increment.calls == [] + + def test_get_key_refresh_fails(self, monkeypatch): + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=metrics, + ) + + monkeypatch.setattr(service, "_get_keyset", lambda: ({}, False)) + monkeypatch.setattr(service, "_refresh_keyset", lambda: {}) + + key = service.get_key("fake-key-id") + assert key is None + + assert metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.get_key.error", + tags=["provider:example", "key_id:fake-key-id"], + ) + ] diff --git a/tests/unit/test_config.py 
b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -332,6 +332,7 @@ def __init__(self): pretend.call(".email"), pretend.call(".accounts"), pretend.call(".macaroons"), + pretend.call(".oidc"), pretend.call(".malware"), pretend.call(".manage"), pretend.call(".packaging"),
Lifetime management for OIDC JWKS The first step in verifying OIDC JWTs is checking their signatures, which means checking them against the OIDC provider's signing key set. There are a few approaches we could take to acquiring those keys and keeping them up to date:
1. Bake them into the Warehouse codebase. We could pull the current JWKS blob from the URI referenced in the provider's `openid-configuration`.
   * Pros: Simple.
   * Cons: Tedious to update in the event of key updates/rotations; requires a PR each time any provider rotates. Gaps between a rotation and the corresponding update mean that Warehouse would probably reject authentic JWTs.
2. Fetch the JWKS for each provider on Warehouse startup/initialization. Restarts always re-fetch the JWKS.
   * Pros: Simple.
   * Cons: Most of the same cons as (1).
3. All of (2), plus a periodic job that re-checks each OIDC provider's JWKS on a schedule.
   * Pros: No rotation downtime.
   * Cons: Requires the most code.
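All three options share the same fetch step, so for concreteness, here is a minimal sketch of it: resolve the provider's `openid-configuration` document to its `jwks_uri` and index the returned keys by `kid`. The function name and error handling are illustrative assumptions rather than the actual Warehouse implementation, though the test patch above exercises a service with this same shape (failures yield an empty keyset).

```
import requests


def fetch_jwks(issuer_url: str) -> dict[str, dict]:
    # Resolve the discovery document to the provider's JWKS URI.
    config_resp = requests.get(f"{issuer_url}/.well-known/openid-configuration")
    if not config_resp.ok:
        return {}  # the real service would also report this to Sentry/metrics

    jwks_uri = config_resp.json().get("jwks_uri")
    if jwks_uri is None:
        return {}

    jwks_resp = requests.get(jwks_uri)
    if not jwks_resp.ok:
        return {}

    # Index keys by key ID so signature verification can look them up by "kid".
    keys = jwks_resp.json().get("keys", [])
    return {key["kid"]: key for key in keys}
```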
(3) is probably the right approach. At minimum, we're going to need:
1. A registry/configuration list of supported OpenID Connect providers (just GitHub, for the time being), tied to their `openid-configuration` URLs
2. A scheduled periodic task (similar to the ones already in Warehouse) for checking the `openid-configuration` + JWKS URL
3. A verification service for incoming JWTs
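And a hedged sketch of the lookup half of that verification service: serve keys from a cached keyset and fall back to a refresh on a miss. `get_keyset` and `refresh_keyset` stand in for whatever cache-backed helpers the real service uses (the tests above stub `_get_keyset`/`_refresh_keyset` on the service); `PyJWK` is PyJWT's JWK wrapper, which the tests also use.

```
from typing import Callable, Optional

from jwt import PyJWK


def get_key(
    key_id: str,
    get_keyset: Callable[[], tuple[dict, bool]],
    refresh_keyset: Callable[[], dict],
) -> Optional[PyJWK]:
    # Try the cache first; the (keyset, fresh) pair mirrors the stubbed helpers.
    keyset, fresh = get_keyset()
    if not fresh or key_id not in keyset:
        keyset = refresh_keyset()
    if key_id not in keyset:
        return None  # the real service increments an error metric here
    return PyJWK(keyset[key_id])
```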
2022-01-20T21:55:07Z
[]
[]
pypi/warehouse
10,719
pypi__warehouse-10719
[ "10720" ]
b20a4f9536078d43c68fa3b1e7b492a90e208cb5
diff --git a/warehouse/accounts/__init__.py b/warehouse/accounts/__init__.py --- a/warehouse/accounts/__init__.py +++ b/warehouse/accounts/__init__.py @@ -77,7 +77,6 @@ def _basic_auth_login(username, password, request): elif login_service.check_password( user.id, password, - request.remote_addr, tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"], ): if breach_service.check_password( diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -532,9 +532,7 @@ def register(request, _form_class=RegistrationForm): user = user_service.create_user( form.username.data, form.full_name.data, form.new_password.data ) - email = user_service.add_email( - user.id, form.email.data, request.remote_addr, primary=True - ) + email = user_service.add_email(user.id, form.email.data, primary=True) user_service.record_event( user.id, tag="account:create",
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py --- a/tests/unit/accounts/test_core.py +++ b/tests/unit/accounts/test_core.py @@ -62,7 +62,7 @@ def test_with_invalid_password(self, pyramid_request, pyramid_services): get_user=pretend.call_recorder(lambda user_id: user), find_userid=pretend.call_recorder(lambda username: 1), check_password=pretend.call_recorder( - lambda userid, password, ip_address, tags=None: False + lambda userid, password, tags=None: False ), is_disabled=pretend.call_recorder(lambda user_id: (False, None)), ) @@ -89,7 +89,6 @@ def test_with_invalid_password(self, pyramid_request, pyramid_services): pretend.call( 1, "mypass", - "1.2.3.4", tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"], ) ] @@ -110,7 +109,7 @@ def test_with_disabled_user_no_reason(self, pyramid_request, pyramid_services): get_user=pretend.call_recorder(lambda user_id: user), find_userid=pretend.call_recorder(lambda username: 1), check_password=pretend.call_recorder( - lambda userid, password, ip_address, tags=None: False + lambda userid, password, tags=None: False ), is_disabled=pretend.call_recorder(lambda user_id: (True, None)), ) @@ -137,7 +136,6 @@ def test_with_disabled_user_no_reason(self, pyramid_request, pyramid_services): pretend.call( 1, "mypass", - "1.2.3.4", tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"], ) ] @@ -155,7 +153,7 @@ def test_with_disabled_user_compromised_pw(self, pyramid_request, pyramid_servic get_user=pretend.call_recorder(lambda user_id: user), find_userid=pretend.call_recorder(lambda username: 1), check_password=pretend.call_recorder( - lambda userid, password, ip_address, tags=None: False + lambda userid, password, tags=None: False ), is_disabled=pretend.call_recorder( lambda user_id: (True, DisableReason.CompromisedPassword) @@ -190,7 +188,7 @@ def test_with_valid_password(self, monkeypatch, pyramid_request, pyramid_service get_user=pretend.call_recorder(lambda user_id: user), find_userid=pretend.call_recorder(lambda username: 2), check_password=pretend.call_recorder( - lambda userid, password, ip_address, tags=None: True + lambda userid, password, tags=None: True ), update_user=pretend.call_recorder(lambda userid, last_login: None), is_disabled=pretend.call_recorder(lambda user_id: (False, None)), @@ -221,7 +219,6 @@ def test_with_valid_password(self, monkeypatch, pyramid_request, pyramid_service pretend.call( 2, "mypass", - "1.2.3.4", tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"], ) ] @@ -244,7 +241,7 @@ def test_via_basic_auth_compromised( get_user=pretend.call_recorder(lambda user_id: user), find_userid=pretend.call_recorder(lambda username: 2), check_password=pretend.call_recorder( - lambda userid, password, ip_address, tags=None: True + lambda userid, password, tags=None: True ), is_disabled=pretend.call_recorder(lambda user_id: (False, None)), disable_password=pretend.call_recorder(lambda user_id, reason=None: None), @@ -272,7 +269,6 @@ def test_via_basic_auth_compromised( pretend.call( 2, "mypass", - "1.2.3.4", tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"], ) ] diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -1279,9 +1279,7 @@ def test_register_redirect(self, db_request, monkeypatch): assert create_user.calls == [ pretend.call("username_value", "full_name", "MyStr0ng!shP455w0rd") ] - assert add_email.calls == [ - pretend.call(user.id, 
"[email protected]", db_request.remote_addr, primary=True) - ] + assert add_email.calls == [pretend.call(user.id, "[email protected]", primary=True)] assert send_email.calls == [pretend.call(db_request, (user, email))] assert record_event.calls == [ pretend.call(
Received "500: Internal Server Error" Package upload appears to have failed. <!-- NOTE: This issue should be for problems with PyPI itself, including: * pypi.org * test.pypi.org * files.pythonhosted.org This issue should NOT be for a project installed from PyPI. If you are having an issue with a specific package, you should reach out to the maintainers of that project directly instead. Furthermore, this issue should NOT be for any non-PyPI properties (like python.org, docs.python.org, etc.) If your problem is related to search (a new or updated project doesn't appear in the PyPI search results), please wait for a couple of hours and check again before reporting it. The search index may take some time to be updated. --> I try to upload a package to PIP in the standard way: twine upload dist/* It worked in the past, but now all I get is: ``` Uploading dynprog-0.1.2-py3-none-any.whl 100%|█████████████████████████████████████████████████████████████████| 26.0k/26.0k [00:00<00:00, 27.2kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 1 of 5 Uploading dynprog-0.1.2-py3-none-any.whl 100%|█████████████████████████████████████████████████████████████████| 26.0k/26.0k [00:00<00:00, 95.0kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 2 of 5 Uploading dynprog-0.1.2-py3-none-any.whl 100%|█████████████████████████████████████████████████████████████████| 26.0k/26.0k [00:00<00:00, 83.5kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 3 of 5 Uploading dynprog-0.1.2-py3-none-any.whl 100%|██████████████████████████████████████████████████████████████████| 26.0k/26.0k [00:00<00:00, 104kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 4 of 5 Uploading dynprog-0.1.2-py3-none-any.whl 100%|█████████████████████████████████████████████████████████████████| 26.0k/26.0k [00:00<00:00, 98.4kB/s] Received "500: Internal Server Error" Package upload appears to have failed. Retry 5 of 5 500 Internal Server Error <html> <head> <title>Internal Server Error</title> </head> <body> <h1><p>Internal Server Error</p></h1> </body> </html> HTTPError: 500 Internal Server Error from https://upload.pypi.org/legacy/ Internal Server Error ``` **Expected behavior** I expected to get a more detailed error message, so that I can understand what is wrong with my package. **To Reproduce** Unzip the attached file. Then run `twine upload dist/*`. **My Platform** Windows 10, standard ADSL Internet connection. [dist.zip](https://github.com/pypa/warehouse/files/8042027/dist.zip)
2022-02-10T15:00:55Z
[]
[]
pypi/warehouse
10,725
pypi__warehouse-10725
[ "8897", "8897" ]
fa7c968461b2c8954011572852dfa1fc8d60e23c
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -58,6 +58,7 @@ send_email_verification_email, send_password_change_email, send_password_reset_email, + send_recovery_code_reminder_email, ) from warehouse.packaging.models import ( JournalEntry, @@ -282,6 +283,10 @@ def two_factor_and_totp_validate(request, _form_class=TOTPAuthenticationForm): .hexdigest() .lower(), ) + + if not two_factor_state.get("has_recovery_codes", False): + send_recovery_code_reminder_email(request, request.user) + return resp else: form.totp_value.data = "" @@ -372,6 +377,10 @@ def webauthn_authentication_validate(request): .hexdigest() .lower(), ) + + if not request.user.has_recovery_codes: + send_recovery_code_reminder_email(request, request.user) + return { "success": request._("Successful WebAuthn assertion"), "redirect_to": redirect_to, diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -421,6 +421,11 @@ def send_recovery_code_used_email(request, user): return {"username": user.username} +@_email("recovery-code-reminder") +def send_recovery_code_reminder_email(request, user): + return {"username": user.username} + + def includeme(config): email_sending_class = config.maybe_dotted(config.registry.settings["mail.backend"]) config.register_service_factory(email_sending_class.create_service, IEmailSender)
diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -544,7 +544,10 @@ def test_get_returns_recovery_code_status(self, pyramid_request, redirect_url): assert result == {"has_recovery_codes": True} @pytest.mark.parametrize("redirect_url", ["test_redirect_url", None]) - def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): + @pytest.mark.parametrize("has_recovery_codes", [True, False]) + def test_totp_auth( + self, monkeypatch, pyramid_request, redirect_url, has_recovery_codes + ): remember = pretend.call_recorder(lambda request, user_id: [("foo", "bar")]) monkeypatch.setattr(views, "remember", remember) @@ -558,17 +561,17 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): ) ) + user = pretend.stub( + last_login=(datetime.datetime.utcnow() - datetime.timedelta(days=1)), + has_recovery_codes=has_recovery_codes, + ) user_service = pretend.stub( find_userid=pretend.call_recorder(lambda username: 1), - get_user=pretend.call_recorder( - lambda userid: pretend.stub( - last_login=(datetime.datetime.utcnow() - datetime.timedelta(days=1)) - ) - ), + get_user=pretend.call_recorder(lambda userid: user), update_user=lambda *a, **k: None, has_totp=lambda userid: True, has_webauthn=lambda userid: False, - has_recovery_codes=lambda userid: False, + has_recovery_codes=lambda userid: has_recovery_codes, check_totp_value=lambda userid, totp_value: True, record_event=pretend.call_recorder(lambda *a, **kw: None), ) @@ -606,6 +609,11 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): pyramid_request.params = pretend.stub( get=pretend.call_recorder(lambda k: query_params.get(k)) ) + pyramid_request.user = user + + send_email = pretend.call_recorder(lambda *a: None) + monkeypatch.setattr(views, "send_recovery_code_reminder_email", send_email) + result = views.two_factor_and_totp_validate( pyramid_request, _form_class=form_class ) @@ -627,6 +635,9 @@ def test_totp_auth(self, monkeypatch, pyramid_request, redirect_url): ) ] assert pyramid_request.session.record_auth_timestamp.calls == [pretend.call()] + assert send_email.calls == ( + [] if has_recovery_codes else [pretend.call(pyramid_request, user)] + ) def test_totp_auth_already_authed(self): request = pretend.stub( @@ -735,6 +746,7 @@ def test_two_factor_token_invalid(self, pyramid_request): class TestWebAuthn: def test_webauthn_get_options_already_authenticated(self, pyramid_request): request = pretend.stub(authenticated_userid=pretend.stub(), _=lambda a: a) + result = views.webauthn_authentication_options(request) assert result == {"fail": {"errors": ["Already authenticated"]}} @@ -836,7 +848,8 @@ def test_webauthn_validate_invalid_form(self, monkeypatch): assert result == {"fail": {"errors": ["Fake validation failure"]}} - def test_webauthn_validate(self, monkeypatch, pyramid_request): + @pytest.mark.parametrize("has_recovery_codes", [True, False]) + def test_webauthn_validate(self, monkeypatch, pyramid_request, has_recovery_codes): _get_two_factor_data = pretend.call_recorder( lambda r: {"redirect_to": "foobar", "userid": 1} ) @@ -845,7 +858,10 @@ def test_webauthn_validate(self, monkeypatch, pyramid_request): _login_user = pretend.call_recorder(lambda *a, **kw: pretend.stub()) monkeypatch.setattr(views, "_login_user", _login_user) - user = pretend.stub(webauthn=pretend.stub(sign_count=pretend.stub())) + user = pretend.stub( + webauthn=pretend.stub(sign_count=pretend.stub()), + 
has_recovery_codes=has_recovery_codes, + ) user_service = pretend.stub( get_user=pretend.call_recorder(lambda uid: user), @@ -858,6 +874,7 @@ def test_webauthn_validate(self, monkeypatch, pyramid_request): clear_webauthn_challenge=pretend.call_recorder(lambda: pretend.stub()), ) pyramid_request.find_service = lambda *a, **kw: user_service + pyramid_request.user = user form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -869,6 +886,9 @@ def test_webauthn_validate(self, monkeypatch, pyramid_request): form_class = pretend.call_recorder(lambda *a, **kw: form_obj) monkeypatch.setattr(views, "WebAuthnAuthenticationForm", form_class) + send_email = pretend.call_recorder(lambda *a: None) + monkeypatch.setattr(views, "send_recovery_code_reminder_email", send_email) + result = views.webauthn_authentication_validate(pyramid_request) assert _get_two_factor_data.calls == [pretend.call(pyramid_request)] @@ -884,6 +904,9 @@ def test_webauthn_validate(self, monkeypatch, pyramid_request): assert pyramid_request.session.clear_webauthn_challenge.calls == [ pretend.call() ] + assert send_email.calls == ( + [] if has_recovery_codes else [pretend.call(pyramid_request, user)] + ) assert result == { "success": "Successful WebAuthn assertion", diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -3467,6 +3467,7 @@ class TestRecoveryCodeEmails: [ (email.send_recovery_codes_generated_email, "recovery-codes-generated"), (email.send_recovery_code_used_email, "recovery-code-used"), + (email.send_recovery_code_reminder_email, "recovery-code-reminder"), ], ) def test_recovery_code_emails(
Encourage generation of 2FA recovery codes In order to cut down on 2FA reset requests, PyPI should encourage/require users to generate recovery codes. Some ideas:
* Require recovery codes to be generated before upload can succeed if 2FA is enabled
* Require recovery codes to be generated before the user can log in
* Require recovery codes to be generated as part of the 2FA setup
* Additionally, require the user to burn one of the codes immediately after generation to ensure the codes have actually been downloaded/stored
> Additionally, require the user to burn one of the codes immediately after generation to ensure the codes have actually been downloaded/stored.

I think this is the only way to make sure users actually did write them down somewhere. We could also occasionally ask at login time whether they still have the codes (e.g., a checkbox every ~2 months asking users to confirm they can still access their recovery codes, without requiring them to enter one), just to make sure they're aware it's something they might need, and to highlight that if they lose their 2FA app without recovery codes, they will be permanently locked out with no recovery procedure.
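For reference, the patch above takes the lighter-touch route rather than hard-requiring codes: after a successful TOTP or WebAuthn login, it emails a reminder to users who have no recovery codes. A condensed sketch of that check follows; `send_recovery_code_reminder_email` is the helper the patch adds to `warehouse.email`, but pulling the check out into a standalone function is my simplification.

```
from warehouse.email import send_recovery_code_reminder_email


def remind_about_recovery_codes(request, user) -> None:
    # Nudge users who completed a 2FA login but never generated recovery codes.
    if not user.has_recovery_codes:
        send_recovery_code_reminder_email(request, user)
```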
2022-02-11T02:47:36Z
[]
[]
pypi/warehouse
10,742
pypi__warehouse-10742
[ "13710" ]
55476f39266473e49d21a3a8e64b7cda4d71fdbf
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -834,7 +834,7 @@ def _error(message): try: email = ( request.db.query(Email) - .filter(Email.id == data["email.id"], Email.user == request.user) + .filter(Email.id == int(data["email.id"]), Email.user == request.user) .one() ) except NoResultFound: diff --git a/warehouse/admin/bans.py b/warehouse/admin/bans.py --- a/warehouse/admin/bans.py +++ b/warehouse/admin/bans.py @@ -10,6 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from sqlalchemy import type_coerce +from sqlalchemy.dialects.postgresql import INET + from warehouse.accounts.interfaces import IUserService from warehouse.events.models import IpAddress @@ -18,10 +21,10 @@ class Bans: def __init__(self, request): self.request = request - def by_ip(self, ip_address): + def by_ip(self, ip_address: str) -> bool: banned = ( self.request.db.query(IpAddress) - .filter_by(ip_address=ip_address, is_banned=True) + .filter_by(ip_address=type_coerce(ip_address, INET), is_banned=True) .one_or_none() ) if banned is not None: diff --git a/warehouse/banners/views.py b/warehouse/banners/views.py --- a/warehouse/banners/views.py +++ b/warehouse/banners/views.py @@ -29,7 +29,7 @@ def list_banner_messages(request): if banner_id: query = request.db.query(Banner).filter(Banner.id == banner_id) else: - today = str(datetime.date.today()) + today = datetime.date.today() query = request.db.query(Banner).filter( (Banner.active == True) & (Banner.end >= today) # noqa ) diff --git a/warehouse/cli/db/__init__.py b/warehouse/cli/db/__init__.py --- a/warehouse/cli/db/__init__.py +++ b/warehouse/cli/db/__init__.py @@ -10,32 +10,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -import contextlib - -from sqlalchemy import text - from warehouse.cli import warehouse [email protected] -def alembic_lock(engine, alembic_config): - with engine.begin() as connection: - # Attempt to acquire the alembic lock, this will wait until the lock - # has been acquired allowing multiple commands to wait for each other. - connection.execute(text("SELECT pg_advisory_lock(hashtext('alembic'))")) - - try: - # Tell Alembic use our current connection instead of creating it's - # own. - alembic_config.attributes["connection"] = connection - - # Yield control back up to let the command itself run. - yield alembic_config - finally: - # Finally we need to release the lock we've acquired. - connection.execute(text("SELECT pg_advisory_unlock(hashtext('alembic'))")) - - @warehouse.group() # pragma: no branch def db(): """ diff --git a/warehouse/cli/db/branches.py b/warehouse/cli/db/branches.py --- a/warehouse/cli/db/branches.py +++ b/warehouse/cli/db/branches.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -22,7 +22,4 @@ def branches(config, **kwargs): """ Show current branch points. 
""" - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.branches(alembic_config, **kwargs) + alembic.command.branches(config.alembic_config(), **kwargs) diff --git a/warehouse/cli/db/current.py b/warehouse/cli/db/current.py --- a/warehouse/cli/db/current.py +++ b/warehouse/cli/db/current.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -22,7 +22,4 @@ def current(config, **kwargs): """ Display the current revision for a database. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.current(alembic_config, **kwargs) + alembic.command.current(config.alembic_config(), **kwargs) diff --git a/warehouse/cli/db/downgrade.py b/warehouse/cli/db/downgrade.py --- a/warehouse/cli/db/downgrade.py +++ b/warehouse/cli/db/downgrade.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def downgrade(config, revision, **kwargs): """ Revert to a previous version. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.downgrade(alembic_config, revision, **kwargs) + alembic.command.downgrade(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/cli/db/heads.py b/warehouse/cli/db/heads.py --- a/warehouse/cli/db/heads.py +++ b/warehouse/cli/db/heads.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -28,7 +28,4 @@ def heads(config, **kwargs): """ Show current available heads. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.heads(alembic_config, **kwargs) + alembic.command.heads(config.alembic_config(), **kwargs) diff --git a/warehouse/cli/db/history.py b/warehouse/cli/db/history.py --- a/warehouse/cli/db/history.py +++ b/warehouse/cli/db/history.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def history(config, revision_range, **kwargs): """ List changeset scripts in chronological order. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.history(alembic_config, revision_range, **kwargs) + alembic.command.history(config.alembic_config(), revision_range, **kwargs) diff --git a/warehouse/cli/db/merge.py b/warehouse/cli/db/merge.py --- a/warehouse/cli/db/merge.py +++ b/warehouse/cli/db/merge.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -34,7 +34,4 @@ def merge(config, revisions, **kwargs): Takes one or more revisions or "heads" for all heads and merges them into a single revision. 
""" - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.merge(alembic_config, revisions, **kwargs) + alembic.command.merge(config.alembic_config(), revisions, **kwargs) diff --git a/warehouse/cli/db/revision.py b/warehouse/cli/db/revision.py --- a/warehouse/cli/db/revision.py +++ b/warehouse/cli/db/revision.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -49,7 +49,4 @@ def revision(config, **kwargs): """ Create a new revision file. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.revision(alembic_config, **kwargs) + alembic.command.revision(config.alembic_config(), **kwargs) diff --git a/warehouse/cli/db/show.py b/warehouse/cli/db/show.py --- a/warehouse/cli/db/show.py +++ b/warehouse/cli/db/show.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def show(config, revision, **kwargs): """ Show the revision(s) denoted by the given symbol. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.show(alembic_config, revision, **kwargs) + alembic.command.show(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/cli/db/stamp.py b/warehouse/cli/db/stamp.py --- a/warehouse/cli/db/stamp.py +++ b/warehouse/cli/db/stamp.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def stamp(config, revision, **kwargs): """ Stamp the revision table with the given revision. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.stamp(alembic_config, revision, **kwargs) + alembic.command.stamp(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/cli/db/upgrade.py b/warehouse/cli/db/upgrade.py --- a/warehouse/cli/db/upgrade.py +++ b/warehouse/cli/db/upgrade.py @@ -13,7 +13,7 @@ import alembic.command import click -from warehouse.cli.db import alembic_lock, db +from warehouse.cli.db import db @db.command() @@ -23,7 +23,4 @@ def upgrade(config, revision, **kwargs): """ Upgrade database. """ - with alembic_lock( - config.registry["sqlalchemy.engine"], config.alembic_config() - ) as alembic_config: - alembic.command.upgrade(alembic_config, revision, **kwargs) + alembic.command.upgrade(config.alembic_config(), revision, **kwargs) diff --git a/warehouse/ip_addresses/models.py b/warehouse/ip_addresses/models.py --- a/warehouse/ip_addresses/models.py +++ b/warehouse/ip_addresses/models.py @@ -45,8 +45,8 @@ class IpAddress(db.Model): {"comment": "Tracks IP Addresses that have modified PyPI state"}, ) - def __repr__(self): - return self.ip_address + def __repr__(self) -> str: + return str(self.ip_address) def __lt__(self, other): return self.id < other.id diff --git a/warehouse/migrations/env.py b/warehouse/migrations/env.py --- a/warehouse/migrations/env.py +++ b/warehouse/migrations/env.py @@ -42,37 +42,25 @@ def run_migrations_online(): In this scenario we need to create an Engine and associate a connection with the context. 
""" - connectable = context.config.attributes.get("connection", None) + options = context.config.get_section(context.config.config_ini_section) + url = options.pop("url") + connectable = create_engine(url, poolclass=pool.NullPool) - if connectable is None: - options = context.config.get_section(context.config.config_ini_section) - url = options.pop("url") - connectable = create_engine(url, poolclass=pool.NullPool) + with connectable.connect() as connection: + connection.execute(text("SET statement_timeout = 5000")) + connection.execute(text("SET lock_timeout = 4000")) - with connectable.connect() as connection: - connection.execute(text("SET statement_timeout = 5000")) - connection.execute(text("SET lock_timeout = 4000")) - - context.configure( - connection=connection, - target_metadata=db.metadata, - compare_server_default=True, - transaction_per_migration=True, - ) - with context.begin_transaction(): - context.run_migrations() - else: context.configure( - connection=connectable, + connection=connection, target_metadata=db.metadata, compare_server_default=True, transaction_per_migration=True, ) - context.execute(text("SET statement_timeout = 5000")) - context.execute(text("SET lock_timeout = 4000")) - with context.begin_transaction(): + connection.execute(text("SELECT pg_advisory_lock(hashtext('alembic'))")) context.run_migrations() + context.get_bind().commit() + connection.execute(text("SELECT pg_advisory_unlock(hashtext('alembic'))")) if context.is_offline_mode(): diff --git a/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py b/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py --- a/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py +++ b/warehouse/migrations/versions/1b97443dea8a_create_missing_fk_indexes.py @@ -26,71 +26,71 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
- op.execute("COMMIT") - - op.create_index( - op.f("ix_macaroons_user_id"), - "macaroons", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_project_events_project_id"), - "project_events", - ["project_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_release_vulnerabilities_release_id"), - "release_vulnerabilities", - ["release_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_releases_description_id"), - "releases", - ["description_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_role_invitations_project_id"), - "role_invitations", - ["project_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_role_invitations_user_id"), - "role_invitations", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_user_events_user_id"), - "user_events", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_user_recovery_codes_user_id"), - "user_recovery_codes", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) - op.create_index( - op.f("ix_user_security_keys_user_id"), - "user_security_keys", - ["user_id"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + op.f("ix_macaroons_user_id"), + "macaroons", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_project_events_project_id"), + "project_events", + ["project_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_release_vulnerabilities_release_id"), + "release_vulnerabilities", + ["release_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_releases_description_id"), + "releases", + ["description_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_role_invitations_project_id"), + "role_invitations", + ["project_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_role_invitations_user_id"), + "role_invitations", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_user_events_user_id"), + "user_events", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_user_recovery_codes_user_id"), + "user_recovery_codes", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) + op.create_index( + op.f("ix_user_security_keys_user_id"), + "user_security_keys", + ["user_id"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py b/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py --- a/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py +++ b/warehouse/migrations/versions/2db9b00c8d00_index_canonical_version_for_releases.py @@ -26,15 +26,16 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
- op.execute("COMMIT") - - op.create_index( - "release_canonical_version_idx", - "releases", - ["canonical_version"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + + with op.get_context().autocommit_block(): + op.create_index( + "release_canonical_version_idx", + "releases", + ["canonical_version"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py --- a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py +++ b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py @@ -55,7 +55,7 @@ def upgrade(): """ ) ) - conn.execute(sa.text("COMMIT")) + op.get_bind().commit() op.alter_column( "releases", diff --git a/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py b/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py --- a/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py +++ b/warehouse/migrations/versions/68a00c174ba5_add_missing_indexes_for_foreign_keys.py @@ -33,16 +33,17 @@ def upgrade(): op.create_index( op.f("ix_ses_events_email_id"), "ses_events", ["email_id"], unique=False ) - # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close - # our transaction here and issue the statement. - op.execute("COMMIT") - op.create_index( - "journals_submitted_by_idx", - "journals", - ["submitted_by"], - unique=False, - postgresql_concurrently=True, - ) + # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll run this + # outside of the transaction for the migration. + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + "journals_submitted_by_idx", + "journals", + ["submitted_by"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py b/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py --- a/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py +++ b/warehouse/migrations/versions/c5f718cb98ac_add_cached_bool_on_files_table.py @@ -38,14 +38,15 @@ def upgrade(): ) # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. - op.execute("COMMIT") - op.create_index( - "release_files_cached_idx", - "release_files", - ["cached"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + "release_files_cached_idx", + "release_files", + ["cached"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py b/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py --- a/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py +++ b/warehouse/migrations/versions/d142f435bb39_add_archived_column_to_files.py @@ -41,14 +41,15 @@ def upgrade(): # CREATE INDEX CONCURRENTLY cannot happen inside a transaction. We'll close # our transaction here and issue the statement. 
- op.execute("COMMIT") - op.create_index( - "release_files_archived_idx", - "release_files", - ["archived"], - unique=False, - postgresql_concurrently=True, - ) + op.get_bind().commit() + with op.get_context().autocommit_block(): + op.create_index( + "release_files_archived_idx", + "release_files", + ["archived"], + unique=False, + postgresql_concurrently=True, + ) def downgrade(): diff --git a/warehouse/utils/wsgi.py b/warehouse/utils/wsgi.py --- a/warehouse/utils/wsgi.py +++ b/warehouse/utils/wsgi.py @@ -16,6 +16,8 @@ from typing import TYPE_CHECKING +from sqlalchemy import type_coerce +from sqlalchemy.dialects.postgresql import INET from sqlalchemy.exc import NoResultFound from warehouse.ip_addresses.models import IpAddress @@ -138,12 +140,11 @@ def _remote_addr_hashed(request: Request) -> str: def _ip_address(request): """Return the IpAddress object for the remote address from the environment.""" + remote_inet = type_coerce(request.remote_addr, INET) try: - ip_address = ( - request.db.query(IpAddress).filter_by(ip_address=request.remote_addr).one() - ) + ip_address = request.db.query(IpAddress).filter_by(ip_address=remote_inet).one() except NoResultFound: - ip_address = IpAddress(ip_address=request.remote_addr) + ip_address = IpAddress(ip_address=remote_inet) request.db.add(ip_address) ip_address.hashed_ip_address = request.remote_addr_hashed
diff --git a/requirements/tests.in b/requirements/tests.in --- a/requirements/tests.in +++ b/requirements/tests.in @@ -3,7 +3,7 @@ factory_boy freezegun pretend pytest>=3.0.0 -pytest-postgresql>=3.1.3,<4.0.0 +pytest-postgresql>=3.1.3,<6.0.0 pytest-socket pytz responses>=0.5.1 diff --git a/requirements/tests.txt b/requirements/tests.txt --- a/requirements/tests.txt +++ b/requirements/tests.txt @@ -207,6 +207,10 @@ psutil==5.9.5 \ --hash=sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30 \ --hash=sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48 # via mirakuru +psycopg==3.1.9 \ + --hash=sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9 \ + --hash=sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc + # via pytest-postgresql pytest==7.4.0 \ --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a @@ -214,9 +218,9 @@ pytest==7.4.0 \ # -r requirements/tests.in # pytest-postgresql # pytest-socket -pytest-postgresql==3.1.3 \ - --hash=sha256:05b87a192741511f5171e0300689a531a2a48b4483c69ae2b5f565d3e429b1d5 \ - --hash=sha256:3649bcac5a0cd0d2cc1470a1087739990d402e2e910d53265ac486321a833898 +pytest-postgresql==5.0.0 \ + --hash=sha256:22edcbafab8995ee85b8d948ddfaad4f70c2c7462303d7477ecd2f77fc9d15bd \ + --hash=sha256:6e8f0773b57c9b8975b6392c241b7b81b7018f32079a533f368f2fbda732ecd3 # via -r requirements/tests.in pytest-socket==0.6.0 \ --hash=sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138 \ @@ -294,6 +298,10 @@ types-pyyaml==6.0.12.11 \ --hash=sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b \ --hash=sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d # via responses +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 + # via psycopg urllib3==1.26.16 \ --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 @@ -312,3 +320,9 @@ webtest==3.0.0 \ --hash=sha256:2a001a9efa40d2a7e5d9cd8d1527c75f41814eb6afce2c3d207402547b1e5ead \ --hash=sha256:54bd969725838d9861a9fa27f8d971f79d275d94ae255f5c501f53bb6d9929eb # via -r requirements/tests.in + +# The following packages are considered to be unsafe in a requirements file: +setuptools==68.0.0 \ + --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ + --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 + # via pytest-postgresql diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -28,7 +28,7 @@ import webtest as _webtest from jinja2 import Environment, FileSystemLoader -from psycopg2.errors import InvalidCatalogName +from psycopg.errors import InvalidCatalogName from pyramid.i18n import TranslationString from pyramid.static import ManifestCacheBuster from pyramid_jinja2 import IJinja2Environment @@ -242,7 +242,7 @@ def database(request): def drop_database(): janitor.drop() - return f"postgresql://{pg_user}@{pg_host}:{pg_port}/{pg_db}" + return f"postgresql+psycopg://{pg_user}@{pg_host}:{pg_port}/{pg_db}" class MockManifestCacheBuster(ManifestCacheBuster): diff --git a/tests/unit/cli/test_db.py b/tests/unit/cli/test_db.py --- a/tests/unit/cli/test_db.py +++ 
b/tests/unit/cli/test_db.py @@ -33,21 +33,6 @@ from warehouse.cli.db.upgrade import upgrade -def _compare_alembic_locks(calls: list[pretend.call]) -> bool: - sql = [] - for t in calls: - assert len(t.args) == 1 - assert len(t.kwargs) == 0 - - tc = t.args[0] - assert isinstance(tc, sqlalchemy.sql.expression.TextClause) - sql.append(tc.text) - return sql == [ - "SELECT pg_advisory_lock(hashtext('alembic'))", - "SELECT pg_advisory_unlock(hashtext('alembic'))", - ] - - def test_branches_command(monkeypatch, cli, pyramid_config): alembic_branches = pretend.call_recorder(lambda config: None) monkeypatch.setattr(alembic.command, "branches", alembic_branches) @@ -65,8 +50,6 @@ def test_branches_command(monkeypatch, cli, pyramid_config): result = cli.invoke(branches, obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_branches.calls == [pretend.call(alembic_config)] @@ -87,8 +70,6 @@ def test_current_command(monkeypatch, cli, pyramid_config): result = cli.invoke(current, obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_current.calls == [pretend.call(alembic_config)] @@ -109,8 +90,6 @@ def test_downgrade_command(monkeypatch, cli, pyramid_config): result = cli.invoke(downgrade, ["--", "-1"], obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_downgrade.calls == [pretend.call(alembic_config, "-1")] @@ -139,8 +118,6 @@ def test_heads_command(monkeypatch, cli, pyramid_config, args, ekwargs): result = cli.invoke(heads, args, obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_heads.calls == [pretend.call(alembic_config, **ekwargs)] @@ -161,8 +138,6 @@ def test_history_command(monkeypatch, cli, pyramid_config): result = cli.invoke(history, ["foo:bar"], obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_history.calls == [pretend.call(alembic_config, "foo:bar")] @@ -202,8 +177,6 @@ def test_merge_command(monkeypatch, cli, pyramid_config, args, eargs, ekwargs): result = cli.invoke(merge, args, obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_merge.calls == [pretend.call(alembic_config, *eargs, **ekwargs)] @@ -260,8 +233,6 @@ def test_revision_command(monkeypatch, cli, pyramid_config, args, ekwargs): result = cli.invoke(revision, args, obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_revision.calls == [pretend.call(alembic_config, **ekwargs)] @@ -282,8 +253,6 @@ def test_show_command(monkeypatch, cli, pyramid_config): result = cli.invoke(show, ["foo"], obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_show.calls == 
[pretend.call(alembic_config, "foo")] @@ -304,8 +273,6 @@ def test_stamp_command(monkeypatch, cli, pyramid_config): result = cli.invoke(stamp, ["foo"], obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_stamp.calls == [pretend.call(alembic_config, "foo")] @@ -326,8 +293,6 @@ def test_upgrade_command(monkeypatch, cli, pyramid_config): result = cli.invoke(upgrade, ["foo"], obj=pyramid_config) assert result.exit_code == 0 - assert alembic_config.attributes == {"connection": connection} - assert _compare_alembic_locks(connection.execute.calls) assert alembic_upgrade.calls == [pretend.call(alembic_config, "foo")] diff --git a/tests/unit/test_db.py b/tests/unit/test_db.py --- a/tests/unit/test_db.py +++ b/tests/unit/test_db.py @@ -14,7 +14,7 @@ import alembic.config import pretend -import psycopg2.extensions +import psycopg import pytest import sqlalchemy import venusian @@ -114,7 +114,7 @@ def config_cls(): def test_raises_db_available_error(pyramid_services, metrics): def raiser(): - raise OperationalError("foo", {}, psycopg2.OperationalError()) + raise OperationalError("foo", {}, psycopg.OperationalError()) engine = pretend.stub(connect=raiser) request = pretend.stub( @@ -199,7 +199,6 @@ def test_create_session_read_only_mode( connection = pretend.stub( connection=pretend.stub( - get_transaction_status=lambda: pretend.stub(), set_session=lambda **kw: None, rollback=lambda: None, ), diff --git a/tests/unit/utils/test_wsgi.py b/tests/unit/utils/test_wsgi.py --- a/tests/unit/utils/test_wsgi.py +++ b/tests/unit/utils/test_wsgi.py @@ -13,6 +13,8 @@ import pretend import pytest +from sqlalchemy import type_coerce +from sqlalchemy.dialects.postgresql import INET from sqlalchemy.exc import NoResultFound from warehouse.ip_addresses.models import IpAddress @@ -196,7 +198,9 @@ def test_ip_address_exists(db_request): def test_ip_address_created(db_request): with pytest.raises(NoResultFound): - db_request.db.query(IpAddress).filter_by(ip_address="192.0.2.69").one() + db_request.db.query(IpAddress).filter_by( + ip_address=type_coerce("192.0.2.69", INET) + ).one() db_request.environ["GEOIP_CITY"] = "Anytown, ST" db_request.remote_addr = "192.0.2.69" @@ -204,8 +208,12 @@ def test_ip_address_created(db_request): wsgi._ip_address(db_request) - ip_address = db_request.db.query(IpAddress).filter_by(ip_address="192.0.2.69").one() - assert ip_address.ip_address == "192.0.2.69" + ip_address = ( + db_request.db.query(IpAddress) + .filter_by(ip_address=type_coerce("192.0.2.69", INET)) + .one() + ) + assert str(ip_address.ip_address) == "192.0.2.69" assert ip_address.hashed_ip_address == "deadbeef" assert ip_address.geoip_info == {"city": "Anytown, ST"}
chore(deps): bump pytest-postgresql from 3.1.3 to 5.0.0 Bumps [pytest-postgresql](https://github.com/ClearcodeHQ/pytest-postgresql) from 3.1.3 to 5.0.0. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/ClearcodeHQ/pytest-postgresql/blob/main/CHANGES.rst">pytest-postgresql's changelog</a>.</em></p> <blockquote> <h1>5.0.0 (2023-05-20)</h1> <h2>Breaking changes</h2> <ul> <li> <p>Drop support for Python 3.7 (<code>[#706](https://github.com/ClearcodeHQ/pytest-postgresql/issues/706) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/706&gt;</code>_)</p> </li> <li> <p>psycopg is now a mandatory requirement. With pyscop2 it was optional due to the different ways it could be installed:</p> <ul> <li>psycopg2 - that built itself</li> <li>psycopg2-binary - binary distribution</li> <li>psycopg2cffi - pypy enabled version</li> </ul> <p>Since psycopg version 3 there's only one package to install. (<code>[#744](https://github.com/ClearcodeHQ/pytest-postgresql/issues/744) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/744&gt;</code>_)</p> </li> <li> <p>Dropped --postgresql-logsprefix/postgresql_logsprefix options. All fixture data is already distinguished by tmpdir itself. (<code>[#748](https://github.com/ClearcodeHQ/pytest-postgresql/issues/748) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/748&gt;</code>_)</p> </li> </ul> <h2>Features</h2> <ul> <li>Re-rise FileNotFound errors with more meaningful messages. (<code>[#598](https://github.com/ClearcodeHQ/pytest-postgresql/issues/598) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/598&gt;</code>_)</li> <li>Support Python 3.11 (<code>[#678](https://github.com/ClearcodeHQ/pytest-postgresql/issues/678) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/678&gt;</code>_)</li> </ul> <h2>Miscellaneus</h2> <ul> <li>Drop PyPy from CI (<code>[#669](https://github.com/ClearcodeHQ/pytest-postgresql/issues/669) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/669&gt;</code>_)</li> <li>pytest-postgresql will now recognise and use development postgresql versions (<code>[#691](https://github.com/ClearcodeHQ/pytest-postgresql/issues/691) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/691&gt;</code>_)</li> <li>Use towncrier to maintain project's newsfragments/changelog (<code>[#700](https://github.com/ClearcodeHQ/pytest-postgresql/issues/700) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/700&gt;</code>_)</li> <li>Move project dependency management to pipenv (<code>[#701](https://github.com/ClearcodeHQ/pytest-postgresql/issues/701) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/701&gt;</code>_)</li> <li>Migrate to shared automerge workflow for automatic tests dependency updates (<code>[#702](https://github.com/ClearcodeHQ/pytest-postgresql/issues/702) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/702&gt;</code>_)</li> <li>Use tbump instead of bumpversion to manage versioning and releases. 
(<code>[#703](https://github.com/ClearcodeHQ/pytest-postgresql/issues/703) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/703&gt;</code>_)</li> <li>Move most of package configuration to pyproject.toml (<code>[#704](https://github.com/ClearcodeHQ/pytest-postgresql/issues/704) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/704&gt;</code>_)</li> <li>Introduce Typed config dict (<code>[#706](https://github.com/ClearcodeHQ/pytest-postgresql/issues/706) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/706&gt;</code>_)</li> <li>Use ankane/setup-postgres@v1 to setup postgresql in CI instead of custom scripts. (<code>[#708](https://github.com/ClearcodeHQ/pytest-postgresql/issues/708) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/708&gt;</code>_)</li> <li>Pass codecov_token to codecov action to upload coverage. (<code>[#721](https://github.com/ClearcodeHQ/pytest-postgresql/issues/721) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/721&gt;</code>_)</li> <li>Replaced flake8 and pydocstyle with ruff, turned on isort rules (<code>[#735](https://github.com/ClearcodeHQ/pytest-postgresql/issues/735) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/735&gt;</code>_)</li> <li>Split single bid test job into smaller running each after another. (<code>[#740](https://github.com/ClearcodeHQ/pytest-postgresql/issues/740) &lt;https://https://github.com/ClearcodeHQ/pytest-postgresql/issues/740&gt;</code>_)</li> </ul> <h1>4.1.1</h1> <h2>Misc</h2> <ul> <li>Error message typo fix</li> <li>Docker documentation example typo fixes</li> </ul> <!-- raw HTML omitted --> </blockquote> <p>... (truncated)</p> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/a53278561c12419db8538552f0bb3dc17213c8bf"><code>a532785</code></a> Release 5.0.0</li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/99cea01bd423763150f8fff9b43f2525119356d0"><code>99cea01</code></a> Revert &quot;Publish package with trusted publishers&quot;</li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/87af7c3098bf24f1a9d37ed8bbd06969ea919192"><code>87af7c3</code></a> Merge pull request <a href="https://redirect.github.com/ClearcodeHQ/pytest-postgresql/issues/751">#751</a> from ClearcodeHQ/new-publish</li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/95de1321b5ec6f95255f0ebdc51feb6ba5b70af0"><code>95de132</code></a> Publish package with trusted publishers</li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/066959a838073c0549efa3240a42118a18ba2fb0"><code>066959a</code></a> Merge pull request <a href="https://redirect.github.com/ClearcodeHQ/pytest-postgresql/issues/750">#750</a> from ClearcodeHQ/file-not-found</li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/1d17defd3397a8af6edb9804067e2d7e56500058"><code>1d17def</code></a> Better exception messages when we could not find proper executable - closes <a href="https://redirect.github.com/ClearcodeHQ/pytest-postgresql/issues/598">#598</a></li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/9d314a237befb173a17684df48d74991ea5e2f30"><code>9d314a2</code></a> Merge pull request <a href="https://redirect.github.com/ClearcodeHQ/pytest-postgresql/issues/749">#749</a> from ClearcodeHQ/drop-logfile</li> <li><a 
href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/1d30d47ab17e9435d6415c92cc34c9d3795da4d6"><code>1d30d47</code></a> Dropped --postgresql-logsprefix - closes <a href="https://redirect.github.com/ClearcodeHQ/pytest-postgresql/issues/748">#748</a></li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/523870819612b34d3df4784382c36644169febcc"><code>5238708</code></a> Bump ruff from 0.0.267 to 0.0.269</li> <li><a href="https://github.com/ClearcodeHQ/pytest-postgresql/commit/71124089698603e813cfcd580814be6ff92c1d92"><code>7112408</code></a> Bump ruff from 0.0.267 to 0.0.269</li> <li>Additional commits viewable in <a href="https://github.com/ClearcodeHQ/pytest-postgresql/compare/v3.1.3...v5.0.0">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-postgresql&package-manager=pip&previous-version=3.1.3&new-version=5.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> > **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days.
2022-02-13T15:06:02Z
[]
[]
pypi/warehouse
10,753
pypi__warehouse-10753
[ "10617" ]
5112d7de845ba8eafbad40cc1a383cdd374e7c8c
diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py --- a/warehouse/accounts/interfaces.py +++ b/warehouse/accounts/interfaces.py @@ -12,12 +12,7 @@ from zope.interface import Attribute, Interface - -class RateLimiterException(Exception): - def __init__(self, *args, resets_in, **kwargs): - self.resets_in = resets_in - - return super().__init__(*args, **kwargs) +from warehouse.rate_limiting.interfaces import RateLimiterException class TooManyFailedLogins(RateLimiterException): diff --git a/warehouse/admin/flags.py b/warehouse/admin/flags.py --- a/warehouse/admin/flags.py +++ b/warehouse/admin/flags.py @@ -22,6 +22,7 @@ class AdminFlagValue(enum.Enum): DISALLOW_NEW_PROJECT_REGISTRATION = "disallow-new-project-registration" DISALLOW_NEW_UPLOAD = "disallow-new-upload" DISALLOW_NEW_USER_REGISTRATION = "disallow-new-user-registration" + DISALLOW_OIDC = "disallow-oidc" READ_ONLY = "read-only" diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -270,6 +270,18 @@ def configure(settings=None): "PASSWORD_RESET_RATELIMIT_STRING", default="5 per day", ) + maybe_set( + settings, + "warehouse.manage.oidc.user_registration_ratelimit_string", + "USER_OIDC_REGISTRATION_RATELIMIT_STRING", + default="20 per day", + ) + maybe_set( + settings, + "warehouse.manage.oidc.ip_registration_ratelimit_string", + "IP_OIDC_REGISTRATION_RATELIMIT_STRING", + default="20 per day", + ) # 2FA feature flags maybe_set( @@ -294,6 +306,15 @@ def configure(settings=None): default=False, ) + # OIDC feature flags + maybe_set( + settings, + "warehouse.oidc.enabled", + "OIDC_ENABLED", + coercer=distutils.util.strtobool, + default=False, + ) + # Add the settings we use when the environment is set to development. if settings["warehouse.env"] == Environment.development: settings.setdefault("enforce_https", False) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -466,6 +466,28 @@ def send_recovery_code_reminder_email(request, user): return {"username": user.username} +@_email("oidc-provider-added") +def send_oidc_provider_added_email(request, user, project_name, provider): + # We use the request's user, since they're the one triggering the action. + return { + "username": request.user.username, + "project_name": project_name, + "provider_name": provider.provider_name, + "provider_spec": str(provider), + } + + +@_email("oidc-provider-removed") +def send_oidc_provider_removed_email(request, user, project_name, provider): + # We use the request's user, since they're the one triggering the action. 
+ return { + "username": request.user.username, + "project_name": project_name, + "provider_name": provider.provider_name, + "provider_spec": str(provider), + } + + def includeme(config): email_sending_class = config.maybe_dotted(config.registry.settings["mail.backend"]) config.register_service_factory(email_sending_class.create_service, IEmailSender) diff --git a/warehouse/manage/__init__.py b/warehouse/manage/__init__.py --- a/warehouse/manage/__init__.py +++ b/warehouse/manage/__init__.py @@ -17,6 +17,7 @@ from warehouse.accounts.forms import ReAuthenticateForm from warehouse.accounts.interfaces import IUserService +from warehouse.rate_limiting import IRateLimiter, RateLimit DEFAULT_TIME_TO_REAUTH = 30 * 60 # 30 minutes @@ -62,3 +63,21 @@ def wrapped(context, request): def includeme(config): config.add_view_deriver(reauth_view, over="rendered_view", under="decorated_view") + + user_oidc_registration_ratelimit_string = config.registry.settings.get( + "warehouse.manage.oidc.user_registration_ratelimit_string" + ) + config.register_service_factory( + RateLimit(user_oidc_registration_ratelimit_string), + IRateLimiter, + name="user_oidc.provider.register", + ) + + ip_oidc_registration_ratelimit_string = config.registry.settings.get( + "warehouse.manage.oidc.ip_registration_ratelimit_string" + ) + config.register_service_factory( + RateLimit(ip_oidc_registration_ratelimit_string), + IRateLimiter, + name="ip_oidc.provider.register", + ) diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -16,7 +16,12 @@ import pyqrcode from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage -from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther +from pyramid.httpexceptions import ( + HTTPBadRequest, + HTTPNotFound, + HTTPSeeOther, + HTTPTooManyRequests, +) from pyramid.response import Response from pyramid.view import view_config, view_defaults from sqlalchemy import func @@ -41,6 +46,8 @@ send_collaborator_removed_email, send_collaborator_role_changed_email, send_email_verification_email, + send_oidc_provider_added_email, + send_oidc_provider_removed_email, send_password_change_email, send_primary_email_change_email, send_project_role_verification_email, @@ -72,6 +79,10 @@ SaveAccountForm, Toggle2FARequirementForm, ) +from warehouse.metrics.interfaces import IMetricsService +from warehouse.oidc.forms import DeleteProviderForm, GitHubProviderForm +from warehouse.oidc.interfaces import TooManyOIDCRegistrations +from warehouse.oidc.models import GitHubProvider, OIDCProvider from warehouse.packaging.models import ( File, JournalEntry, @@ -82,6 +93,7 @@ RoleInvitation, RoleInvitationStatus, ) +from warehouse.rate_limiting import IRateLimiter from warehouse.utils.http import is_safe_url from warehouse.utils.paginate import paginate_url_factory from warehouse.utils.project import confirm_project, destroy_docs, remove_project @@ -132,6 +144,17 @@ def user_projects(request): } +def project_owners(request, project): + """Return all users who are owners of the project.""" + owner_roles = ( + request.db.query(User.id) + .join(Role.user) + .filter(Role.role_name == "Owner", Role.project == project) + .subquery() + ) + return request.db.query(User).join(owner_roles, User.id == owner_roles.c.id).all() + + @view_defaults( route_name="manage.account", renderer="manage/account.html", @@ -1059,6 +1082,252 @@ def toggle_2fa_requirement(self): ) +@view_defaults( + context=Project, + 
route_name="manage.project.settings.publishing", + renderer="manage/publishing.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:project", + has_translations=True, + require_reauth=True, + http_cache=0, +) +class ManageOIDCProviderViews: + def __init__(self, project, request): + self.request = request + self.project = project + self.oidc_enabled = self.request.registry.settings["warehouse.oidc.enabled"] + self.metrics = self.request.find_service(IMetricsService, context=None) + + @property + def _ratelimiters(self): + return { + "user.oidc": self.request.find_service( + IRateLimiter, name="user_oidc.provider.register" + ), + "ip.oidc": self.request.find_service( + IRateLimiter, name="ip_oidc.provider.register" + ), + } + + def _hit_ratelimits(self): + self._ratelimiters["user.oidc"].hit(self.request.user.id) + self._ratelimiters["ip.oidc"].hit(self.request.remote_addr) + + def _check_ratelimits(self): + if not self._ratelimiters["user.oidc"].test(self.request.user.id): + raise TooManyOIDCRegistrations( + resets_in=self._ratelimiters["user.oidc"].resets_in( + self.request.user.id + ) + ) + + if not self._ratelimiters["ip.oidc"].test(self.request.remote_addr): + raise TooManyOIDCRegistrations( + resets_in=self._ratelimiters["ip.oidc"].resets_in( + self.request.remote_addr + ) + ) + + @property + def github_provider_form(self): + return GitHubProviderForm( + self.request.POST, + api_token=self.request.registry.settings.get("github.token"), + ) + + @property + def default_response(self): + return { + "oidc_enabled": self.oidc_enabled, + "project": self.project, + "github_provider_form": self.github_provider_form, + } + + @view_config(request_method="GET") + def manage_project_oidc_providers(self): + if not self.oidc_enabled: + raise HTTPNotFound + + if self.request.flags.enabled(AdminFlagValue.DISALLOW_OIDC): + self.request.session.flash( + ( + "OpenID Connect is temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + + return self.default_response + + @view_config(request_method="POST", request_param=GitHubProviderForm.__params__) + def add_github_oidc_provider(self): + if not self.oidc_enabled: + raise HTTPNotFound + + self.metrics.increment( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ) + + if self.request.flags.enabled(AdminFlagValue.DISALLOW_OIDC): + self.request.session.flash( + ( + "OpenID Connect is temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + return self.default_response + + try: + self._check_ratelimits() + except TooManyOIDCRegistrations as exc: + self.metrics.increment( + "warehouse.oidc.add_provider.ratelimited", tags=["provider:GitHub"] + ) + return HTTPTooManyRequests( + self.request._( + "There have been too many attempted OpenID Connect registrations. " + "Try again later." + ), + retry_after=exc.resets_in.total_seconds(), + ) + + self._hit_ratelimits() + + response = self.default_response + form = response["github_provider_form"] + + if form.validate(): + # GitHub OIDC providers are unique on the tuple of + # (repository_name, owner, workflow_filename), so we check for + # an already registered one before creating. 
+ provider = ( + self.request.db.query(GitHubProvider) + .filter( + GitHubProvider.repository_name == form.repository.data, + GitHubProvider.owner == form.normalized_owner, + GitHubProvider.workflow_filename == form.workflow_filename.data, + ) + .one_or_none() + ) + if provider is None: + provider = GitHubProvider( + repository_name=form.repository.data, + owner=form.normalized_owner, + owner_id=form.owner_id, + workflow_filename=form.workflow_filename.data, + ) + + self.request.db.add(provider) + + # Each project has a unique set of OIDC providers; the same + # provider can't be registered to the project more than once. + if provider in self.project.oidc_providers: + self.request.session.flash( + f"{provider} is already registered with {self.project.name}", + queue="error", + ) + return response + + for user in self.project.users: + send_oidc_provider_added_email( + self.request, + user, + project_name=self.project.name, + provider=provider, + ) + + self.project.oidc_providers.append(provider) + + self.project.record_event( + tag="project:oidc:provider-added", + ip_address=self.request.remote_addr, + additional={ + "provider": provider.provider_name, + "id": str(provider.id), + "specifier": str(provider), + }, + ) + + self.request.session.flash( + f"Added {provider} to {self.project.name}", + queue="success", + ) + + self.metrics.increment( + "warehouse.oidc.add_provider.ok", tags=["provider:GitHub"] + ) + + return response + + @view_config(request_method="POST", request_param=DeleteProviderForm.__params__) + def delete_oidc_provider(self): + if not self.oidc_enabled: + raise HTTPNotFound + + self.metrics.increment("warehouse.oidc.delete_provider.attempt") + + if self.request.flags.enabled(AdminFlagValue.DISALLOW_OIDC): + self.request.session.flash( + ( + "OpenID Connect is temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + return self.default_response + + form = DeleteProviderForm(self.request.POST) + + if form.validate(): + provider = self.request.db.query(OIDCProvider).get(form.provider_id.data) + + # provider will be `None` here if someone manually futzes with the form. + if provider is None or provider not in self.project.oidc_providers: + self.request.session.flash( + "Invalid publisher for project", + queue="error", + ) + return self.default_response + + for user in self.project.users: + send_oidc_provider_removed_email( + self.request, + user, + project_name=self.project.name, + provider=provider, + ) + + # NOTE: We remove the provider from the project, but we don't actually + # delete the provider model itself (since it might be associated + # with other projects). 
+ self.project.oidc_providers.remove(provider) + + self.project.record_event( + tag="project:oidc:provider-removed", + ip_address=self.request.remote_addr, + additional={ + "provider": provider.provider_name, + "id": str(provider.id), + "specifier": str(provider), + }, + ) + + self.request.session.flash( + f"Removed {provider} from {self.project.name}", queue="success" + ) + + self.metrics.increment( + "warehouse.oidc.delete_provider.ok", + tags=[f"provider:{provider.provider_name}"], + ) + + return self.default_response + + def get_user_role_in_project(project, user, request): return ( request.db.query(Role) @@ -1821,13 +2090,7 @@ def change_project_role(project, request, _form_class=ChangeRoleForm): }, ) - owner_roles = ( - request.db.query(Role) - .filter(Role.project == project) - .filter(Role.role_name == "Owner") - .all() - ) - owner_users = {owner.user for owner in owner_roles} + owner_users = set(project_owners(request, project)) # Don't send owner notification email to new user # if they are now an owner owner_users.discard(role.user) @@ -1898,13 +2161,7 @@ def delete_project_role(project, request): }, ) - owner_roles = ( - request.db.query(Role) - .filter(Role.project == project) - .filter(Role.role_name == "Owner") - .all() - ) - owner_users = {owner.user for owner in owner_roles} + owner_users = set(project_owners(request, project)) # Don't send owner notification email to new user # if they are now an owner owner_users.discard(role.user) diff --git a/warehouse/migrations/versions/f345394c444f_add_initial_oidc_provider_models.py b/warehouse/migrations/versions/f345394c444f_add_initial_oidc_provider_models.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/f345394c444f_add_initial_oidc_provider_models.py @@ -0,0 +1,106 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add initial OIDC provider models + +Revision ID: f345394c444f +Revises: fdf9e337538a +Create Date: 2022-02-15 21:11:41.693791 +""" + +import sqlalchemy as sa + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "f345394c444f" +down_revision = "fdf9e337538a" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be run automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. 
+ + +def upgrade(): + op.create_table( + "oidc_providers", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("discriminator", sa.String(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "github_oidc_providers", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("repository_name", sa.String(), nullable=True), + sa.Column("owner", sa.String(), nullable=True), + sa.Column("owner_id", sa.String(), nullable=True), + sa.Column("workflow_filename", sa.String(), nullable=True), + sa.ForeignKeyConstraint( + ["id"], + ["oidc_providers.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "repository_name", + "owner", + "workflow_filename", + name="_github_oidc_provider_uc", + ), + ) + op.create_table( + "oidc_provider_project_association", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("oidc_provider_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["oidc_provider_id"], + ["oidc_providers.id"], + ), + sa.ForeignKeyConstraint( + ["project_id"], + ["projects.id"], + ), + sa.PrimaryKeyConstraint("id", "oidc_provider_id", "project_id"), + ) + op.execute( + """ + INSERT INTO admin_flags(id, description, enabled, notify) + VALUES ( + 'disallow-oidc', + 'Disallow ALL OpenID Connect behavior, including authentication', + FALSE, + FALSE + ) + """ + ) + + +def downgrade(): + op.drop_table("oidc_provider_project_association") + op.drop_table("github_oidc_providers") + op.drop_table("oidc_providers") + op.execute("DELETE FROM admin_flags WHERE id = 'disallow-oidc'") diff --git a/warehouse/oidc/forms.py b/warehouse/oidc/forms.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/forms.py @@ -0,0 +1,148 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +import requests +import sentry_sdk +import wtforms + +from warehouse import forms +from warehouse.i18n import localize as _ + +_VALID_GITHUB_REPO = re.compile(r"^[a-zA-Z0-9-_.]+$") +_VALID_GITHUB_OWNER = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9-]*$") + + +class GitHubProviderForm(forms.Form): + __params__ = ["owner", "repository", "workflow_filename"] + + owner = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired( + message=_("Specify GitHub repository owner (username or organization)"), + ), + ] + ) + + repository = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(message=_("Specify repository name")), + wtforms.validators.Regexp( + _VALID_GITHUB_REPO, message=_("Invalid repository name") + ), + ] + ) + + workflow_filename = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(message=_("Specify workflow filename")) + ] + ) + + def __init__(self, *args, api_token, **kwargs): + super().__init__(*args, **kwargs) + self._api_token = api_token + + def _headers_auth(self): + if not self._api_token: + return {} + return {"Authorization": f"token {self._api_token}"} + + def _lookup_owner(self, owner): + # To actually validate the owner, we ask GitHub's API about them. + # We can't do this for the repository, since it might be private. + try: + response = requests.get( + f"https://api.github.com/users/{owner}", + headers={ + "Accept": "application/vnd.github.v3+json", + **self._headers_auth(), + }, + allow_redirects=True, + ) + response.raise_for_status() + except requests.HTTPError: + if response.status_code == 404: + raise wtforms.validators.ValidationError( + _("Unknown GitHub user or organization.") + ) + if response.status_code == 403: + # GitHub's API uses 403 to signal rate limiting, and returns a JSON + # blob explaining the reason. + sentry_sdk.capture_message( + "Exceeded GitHub rate limit for user lookups. " + f"Reason: {response.json()}" + ) + raise wtforms.validators.ValidationError( + _( + "GitHub has rate-limited this action. " + "Try again in a few minutes." + ) + ) + else: + sentry_sdk.capture_message( + f"Unexpected error from GitHub user lookup: {response.content=}" + ) + raise wtforms.validators.ValidationError( + _("Unexpected error from GitHub. Try again.") + ) + except requests.Timeout: + sentry_sdk.capture_message( + "Timeout from GitHub user lookup API (possibly offline)" + ) + raise wtforms.validators.ValidationError( + _("Unexpected timeout from GitHub. Try again in a few minutes.") + ) + + return response.json() + + def validate_owner(self, field): + owner = field.data + + # We pre-filter owners with a regex, to avoid loading GitHub's API + # with usernames/org names that will never be valid. + if not _VALID_GITHUB_OWNER.match(owner): + raise wtforms.validators.ValidationError( + _("Invalid GitHub user or organization name.") + ) + + owner_info = self._lookup_owner(owner) + + # NOTE: Use the normalized owner name as provided by GitHub. 
+ self.normalized_owner = owner_info["login"] + self.owner_id = owner_info["id"] + + def validate_workflow_filename(self, field): + workflow_filename = field.data + + if not ( + workflow_filename.endswith(".yml") or workflow_filename.endswith(".yaml") + ): + raise wtforms.validators.ValidationError( + _("Workflow name must end with .yml or .yaml") + ) + + if "/" in workflow_filename: + raise wtforms.validators.ValidationError( + _("Workflow filename must be a filename only, without directories") + ) + + +class DeleteProviderForm(forms.Form): + __params__ = ["provider_id"] + + provider_id = wtforms.StringField( + validators=[ + wtforms.validators.UUID(message=_("Provider must be specified by ID")) + ] + ) diff --git a/warehouse/oidc/interfaces.py b/warehouse/oidc/interfaces.py --- a/warehouse/oidc/interfaces.py +++ b/warehouse/oidc/interfaces.py @@ -13,6 +13,8 @@ from zope.interface import Interface +from warehouse.rate_limiting.interfaces import RateLimiterException + class IOIDCProviderService(Interface): def get_key(key_id): @@ -26,7 +28,24 @@ def get_key(key_id): """ pass - def verify(token): + def verify_signature_only(token): + """ + Verify the given JWT's signature and basic claims, returning + the decoded JWT, or `None` if invalid. + + This function **does not** verify the token's suitability + for a particular action; subsequent checks on the decoded token's + third party claims must be done to ensure that. """ - Verify the given JWT. + + def verify_for_project(token, project): """ + Verify the given JWT's signature and basic claims in the same + manner as `verify_signature_only`, but *also* verify that the JWT's + claims are consistent with at least one of the project's registered + OIDC providers. + """ + + +class TooManyOIDCRegistrations(RateLimiterException): + pass diff --git a/warehouse/oidc/models.py b/warehouse/oidc/models.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/models.py @@ -0,0 +1,182 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from typing import Any, Callable, Dict, Set + +import sentry_sdk + +from sqlalchemy import Column, ForeignKey, String, UniqueConstraint, orm +from sqlalchemy.dialects.postgresql import UUID + +from warehouse import db +from warehouse.packaging.models import Project + + +class OIDCProviderProjectAssociation(db.Model): + __tablename__ = "oidc_provider_project_association" + + oidc_provider_id = Column( + UUID(as_uuid=True), + ForeignKey("oidc_providers.id"), + nullable=False, + primary_key=True, + ) + project_id = Column( + UUID(as_uuid=True), ForeignKey("projects.id"), nullable=False, primary_key=True + ) + + +class OIDCProvider(db.Model): + __tablename__ = "oidc_providers" + + discriminator = Column(String) + projects = orm.relationship( + Project, + secondary=OIDCProviderProjectAssociation.__table__, # type: ignore + backref="oidc_providers", + ) + + __mapper_args__ = { + "polymorphic_identity": "oidc_providers", + "polymorphic_on": discriminator, + } + + # A map of claim names to "check" functions, each of which + # has the signature `check(ground-truth, signed-claim) -> bool`. + __verifiable_claims__: Dict[str, Callable[[Any, Any], bool]] = dict() + + # Claims that have already been verified during the JWT signature + # verification phase. + __preverified_claims__ = { + "iss", + "iat", + "nbf", + "exp", + "aud", + } + + # Individual providers should explicitly override this set, + # indicating any custom claims that are known to be present but are + # not checked as part of verifying the JWT. + __unchecked_claims__: Set[str] = set() + + @classmethod + def all_known_claims(cls): + """ + Returns all claims "known" to this provider. + """ + return ( + cls.__verifiable_claims__.keys() + | cls.__preverified_claims__ + | cls.__unchecked_claims__ + ) + + def verify_claims(self, signed_claims): + """ + Given a JWT that has been successfully decoded (checked for a valid + signature and basic claims), verify it against the more specific + claims of this provider. + """ + + # Defensive programming: treat the absence of any claims to verify + # as a failure rather than trivially valid. + if not self.__verifiable_claims__: + return False + + # All claims should be accounted for. + # The presence of an unaccounted claim is not an error, only a warning + # that the JWT payload has changed. + unaccounted_claims = signed_claims.keys() - self.all_known_claims() + if unaccounted_claims: + sentry_sdk.capture_message( + f"JWT for {self.__class__.__name__} has unaccounted claims: " + f"{unaccounted_claims}" + ) + + # Finally, perform the actual claim verification. + for claim_name, check in self.__verifiable_claims__.items(): + # All verifiable claims are mandatory. The absence of a missing + # claim *is* an error, since it indicates a breaking change in the + # JWT's payload. + signed_claim = signed_claims.get(claim_name) + if signed_claim is None: + sentry_sdk.capture_message( + f"JWT for {self.__class__.__name__} is missing claim: {claim_name}" + ) + return False + + if not check(getattr(self, claim_name), signed_claim): + return False + + return True + + @property + def provider_name(self): # pragma: no cover + # Only concrete subclasses of OIDCProvider are constructed. 
+ return NotImplemented + + +class GitHubProvider(OIDCProvider): + __tablename__ = "github_oidc_providers" + __mapper_args__ = {"polymorphic_identity": "github_oidc_providers"} + __table_args__ = ( + UniqueConstraint( + "repository_name", + "owner", + "workflow_filename", + name="_github_oidc_provider_uc", + ), + ) + + id = Column(UUID(as_uuid=True), ForeignKey(OIDCProvider.id), primary_key=True) + repository_name = Column(String) + owner = Column(String) + owner_id = Column(String) + workflow_filename = Column(String) + + __verifiable_claims__ = { + "repository": str.__eq__, + "workflow": str.__eq__, + } + + __unchecked_claims__ = { + "actor", + "jti", + "sub", + "ref", + "sha", + "run_id", + "run_number", + "run_attempt", + "head_ref", + "base_ref", + "event_name", + "ref_type", + # TODO(#11096): Support reusable workflows. + "job_workflow_ref", + } + + @property + def provider_name(self): + return "GitHub" + + @property + def repository(self): + return f"{self.owner}/{self.repository_name}" + + @property + def workflow(self): + return self.workflow_filename + + def __str__(self): + return f"{self.workflow_filename} @ {self.repository}" diff --git a/warehouse/oidc/services.py b/warehouse/oidc/services.py --- a/warehouse/oidc/services.py +++ b/warehouse/oidc/services.py @@ -12,11 +12,11 @@ import json +import jwt import redis import requests import sentry_sdk -from jwt import PyJWK from zope.interface import implementer from warehouse.metrics.interfaces import IMetricsService @@ -148,10 +148,87 @@ def get_key(self, key_id): tags=[f"provider:{self.provider}", f"key_id:{key_id}"], ) return None - return PyJWK(keyset[key_id]) + return jwt.PyJWK(keyset[key_id]) - def verify(self, token): - return NotImplemented + def _get_key_for_token(self, token): + """ + Return a JWK suitable for verifying the given JWT. + + The JWT is not verified at this point, and this step happens + prior to any verification. + """ + unverified_header = jwt.get_unverified_header(token) + return self.get_key(unverified_header["kid"]) + + def verify_signature_only(self, token): + key = self._get_key_for_token(token) + + try: + # NOTE: Many of the keyword arguments here are defaults, but we + # set them explicitly to assert the intended verification behavior. + signed_payload = jwt.decode( + token, + key=key, + algorithms=["RS256"], + verify_signature=True, + # "require" only checks for the presence of these claims, not + # their validity. Each has a corresponding "verify_" kwarg + # that enforces their actual validity. + require=["iss", "iat", "nbf", "exp", "aud"], + verify_iss=True, + verify_iat=True, + verify_nbf=True, + verify_exp=True, + verify_aud=True, + issuer=self.issuer_url, + audience="pypi", + leeway=30, + ) + return signed_payload + except jwt.PyJWTError: + return None + except Exception as e: + # We expect pyjwt to only raise subclasses of PyJWTError, but + # we can't enforce this. Other exceptions indicate an abstraction + # leak, so we log them for upstream reporting. 
+ sentry_sdk.capture_message(f"JWT verify raised generic error: {e}") + return None + + def verify_for_project(self, token, project): + signed_payload = self.verify_signature_only(token) + + metrics_tags = [f"project:{project.name}", f"provider:{self.provider}"] + self.metrics.increment( + "warehouse.oidc.verify_for_project.attempt", + tags=metrics_tags, + ) + + if signed_payload is None: + self.metrics.increment( + "warehouse.oidc.verify_for_project.invalid_signature", + tags=metrics_tags, + ) + return False + + # In order for a signed JWT to be valid for a particular PyPI project, + # it must match at least one of the OIDC providers registered to + # the project. + verified = any( + provider.verify_claims(signed_payload) + for provider in project.oidc_providers + ) + if not verified: + self.metrics.increment( + "warehouse.oidc.verify_for_project.invalid_claims", + tags=metrics_tags, + ) + else: + self.metrics.increment( + "warehouse.oidc.verify_for_project.ok", + tags=metrics_tags, + ) + + return verified class OIDCProviderServiceFactory: diff --git a/warehouse/rate_limiting/interfaces.py b/warehouse/rate_limiting/interfaces.py --- a/warehouse/rate_limiting/interfaces.py +++ b/warehouse/rate_limiting/interfaces.py @@ -38,3 +38,10 @@ def clear(*identifiers): """ Clears the rate limiter identified by the identifiers. """ + + +class RateLimiterException(Exception): + def __init__(self, *args, resets_in, **kwargs): + self.resets_in = resets_in + + return super().__init__(*args, **kwargs) diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -229,6 +229,13 @@ def includeme(config): traverse="/{project_name}", domain=warehouse, ) + config.add_route( + "manage.project.settings.publishing", + "/manage/project/{project_name}/settings/publishing/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) config.add_route( "manage.project.delete_project", "/manage/project/{project_name}/delete_project/",
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -3575,3 +3575,96 @@ def test_recovery_code_emails( }, ) ] + + +class TestOIDCProviderEmails: + @pytest.mark.parametrize( + "fn, template_name", + [ + (email.send_oidc_provider_added_email, "oidc-provider-added"), + (email.send_oidc_provider_removed_email, "oidc-provider-removed"), + ], + ) + def test_oidc_provider_emails( + self, pyramid_request, pyramid_config, monkeypatch, fn, template_name + ): + stub_user = pretend.stub( + id="id", + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + subject_renderer = pyramid_config.testing_add_renderer( + f"email/{ template_name }/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + f"email/{ template_name }/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + f"email/{ template_name }/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=stub_user.id) + ) + ), + ) + pyramid_request.user = stub_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + project_name = "test_project" + fakeprovider = pretend.stub(provider_name="fakeprovider") + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr( + fakeprovider.__class__, "__str__", lambda s: "fakespecifier" + ) + + result = fn( + pyramid_request, stub_user, project_name=project_name, provider=fakeprovider + ) + + assert result == { + "username": stub_user.username, + "project_name": project_name, + "provider_name": "fakeprovider", + "provider_spec": "fakespecifier", + } + subject_renderer.assert_() + body_renderer.assert_(username=stub_user.username, project_name=project_name) + html_renderer.assert_(username=stub_user.username, project_name=project_name) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{stub_user.username} <{stub_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": stub_user.id, + "additional": { + "from_": "[email protected]", + "to": stub_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] diff --git a/tests/unit/manage/test_init.py b/tests/unit/manage/test_init.py --- a/tests/unit/manage/test_init.py +++ b/tests/unit/manage/test_init.py @@ -94,13 +94,37 @@ def view(context, request): assert request.session.needs_reauthentication.calls == needs_reauth_calls -def test_includeme(): +def test_includeme(monkeypatch): + settings = { + "warehouse.manage.oidc.user_registration_ratelimit_string": "10 per day", + "warehouse.manage.oidc.ip_registration_ratelimit_string": "100 per day", + } + config = pretend.stub( add_view_deriver=pretend.call_recorder(lambda f, over, under: None), + 
register_service_factory=pretend.call_recorder(lambda s, i, **kw: None), + registry=pretend.stub( + settings=pretend.stub(get=pretend.call_recorder(lambda k: settings.get(k))) + ), ) + rate_limit_class = pretend.call_recorder(lambda s: s) + rate_limit_iface = pretend.stub() + monkeypatch.setattr(manage, "RateLimit", rate_limit_class) + monkeypatch.setattr(manage, "IRateLimiter", rate_limit_iface) + manage.includeme(config) assert config.add_view_deriver.calls == [ pretend.call(manage.reauth_view, over="rendered_view", under="decorated_view") ] + assert config.register_service_factory.calls == [ + pretend.call( + "10 per day", rate_limit_iface, name="user_oidc.provider.register" + ), + pretend.call("100 per day", rate_limit_iface, name="ip_oidc.provider.register"), + ] + assert config.registry.settings.get.calls == [ + pretend.call("warehouse.manage.oidc.user_registration_ratelimit_string"), + pretend.call("warehouse.manage.oidc.ip_registration_ratelimit_string"), + ] diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -18,7 +18,12 @@ import pytest from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage -from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther +from pyramid.httpexceptions import ( + HTTPBadRequest, + HTTPNotFound, + HTTPSeeOther, + HTTPTooManyRequests, +) from pyramid.response import Response from sqlalchemy.orm import joinedload from sqlalchemy.orm.exc import NoResultFound @@ -37,6 +42,8 @@ from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage import views +from warehouse.metrics.interfaces import IMetricsService +from warehouse.oidc.interfaces import TooManyOIDCRegistrations from warehouse.packaging.models import ( File, JournalEntry, @@ -46,6 +53,7 @@ RoleInvitation, User, ) +from warehouse.rate_limiting import IRateLimiter from warehouse.utils.paginate import paginate_url_factory from warehouse.utils.project import remove_documentation @@ -4682,3 +4690,805 @@ def test_raises_404_with_out_of_range_page(self, db_request): with pytest.raises(HTTPNotFound): assert views.manage_project_journal(project, db_request) + + +class TestManageOIDCProviderViews: + def test_initializes(self): + metrics = pretend.stub() + project = pretend.stub() + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=pretend.call_recorder(lambda *a, **kw: metrics), + ) + view = views.ManageOIDCProviderViews(project, request) + + assert view.project is project + assert view.request is request + assert view.oidc_enabled + assert view.metrics is metrics + + assert view.request.find_service.calls == [ + pretend.call(IMetricsService, context=None) + ] + + @pytest.mark.parametrize( + "ip_exceeded, user_exceeded", + [ + (False, False), + (False, True), + (True, False), + ], + ) + def test_ratelimiting(self, ip_exceeded, user_exceeded): + project = pretend.stub() + + metrics = pretend.stub() + user_rate_limiter = pretend.stub( + hit=pretend.call_recorder(lambda *a, **kw: None), + test=pretend.call_recorder(lambda uid: not user_exceeded), + resets_in=pretend.call_recorder(lambda uid: pretend.stub()), + ) + ip_rate_limiter = pretend.stub( + hit=pretend.call_recorder(lambda *a, **kw: None), + test=pretend.call_recorder(lambda ip: not ip_exceeded), + resets_in=pretend.call_recorder(lambda uid: pretend.stub()), + ) + + def 
find_service(iface, name=None, context=None): + if iface is IMetricsService: + return metrics + + if name == "user_oidc.provider.register": + return user_rate_limiter + else: + return ip_rate_limiter + + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=pretend.call_recorder(find_service), + user=pretend.stub(id=pretend.stub()), + remote_addr=pretend.stub(), + ) + + view = views.ManageOIDCProviderViews(project, request) + + assert view._ratelimiters == { + "user.oidc": user_rate_limiter, + "ip.oidc": ip_rate_limiter, + } + assert request.find_service.calls == [ + pretend.call(IMetricsService, context=None), + pretend.call(IRateLimiter, name="user_oidc.provider.register"), + pretend.call(IRateLimiter, name="ip_oidc.provider.register"), + ] + + view._hit_ratelimits() + + assert user_rate_limiter.hit.calls == [ + pretend.call(request.user.id), + ] + assert ip_rate_limiter.hit.calls == [pretend.call(request.remote_addr)] + + if user_exceeded or ip_exceeded: + with pytest.raises(TooManyOIDCRegistrations): + view._check_ratelimits() + else: + view._check_ratelimits() + + def test_manage_project_oidc_providers(self, monkeypatch): + project = pretend.stub() + request = pretend.stub( + registry=pretend.stub( + settings={ + "warehouse.oidc.enabled": True, + "github.token": "fake-api-token", + }, + ), + find_service=lambda *a, **kw: None, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + POST=pretend.stub(), + ) + + github_provider_form_obj = pretend.stub() + github_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: github_provider_form_obj + ) + monkeypatch.setattr(views, "GitHubProviderForm", github_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + assert view.manage_project_oidc_providers() == { + "oidc_enabled": True, + "project": project, + "github_provider_form": github_provider_form_obj, + } + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_OIDC) + ] + assert github_provider_form_cls.calls == [ + pretend.call(request.POST, api_token="fake-api-token") + ] + + def test_manage_project_oidc_providers_admin_disabled(self, monkeypatch): + project = pretend.stub() + request = pretend.stub( + registry=pretend.stub( + settings={ + "warehouse.oidc.enabled": True, + "github.token": "fake-api-token", + }, + ), + find_service=lambda *a, **kw: None, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: True)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + ) + + view = views.ManageOIDCProviderViews(project, request) + github_provider_form_obj = pretend.stub() + github_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: github_provider_form_obj + ) + monkeypatch.setattr(views, "GitHubProviderForm", github_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + assert view.manage_project_oidc_providers() == { + "oidc_enabled": True, + "project": project, + "github_provider_form": github_provider_form_obj, + } + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_OIDC) + ] + assert request.session.flash.calls == [ + pretend.call( + ( + "OpenID Connect is temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." 
+ ), + queue="error", + ) + ] + assert github_provider_form_cls.calls == [ + pretend.call(request.POST, api_token="fake-api-token") + ] + + def test_manage_project_oidc_providers_oidc_not_enabled(self): + project = pretend.stub() + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": False}), + find_service=lambda *a, **kw: None, + ) + + view = views.ManageOIDCProviderViews(project, request) + + with pytest.raises(HTTPNotFound): + view.manage_project_oidc_providers() + + def test_add_github_oidc_provider_preexisting(self, monkeypatch): + provider = pretend.stub( + id="fakeid", + provider_name="GitHub", + repository_name="fakerepo", + owner="fakeowner", + owner_id="1234", + workflow_filename="fakeworkflow.yml", + ) + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr(provider.__class__, "__str__", lambda s: "fakespecifier") + + project = pretend.stub( + name="fakeproject", + oidc_providers=[], + record_event=pretend.call_recorder(lambda *a, **kw: None), + users=[], + ) + + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + + request = pretend.stub( + registry=pretend.stub( + settings={ + "warehouse.oidc.enabled": True, + "github.token": "fake-api-token", + } + ), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + db=pretend.stub( + query=lambda *a: pretend.stub( + filter=lambda *a: pretend.stub(one_or_none=lambda: provider) + ), + add=pretend.call_recorder(lambda o: None), + ), + remote_addr="0.0.0.0", + ) + + github_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + repository=pretend.stub(data=provider.repository_name), + normalized_owner=provider.owner, + workflow_filename=pretend.stub(data=provider.workflow_filename), + ) + github_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: github_provider_form_obj + ) + monkeypatch.setattr(views, "GitHubProviderForm", github_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + monkeypatch.setattr( + view, "_hit_ratelimits", pretend.call_recorder(lambda: None) + ) + monkeypatch.setattr( + view, "_check_ratelimits", pretend.call_recorder(lambda: None) + ) + + assert view.add_github_oidc_provider() == { + "oidc_enabled": True, + "project": project, + "github_provider_form": github_provider_form_obj, + } + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ), + pretend.call("warehouse.oidc.add_provider.ok", tags=["provider:GitHub"]), + ] + assert project.record_event.calls == [ + pretend.call( + tag="project:oidc:provider-added", + ip_address=request.remote_addr, + additional={ + "provider": "GitHub", + "id": "fakeid", + "specifier": "fakespecifier", + }, + ) + ] + assert request.session.flash.calls == [ + pretend.call( + "Added fakespecifier to fakeproject", + queue="success", + ) + ] + assert request.db.add.calls == [] + assert github_provider_form_obj.validate.calls == [pretend.call()] + assert view._hit_ratelimits.calls == [pretend.call()] + assert view._check_ratelimits.calls == [pretend.call()] + assert project.oidc_providers == [provider] + + def test_add_github_oidc_provider_created(self, monkeypatch): + fakeusers = [pretend.stub(), pretend.stub(), pretend.stub()] + project = pretend.stub( + name="fakeproject", + oidc_providers=[], + 
record_event=pretend.call_recorder(lambda *a, **kw: None), + users=fakeusers, + ) + + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + + request = pretend.stub( + registry=pretend.stub( + settings={ + "warehouse.oidc.enabled": True, + "github.token": "fake-api-token", + } + ), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + db=pretend.stub( + query=lambda *a: pretend.stub( + filter=lambda *a: pretend.stub(one_or_none=lambda: None) + ), + add=pretend.call_recorder(lambda o: setattr(o, "id", "fakeid")), + ), + remote_addr="0.0.0.0", + ) + + github_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + repository=pretend.stub(data="fakerepo"), + normalized_owner="fakeowner", + owner_id="1234", + workflow_filename=pretend.stub(data="fakeworkflow.yml"), + ) + github_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: github_provider_form_obj + ) + monkeypatch.setattr(views, "GitHubProviderForm", github_provider_form_cls) + monkeypatch.setattr( + views, + "send_oidc_provider_added_email", + pretend.call_recorder(lambda *a, **kw: None), + ) + + view = views.ManageOIDCProviderViews(project, request) + monkeypatch.setattr( + view, "_hit_ratelimits", pretend.call_recorder(lambda: None) + ) + monkeypatch.setattr( + view, "_check_ratelimits", pretend.call_recorder(lambda: None) + ) + + assert view.add_github_oidc_provider() == { + "oidc_enabled": True, + "project": project, + "github_provider_form": github_provider_form_obj, + } + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ), + pretend.call("warehouse.oidc.add_provider.ok", tags=["provider:GitHub"]), + ] + assert project.record_event.calls == [ + pretend.call( + tag="project:oidc:provider-added", + ip_address=request.remote_addr, + additional={ + "provider": "GitHub", + "id": "fakeid", + "specifier": "fakeworkflow.yml @ fakeowner/fakerepo", + }, + ) + ] + assert request.session.flash.calls == [ + pretend.call( + "Added fakeworkflow.yml @ fakeowner/fakerepo to fakeproject", + queue="success", + ) + ] + assert request.db.add.calls == [pretend.call(project.oidc_providers[0])] + assert github_provider_form_obj.validate.calls == [pretend.call()] + assert views.send_oidc_provider_added_email.calls == [ + pretend.call( + request, + fakeuser, + project_name="fakeproject", + provider=project.oidc_providers[0], + ) + for fakeuser in fakeusers + ] + assert view._hit_ratelimits.calls == [pretend.call()] + assert view._check_ratelimits.calls == [pretend.call()] + assert len(project.oidc_providers) == 1 + + def test_add_github_oidc_provider_already_registered_with_project( + self, monkeypatch + ): + provider = pretend.stub( + id="fakeid", + provider_name="GitHub", + repository_name="fakerepo", + owner="fakeowner", + owner_id="1234", + workflow_filename="fakeworkflow.yml", + ) + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr(provider.__class__, "__str__", lambda s: "fakespecifier") + + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + + project = pretend.stub( + name="fakeproject", + oidc_providers=[provider], + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + + request = pretend.stub( + registry=pretend.stub( + settings={ + "warehouse.oidc.enabled": True, + "github.token": 
"fake-api-token", + } + ), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + db=pretend.stub( + query=lambda *a: pretend.stub( + filter=lambda *a: pretend.stub(one_or_none=lambda: provider) + ), + ), + ) + + github_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + repository=pretend.stub(data=provider.repository_name), + normalized_owner=provider.owner, + workflow_filename=pretend.stub(data=provider.workflow_filename), + ) + github_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: github_provider_form_obj + ) + monkeypatch.setattr(views, "GitHubProviderForm", github_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + monkeypatch.setattr( + view, "_hit_ratelimits", pretend.call_recorder(lambda: None) + ) + monkeypatch.setattr( + view, "_check_ratelimits", pretend.call_recorder(lambda: None) + ) + + assert view.add_github_oidc_provider() == { + "oidc_enabled": True, + "project": project, + "github_provider_form": github_provider_form_obj, + } + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ), + ] + assert project.record_event.calls == [] + assert request.session.flash.calls == [ + pretend.call( + "fakespecifier is already registered with fakeproject", + queue="error", + ) + ] + + def test_add_github_oidc_provider_ratelimited(self, monkeypatch): + project = pretend.stub() + + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + + request = pretend.stub( + registry=pretend.stub( + settings={ + "warehouse.oidc.enabled": True, + } + ), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + _=lambda s: s, + ) + + view = views.ManageOIDCProviderViews(project, request) + monkeypatch.setattr( + view, + "_check_ratelimits", + pretend.call_recorder( + pretend.raiser( + TooManyOIDCRegistrations( + resets_in=pretend.stub(total_seconds=lambda: 60) + ) + ) + ), + ) + + assert view.add_github_oidc_provider().__class__ == HTTPTooManyRequests + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ), + pretend.call( + "warehouse.oidc.add_provider.ratelimited", tags=["provider:GitHub"] + ), + ] + + def test_add_github_oidc_provider_oidc_not_enabled(self): + project = pretend.stub() + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": False}), + find_service=lambda *a, **kw: None, + ) + + view = views.ManageOIDCProviderViews(project, request) + + with pytest.raises(HTTPNotFound): + view.add_github_oidc_provider() + + def test_add_github_oidc_provider_admin_disabled(self, monkeypatch): + project = pretend.stub() + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: True)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + _=lambda s: s, + ) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"_": pretend.stub()} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + + assert 
view.add_github_oidc_provider() == default_response + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ), + ] + assert request.session.flash.calls == [ + pretend.call( + ( + "OpenID Connect is temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + ] + + def test_add_github_oidc_provider_invalid_form(self, monkeypatch): + project = pretend.stub() + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + _=lambda s: s, + ) + + github_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: False), + ) + github_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: github_provider_form_obj + ) + monkeypatch.setattr(views, "GitHubProviderForm", github_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"github_provider_form": github_provider_form_obj} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + monkeypatch.setattr( + view, "_check_ratelimits", pretend.call_recorder(lambda: None) + ) + monkeypatch.setattr( + view, "_hit_ratelimits", pretend.call_recorder(lambda: None) + ) + + assert view.add_github_oidc_provider() == default_response + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.add_provider.attempt", tags=["provider:GitHub"] + ), + ] + assert view._hit_ratelimits.calls == [pretend.call()] + assert view._check_ratelimits.calls == [pretend.call()] + assert github_provider_form_obj.validate.calls == [pretend.call()] + + def test_delete_oidc_provider(self, monkeypatch): + provider = pretend.stub( + provider_name="fakeprovider", + id="fakeid", + ) + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr(provider.__class__, "__str__", lambda s: "fakespecifier") + + fakeusers = [pretend.stub(), pretend.stub(), pretend.stub()] + project = pretend.stub( + oidc_providers=[provider], + name="fakeproject", + record_event=pretend.call_recorder(lambda *a, **kw: None), + users=fakeusers, + ) + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + db=pretend.stub( + query=lambda *a: pretend.stub(get=lambda id: provider), + ), + remote_addr="0.0.0.0", + ) + + delete_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + provider_id=pretend.stub(data="fakeid"), + ) + delete_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: delete_provider_form_obj + ) + monkeypatch.setattr(views, "DeleteProviderForm", delete_provider_form_cls) + monkeypatch.setattr( + views, + "send_oidc_provider_removed_email", + pretend.call_recorder(lambda *a, **kw: None), + ) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"_": pretend.stub()} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + 
+ assert view.delete_oidc_provider() == default_response + assert provider not in project.oidc_providers + + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.delete_provider.attempt", + ), + pretend.call( + "warehouse.oidc.delete_provider.ok", tags=["provider:fakeprovider"] + ), + ] + + assert project.record_event.calls == [ + pretend.call( + tag="project:oidc:provider-removed", + ip_address=request.remote_addr, + additional={ + "provider": "fakeprovider", + "id": "fakeid", + "specifier": "fakespecifier", + }, + ) + ] + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_OIDC) + ] + assert request.session.flash.calls == [ + pretend.call("Removed fakespecifier from fakeproject", queue="success") + ] + + assert delete_provider_form_cls.calls == [pretend.call(request.POST)] + assert delete_provider_form_obj.validate.calls == [pretend.call()] + + assert views.send_oidc_provider_removed_email.calls == [ + pretend.call( + request, fakeuser, project_name="fakeproject", provider=provider + ) + for fakeuser in fakeusers + ] + + def test_delete_oidc_provider_invalid_form(self, monkeypatch): + provider = pretend.stub() + project = pretend.stub(oidc_providers=[provider]) + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + POST=pretend.stub(), + ) + + delete_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: False), + ) + delete_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: delete_provider_form_obj + ) + monkeypatch.setattr(views, "DeleteProviderForm", delete_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"_": pretend.stub()} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + + assert view.delete_oidc_provider() == default_response + assert len(project.oidc_providers) == 1 + + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.delete_provider.attempt", + ), + ] + + assert delete_provider_form_cls.calls == [pretend.call(request.POST)] + assert delete_provider_form_obj.validate.calls == [pretend.call()] + + @pytest.mark.parametrize( + "other_provider", [None, pretend.stub(id="different-fakeid")] + ) + def test_delete_oidc_provider_not_found(self, monkeypatch, other_provider): + provider = pretend.stub( + provider_name="fakeprovider", + id="fakeid", + ) + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr(provider.__class__, "__str__", lambda s: "fakespecifier") + + project = pretend.stub( + oidc_providers=[provider], + name="fakeproject", + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + db=pretend.stub( + query=lambda *a: pretend.stub(get=lambda id: other_provider), + ), + remote_addr="0.0.0.0", + ) + + delete_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + provider_id=pretend.stub(data="different-fakeid"), + 
) + delete_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: delete_provider_form_obj + ) + monkeypatch.setattr(views, "DeleteProviderForm", delete_provider_form_cls) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"_": pretend.stub()} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + + assert view.delete_oidc_provider() == default_response + assert provider in project.oidc_providers # not deleted + assert other_provider not in project.oidc_providers + + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.delete_provider.attempt", + ), + ] + + assert project.record_event.calls == [] + assert request.session.flash.calls == [ + pretend.call("Invalid publisher for project", queue="error") + ] + + assert delete_provider_form_cls.calls == [pretend.call(request.POST)] + assert delete_provider_form_obj.validate.calls == [pretend.call()] + + def test_delete_oidc_provider_oidc_not_enabled(self): + project = pretend.stub() + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": False}), + find_service=lambda *a, **kw: None, + ) + + view = views.ManageOIDCProviderViews(project, request) + + with pytest.raises(HTTPNotFound): + view.delete_oidc_provider() + + def test_delete_oidc_provider_admin_disabled(self, monkeypatch): + project = pretend.stub() + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: True)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + ) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"_": pretend.stub()} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + + assert view.delete_oidc_provider() == default_response + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.delete_provider.attempt", + ), + ] + assert request.session.flash.calls == [ + pretend.call( + ( + "OpenID Connect is temporarily disabled. " + "See https://pypi.org/help#admin-intervention for details." + ), + queue="error", + ) + ] diff --git a/tests/unit/oidc/test_forms.py b/tests/unit/oidc/test_forms.py new file mode 100644 --- /dev/null +++ b/tests/unit/oidc/test_forms.py @@ -0,0 +1,249 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pretend +import pytest +import wtforms + +from requests import HTTPError, Timeout +from webob.multidict import MultiDict + +from warehouse.oidc import forms + + +class TestGitHubProviderForm: + @pytest.mark.parametrize( + "token, headers", + [ + ( + None, + {}, + ), + ("fake-token", {"Authorization": "token fake-token"}), + ], + ) + def test_creation(self, token, headers): + form = forms.GitHubProviderForm(api_token=token) + + assert form._api_token == token + assert form._headers_auth() == headers + + def test_lookup_owner_404(self, monkeypatch): + response = pretend.stub( + status_code=404, raise_for_status=pretend.raiser(HTTPError) + ) + requests = pretend.stub( + get=pretend.call_recorder(lambda o, **kw: response), HTTPError=HTTPError + ) + monkeypatch.setattr(forms, "requests", requests) + + form = forms.GitHubProviderForm(api_token="fake-token") + with pytest.raises(wtforms.validators.ValidationError): + form._lookup_owner("some-owner") + + assert requests.get.calls == [ + pretend.call( + "https://api.github.com/users/some-owner", + headers={ + "Accept": "application/vnd.github.v3+json", + "Authorization": "token fake-token", + }, + allow_redirects=True, + ) + ] + + def test_lookup_owner_403(self, monkeypatch): + response = pretend.stub( + status_code=403, + raise_for_status=pretend.raiser(HTTPError), + json=lambda: {"message": "fake-message"}, + ) + requests = pretend.stub( + get=pretend.call_recorder(lambda o, **kw: response), HTTPError=HTTPError + ) + monkeypatch.setattr(forms, "requests", requests) + + sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None)) + monkeypatch.setattr(forms, "sentry_sdk", sentry_sdk) + + form = forms.GitHubProviderForm(api_token="fake-token") + with pytest.raises(wtforms.validators.ValidationError): + form._lookup_owner("some-owner") + + assert requests.get.calls == [ + pretend.call( + "https://api.github.com/users/some-owner", + headers={ + "Accept": "application/vnd.github.v3+json", + "Authorization": "token fake-token", + }, + allow_redirects=True, + ) + ] + assert sentry_sdk.capture_message.calls == [ + pretend.call( + "Exceeded GitHub rate limit for user lookups. 
" + "Reason: {'message': 'fake-message'}" + ) + ] + + def test_lookup_owner_other_http_error(self, monkeypatch): + response = pretend.stub( + # anything that isn't 404 or 403 + status_code=422, + raise_for_status=pretend.raiser(HTTPError), + content=b"fake-content", + ) + requests = pretend.stub( + get=pretend.call_recorder(lambda o, **kw: response), HTTPError=HTTPError + ) + monkeypatch.setattr(forms, "requests", requests) + + sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None)) + monkeypatch.setattr(forms, "sentry_sdk", sentry_sdk) + + form = forms.GitHubProviderForm(api_token="fake-token") + with pytest.raises(wtforms.validators.ValidationError): + form._lookup_owner("some-owner") + + assert requests.get.calls == [ + pretend.call( + "https://api.github.com/users/some-owner", + headers={ + "Accept": "application/vnd.github.v3+json", + "Authorization": "token fake-token", + }, + allow_redirects=True, + ) + ] + + assert sentry_sdk.capture_message.calls == [ + pretend.call( + "Unexpected error from GitHub user lookup: " + "response.content=b'fake-content'" + ) + ] + + def test_lookup_owner_http_timeout(self, monkeypatch): + requests = pretend.stub( + get=pretend.raiser(Timeout), + Timeout=Timeout, + HTTPError=HTTPError, + ) + monkeypatch.setattr(forms, "requests", requests) + + sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None)) + monkeypatch.setattr(forms, "sentry_sdk", sentry_sdk) + + form = forms.GitHubProviderForm(api_token="fake-token") + with pytest.raises(wtforms.validators.ValidationError): + form._lookup_owner("some-owner") + + assert sentry_sdk.capture_message.calls == [ + pretend.call("Timeout from GitHub user lookup API (possibly offline)") + ] + + def test_lookup_owner_succeeds(self, monkeypatch): + fake_owner_info = pretend.stub() + response = pretend.stub( + status_code=200, + raise_for_status=pretend.call_recorder(lambda: None), + json=lambda: fake_owner_info, + ) + requests = pretend.stub( + get=pretend.call_recorder(lambda o, **kw: response), HTTPError=HTTPError + ) + monkeypatch.setattr(forms, "requests", requests) + + form = forms.GitHubProviderForm(api_token="fake-token") + info = form._lookup_owner("some-owner") + + assert requests.get.calls == [ + pretend.call( + "https://api.github.com/users/some-owner", + headers={ + "Accept": "application/vnd.github.v3+json", + "Authorization": "token fake-token", + }, + allow_redirects=True, + ) + ] + assert response.raise_for_status.calls == [pretend.call()] + assert info == fake_owner_info + + @pytest.mark.parametrize( + "data", + [ + {"owner": None, "repository": "some", "workflow_filename": "some"}, + {"owner": "", "repository": "some", "workflow_filename": "some"}, + { + "owner": "invalid_characters@", + "repository": "some", + "workflow_filename": "some", + }, + {"repository": None, "owner": "some", "workflow_filename": "some"}, + {"repository": "", "owner": "some", "workflow_filename": "some"}, + { + "repository": "$invalid#characters", + "owner": "some", + "workflow_filename": "some", + }, + {"repository": "some", "owner": "some", "workflow_filename": None}, + {"repository": "some", "owner": "some", "workflow_filename": ""}, + ], + ) + def test_validate_basic_invalid_fields(self, monkeypatch, data): + form = forms.GitHubProviderForm(MultiDict(data), api_token=pretend.stub()) + + # We're testing only the basic validation here. 
+ owner_info = {"login": "fake-username", "id": "1234"} + monkeypatch.setattr(form, "_lookup_owner", lambda o: owner_info) + + assert not form.validate() + + def test_validate(self, monkeypatch): + data = MultiDict( + { + "owner": "some-owner", + "repository": "some-repo", + "workflow_filename": "some-workflow.yml", + } + ) + form = forms.GitHubProviderForm(MultiDict(data), api_token=pretend.stub()) + + # We're testing only the basic validation here. + owner_info = {"login": "fake-username", "id": "1234"} + monkeypatch.setattr(form, "_lookup_owner", lambda o: owner_info) + + assert form.validate() + + def test_validate_owner(self, monkeypatch): + form = forms.GitHubProviderForm(api_token=pretend.stub()) + + owner_info = {"login": "some-username", "id": "1234"} + monkeypatch.setattr(form, "_lookup_owner", lambda o: owner_info) + + field = pretend.stub(data="SOME-USERNAME") + form.validate_owner(field) + + assert form.normalized_owner == "some-username" + assert form.owner_id == "1234" + + @pytest.mark.parametrize( + "workflow_filename", ["missing_suffix", "/slash", "/many/slashes", "/slash.yml"] + ) + def test_validate_workflow_filename(self, workflow_filename): + form = forms.GitHubProviderForm(api_token=pretend.stub()) + field = pretend.stub(data=workflow_filename) + + with pytest.raises(wtforms.validators.ValidationError): + form.validate_workflow_filename(field) diff --git a/tests/unit/oidc/test_models.py b/tests/unit/oidc/test_models.py new file mode 100644 --- /dev/null +++ b/tests/unit/oidc/test_models.py @@ -0,0 +1,130 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pretend + +from warehouse.oidc import models + + +class TestOIDCProvider: + def test_oidc_provider_not_default_verifiable(self): + provider = models.OIDCProvider(projects=[]) + + assert not provider.verify_claims(signed_claims={}) + + +class TestGitHubProvider: + def test_github_provider_all_known_claims(self): + assert models.GitHubProvider.all_known_claims() == { + # verifiable claims + "repository", + "workflow", + # preverified claims + "iss", + "iat", + "nbf", + "exp", + "aud", + # unchecked claims + "actor", + "jti", + "sub", + "ref", + "sha", + "run_id", + "run_number", + "run_attempt", + "head_ref", + "base_ref", + "event_name", + "ref_type", + "job_workflow_ref", + } + + def test_github_provider_computed_properties(self): + provider = models.GitHubProvider( + repository_name="fakerepo", + owner="fakeowner", + owner_id="fakeid", + workflow_filename="fakeworkflow.yml", + ) + + for claim_name in provider.__verifiable_claims__.keys(): + assert getattr(provider, claim_name) is not None + + assert str(provider) == "fakeworkflow.yml @ fakeowner/fakerepo" + + def test_github_provider_unaccounted_claims(self, monkeypatch): + provider = models.GitHubProvider( + repository_name="fakerepo", + owner="fakeowner", + owner_id="fakeid", + workflow_filename="fakeworkflow.yml", + ) + + sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None)) + monkeypatch.setattr(models, "sentry_sdk", sentry_sdk) + + # We don't care if these actually verify, only that they're present. + signed_claims = { + claim_name: "fake" + for claim_name in models.GitHubProvider.all_known_claims() + } + signed_claims["fake-claim"] = "fake" + assert not provider.verify_claims(signed_claims=signed_claims) + assert sentry_sdk.capture_message.calls == [ + pretend.call( + "JWT for GitHubProvider has unaccounted claims: {'fake-claim'}" + ) + ] + + def test_github_provider_missing_claims(self, monkeypatch): + provider = models.GitHubProvider( + repository_name="fakerepo", + owner="fakeowner", + owner_id="fakeid", + workflow_filename="fakeworkflow.yml", + ) + + sentry_sdk = pretend.stub(capture_message=pretend.call_recorder(lambda s: None)) + monkeypatch.setattr(models, "sentry_sdk", sentry_sdk) + + signed_claims = { + claim_name: "fake" + for claim_name in models.GitHubProvider.all_known_claims() + } + signed_claims.pop("repository") + assert not provider.verify_claims(signed_claims=signed_claims) + assert sentry_sdk.capture_message.calls == [ + pretend.call("JWT for GitHubProvider is missing claim: repository") + ] + + def test_github_provider_verifies(self, monkeypatch): + provider = models.GitHubProvider( + repository_name="fakerepo", + owner="fakeowner", + owner_id="fakeid", + workflow_filename="fakeworkflow.yml", + ) + + noop_check = pretend.call_recorder(lambda l, r: True) + verifiable_claims = { + claim_name: noop_check for claim_name in provider.__verifiable_claims__ + } + monkeypatch.setattr(provider, "__verifiable_claims__", verifiable_claims) + + signed_claims = { + claim_name: "fake" + for claim_name in models.GitHubProvider.all_known_claims() + } + assert provider.verify_claims(signed_claims=signed_claims) + assert len(noop_check.calls) == len(verifiable_claims) diff --git a/tests/unit/oidc/test_services.py b/tests/unit/oidc/test_services.py --- a/tests/unit/oidc/test_services.py +++ b/tests/unit/oidc/test_services.py @@ -11,8 +11,9 @@ # limitations under the License. 
import pretend +import pytest -from jwt import PyJWK +from jwt import PyJWK, PyJWTError from zope.interface.verify import verifyClass from warehouse.oidc import interfaces, services @@ -49,14 +50,151 @@ def test_oidc_provider_service_factory(): class TestOIDCProviderService: - def test_verify(self): + def test_verify_signature_only(self, monkeypatch): service = services.OIDCProviderService( provider=pretend.stub(), issuer_url=pretend.stub(), cache_url=pretend.stub(), metrics=pretend.stub(), ) - assert service.verify(pretend.stub()) == NotImplemented + + token = pretend.stub() + decoded = pretend.stub() + jwt = pretend.stub(decode=pretend.call_recorder(lambda t, **kwargs: decoded)) + monkeypatch.setattr( + service, "_get_key_for_token", pretend.call_recorder(lambda t: "fake-key") + ) + monkeypatch.setattr(services, "jwt", jwt) + + assert service.verify_signature_only(token) == decoded + assert jwt.decode.calls == [ + pretend.call( + token, + key="fake-key", + algorithms=["RS256"], + verify_signature=True, + require=["iss", "iat", "nbf", "exp", "aud"], + verify_iss=True, + verify_iat=True, + verify_nbf=True, + verify_exp=True, + verify_aud=True, + issuer=service.issuer_url, + audience="pypi", + leeway=30, + ) + ] + + @pytest.mark.parametrize("exc", [PyJWTError, ValueError]) + def test_verify_signature_only_fails(self, monkeypatch, exc): + service = services.OIDCProviderService( + provider=pretend.stub(), + issuer_url=pretend.stub(), + cache_url=pretend.stub(), + metrics=pretend.stub(), + ) + + token = pretend.stub() + jwt = pretend.stub(decode=pretend.raiser(exc), PyJWTError=PyJWTError) + monkeypatch.setattr( + service, "_get_key_for_token", pretend.call_recorder(lambda t: "fake-key") + ) + monkeypatch.setattr(services, "jwt", jwt) + + assert service.verify_signature_only(token) is None + + def test_verify_for_project(self, monkeypatch): + service = services.OIDCProviderService( + provider="fakeprovider", + issuer_url=pretend.stub(), + cache_url=pretend.stub(), + metrics=pretend.stub( + increment=pretend.call_recorder(lambda *a, **kw: None) + ), + ) + + token = pretend.stub() + claims = pretend.stub() + monkeypatch.setattr( + service, "verify_signature_only", pretend.call_recorder(lambda t: claims) + ) + + provider = pretend.stub(verify_claims=pretend.call_recorder(lambda c: True)) + project = pretend.stub(name="fakeproject", oidc_providers=[provider]) + + assert service.verify_for_project(token, project) + assert service.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.verify_for_project.attempt", + tags=["project:fakeproject", "provider:fakeprovider"], + ), + pretend.call( + "warehouse.oidc.verify_for_project.ok", + tags=["project:fakeproject", "provider:fakeprovider"], + ), + ] + assert service.verify_signature_only.calls == [pretend.call(token)] + assert provider.verify_claims.calls == [pretend.call(claims)] + + def test_verify_for_project_invalid_signature(self, monkeypatch): + service = services.OIDCProviderService( + provider="fakeprovider", + issuer_url=pretend.stub(), + cache_url=pretend.stub(), + metrics=pretend.stub( + increment=pretend.call_recorder(lambda *a, **kw: None) + ), + ) + + token = pretend.stub() + monkeypatch.setattr(service, "verify_signature_only", lambda t: None) + + project = pretend.stub(name="fakeproject") + + assert not service.verify_for_project(token, project) + assert service.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.verify_for_project.attempt", + tags=["project:fakeproject", "provider:fakeprovider"], + ), + pretend.call( + 
"warehouse.oidc.verify_for_project.invalid_signature", + tags=["project:fakeproject", "provider:fakeprovider"], + ), + ] + + def test_verify_for_project_invalid_claims(self, monkeypatch): + service = services.OIDCProviderService( + provider="fakeprovider", + issuer_url=pretend.stub(), + cache_url=pretend.stub(), + metrics=pretend.stub( + increment=pretend.call_recorder(lambda *a, **kw: None) + ), + ) + + token = pretend.stub() + claims = pretend.stub() + monkeypatch.setattr( + service, "verify_signature_only", pretend.call_recorder(lambda t: claims) + ) + + provider = pretend.stub(verify_claims=pretend.call_recorder(lambda c: False)) + project = pretend.stub(name="fakeproject", oidc_providers=[provider]) + + assert not service.verify_for_project(token, project) + assert service.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.verify_for_project.attempt", + tags=["project:fakeproject", "provider:fakeprovider"], + ), + pretend.call( + "warehouse.oidc.verify_for_project.invalid_claims", + tags=["project:fakeproject", "provider:fakeprovider"], + ), + ] + assert service.verify_signature_only.calls == [pretend.call(token)] + assert provider.verify_claims.calls == [pretend.call(claims)] def test_get_keyset_not_cached(self, monkeypatch, mockredis): service = services.OIDCProviderService( @@ -402,3 +540,25 @@ def test_get_key_refresh_fails(self, monkeypatch): tags=["provider:example", "key_id:fake-key-id"], ) ] + + def test_get_key_for_token(self, monkeypatch): + token = pretend.stub() + key = pretend.stub() + + service = services.OIDCProviderService( + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + monkeypatch.setattr(service, "get_key", pretend.call_recorder(lambda kid: key)) + + monkeypatch.setattr( + services.jwt, + "get_unverified_header", + pretend.call_recorder(lambda token: {"kid": "fake-key-id"}), + ) + + assert service._get_key_for_token(token) == key + assert service.get_key.calls == [pretend.call("fake-key-id")] + assert services.jwt.get_unverified_header.calls == [pretend.call(token)] diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -255,9 +255,12 @@ def __init__(self): "warehouse.account.global_login_ratelimit_string": "1000 per 5 minutes", "warehouse.account.email_add_ratelimit_string": "2 per day", "warehouse.account.password_reset_ratelimit_string": "5 per day", + "warehouse.manage.oidc.user_registration_ratelimit_string": "20 per day", + "warehouse.manage.oidc.ip_registration_ratelimit_string": "20 per day", "warehouse.two_factor_requirement.enabled": False, "warehouse.two_factor_mandate.available": False, "warehouse.two_factor_mandate.enabled": False, + "warehouse.oidc.enabled": False, } if environment == config.Environment.development: expected_settings.update( diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -245,6 +245,13 @@ def add_policy(name, filename): traverse="/{project_name}", domain=warehouse, ), + pretend.call( + "manage.project.settings.publishing", + "/manage/project/{project_name}/settings/publishing/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), pretend.call( "manage.project.delete_project", "/manage/project/{project_name}/delete_project/",
Update Warehouse's models to accommodate OIDC verification
Original thought: Warehouse's `User` model, at minimum, will probably need one or more new columns (possibly relations) for each supported provider.
Upon conversation with @di: we don't actually need the AuthN step, so we can probably get away without any changes to the `User` model. Instead, each project model will need a GitHub repository URL and workflow name to check incoming JWTs against.
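To make that check concrete, here is a minimal, hypothetical sketch of comparing an already-verified JWT's claims against a project's stored repository and workflow. The function and parameter names are invented for illustration; only the claim names come from GitHub's OIDC tokens:

```python
# Hypothetical sketch, not Warehouse's actual implementation. Assumes the
# token's signature and standard claims (iss/exp/aud/...) were verified
# upstream; this only cross-checks the project-specific fields.
def jwt_matches_project(
    claims: dict, expected_repository: str, expected_workflow: str
) -> bool:
    # GitHub's OIDC tokens carry "repository" ("owner/repo") and "workflow"
    # claims identifying where the token was minted.
    return (
        claims.get("repository") == expected_repository
        and claims.get("workflow") == expected_workflow
    )
```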
Open questions about the data model: * The relationship between GitHub repositories and PyPI projects is potentially many-to-one (one repo hosting many Python packages), so it probably doesn't make sense to have the tuple of `(repo_url, workflow_name)` be unique. It might even make sense to have it be a relation to a separate table, so that PyPI admins can perform more direct administration/triage (e.g., deleting the pairing for every project that references a particular repo in the event of a compromise.) * Because of the above many-to-one relationship, we probably need to use the `aud` field in the JWT after all, since all other default claims won't allow us to disambiguate between different projects being published in the same workflow. * Alternatively: we could *require* GitHub repos to use per-Python-project workflows, e.g. `(https://github.com/foo/bar, release-bar-api)` and `(https://github.com/foo/bar, release-bar-cli)` for `bar-api` and `bar-cli` respectively. But this might be imposing too much structure and might hinder adoption. TL;DR of the above: if we make the `(repo_url, workflow_name)` tuple unique for each `Project` model, then we don't have any ambiguities when it comes to creating the right temporary token for the workflow. Otherwise, we need *some* ambiguity resolution technique, whether that's setting `aud` or something else. > The relationship between GitHub repositories and PyPI projects is potentially many-to-one (one repo hosting many Python packages), so it probably doesn't make sense to have the tuple of (repo_url, workflow_name) be unique. It might even make sense to have it be a relation to a separate table, so that PyPI admins can perform more direct administration/triage (e.g., deleting the pairing for every project that references a particular repo in the event of a compromise.) Agreed. Not just multiple projects per repo, but that a single workflow in a single repo might publish multiple discrete projects. Considering that it might someday be useful to determine the inverse of the relationship ("which projects correspond to repo X?") I think this could be a separate `SourceRepository` table, and an association table between that and `Projects`. > Because of the above many-to-one relationship, we probably need to use the aud field in the JWT after all, since all other default claims won't allow us to disambiguate between different projects being published in the same workflow. We should be able to distinguish based on the artifact being uploaded (filename, metadata), no? Overall, we should think about this in a generic way and not a GitHub-specific way, e.g. as a relationship between a project and a source repository with a "type" of GitHub, which is the default type for now. > Considering that it might someday be useful to determine the inverse of the relationship ("which projects correspond to repo X?") I think this could be a separate `SourceRepository` table, and an association table between that and `Projects`. Sounds good! Just to clarify: are you thinking that `SourceRepository` should be GitHub only, or should it have some kind of column identifying the source host? If the latter, we should also give some thought to how we store the workflow name, since other source hosts won't necessarily have the same "workflow" concept for cross-checking an OIDC JWT against. > We should be able to distinguish based on the artifact being uploaded (filename, metadata), no? 
Oh, that's a good point -- I forgot that package distributions have well-structured filenames 😅 That does indeed solve the problem without needing the `aud`, which is nice. We'll probably want to still use some of GitHub's non-default claims, but that shouldn't either complicate or impede our ability to support other providers.

The latter, and I think `SourceRepository` is probably not ideal because we're conflating two things which happen to be almost the same with GitHub: the source location and the thing that can publish. So maybe this is a `Publisher` or `DelegatedPublisher` or `PublisherClaim` or `OIDCPublisher` or `Claimant` or something? And the actual values are claims, so we could store the key/value pairs that we expect to validate for the claim, so this could be something like:

```json
{
  "iss": "https://token.actions.githubusercontent.com",
  "sub": "repo:octo-org/octo-repo:environment:prod",
  "additional_claims": {
    "whatever": "whatever"
    ...
  }
}
```

Since, per https://openid.net/specs/openid-connect-core-1_0.html:

> The `sub` (subject) and `iss` (issuer) Claims, used together, are the only Claims that an RP can rely upon as a stable identifier for the End-User

Also... this is probably actually a many:many relationship. I can't see a reason why a project would be limited to a single delegated publisher. However, in practice, I imagine most users will only want a single publisher, so we can stick to 1 publisher per project for now, but keep in mind that we might make it many:many in the future.

https://github.com/pypa/warehouse/issues/10645#issuecomment-1021505427 gave us a clearer picture of what claims we want for the GitHub case. With that in mind, here's the data/relationship model I have in mind:

```
Project -> [Publisher] # a Project can have multiple Publishers
Publisher = {name, issuer, [PublisherClaim]} # name is a "pretty" name like "github"
PublisherClaim = {key, value} # key and value are unstructured strings
```

`PublisherClaim` is the murkiest part -- we need to be able to support things that aren't "claims" in the OAuth/OIDC sense, but are data needed to *verify* claims. For example, for a `github` publisher, the `[PublisherClaim]` table slice might look like:

**key**|**value**
:-----:|:-----:
repository|foobar
owner|woodruffw
owner\_id|1122334455
workflow\_name|release

As a result, this might not be the best terminology or the best way to store the "claims." Another option is to store them as a `claims`/`claim_verification` JSON blob on each `Publisher`.

Also N.B.: We might not want to store the `issuer` with each `Publisher`, since it's essentially duplicated en masse. We could instead key it based on `name`, which should be a relatively small set of supported OIDC providers. That's what I had in mind in part with the `OIDC_PROVIDERS` dict originally in #10628.

Similar to https://github.com/pypa/warehouse/issues/10617#issuecomment-1025929613: we'll want to verify the `aud`, but we don't need to store it (since it'll always be `pypi` or some other fixed string we pick).

> As a result, this might not be the best terminology or the best way to store the "claims."

I think I agree. What we really want is something that can store the necessary key/value details, and that for each claim to be verified, can generate a string from them. A very rough sketch:

```
class PublisherThingy:
    def __init__(self, repository, owner, owner_id, workflow_name):
        self.repository = repository
        self.owner = owner
    ...
    @claim
    def repository(self):
        return f"{self.owner}/{self.repository}"

    def verify_claims(self, identity_token):
        for claim, expected in self.claims.items():
            if identity_token.get(claim) != expected:
                return False
        return True
```
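For concreteness, here is one self-contained way the rough sketch above could actually run, assuming a tiny `@claim` registry. This is illustrative only — the decorator, class, and attribute names are assumptions, not what Warehouse ultimately shipped:

```python
# Illustrative expansion of the sketch above; the decorator, class, and
# attribute names are assumptions, not Warehouse's real API.
def claim(func):
    func._is_claim = True
    return func


class GitHubPublisher:
    def __init__(self, repository_name, owner, owner_id, workflow_name):
        self.repository_name = repository_name
        self.owner = owner
        self.owner_id = owner_id
        self.workflow_name = workflow_name

    @claim
    def repository(self):
        return f"{self.owner}/{self.repository_name}"

    @property
    def claims(self):
        # Every method marked with @claim, keyed by name and evaluated.
        return {
            name: getattr(self, name)()
            for name in dir(type(self))
            if getattr(getattr(type(self), name), "_is_claim", False)
        }

    def verify_claims(self, identity_token):
        # Every expected claim must be present in the token and match exactly.
        return all(
            identity_token.get(name) == value
            for name, value in self.claims.items()
        )
```

For example, `GitHubPublisher("warehouse", "pypa", 1, "release.yml").verify_claims({"repository": "pypa/warehouse"})` returns `True`.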
2022-02-14T20:48:27Z
[]
[]
pypi/warehouse
10,793
pypi__warehouse-10793
[ "5792" ]
3521a34af5277efdc0cd720bb65690cdad13f074
diff --git a/warehouse/accounts/auth_policy.py b/warehouse/accounts/auth_policy.py --- a/warehouse/accounts/auth_policy.py +++ b/warehouse/accounts/auth_policy.py @@ -79,18 +79,37 @@ def permits(self, context, principals, permission): # 2FA requireable, if 2FA is indeed required, and if the user has 2FA # enabled if subpolicy_permits and isinstance(context, TwoFactorRequireable): - if context.owners_require_2fa and not request.user.has_two_factor: + if ( + request.registry.settings["warehouse.two_factor_requirement.enabled"] + and context.owners_require_2fa + and not request.user.has_two_factor + ): return WarehouseDenied( "This project requires two factor authentication to be enabled " "for all contributors.", reason="owners_require_2fa", ) - if context.pypi_mandates_2fa and not request.user.has_two_factor: + if ( + request.registry.settings["warehouse.two_factor_mandate.enabled"] + and context.pypi_mandates_2fa + and not request.user.has_two_factor + ): return WarehouseDenied( "PyPI requires two factor authentication to be enabled " "for all contributors to this project.", reason="pypi_mandates_2fa", ) + if ( + request.registry.settings["warehouse.two_factor_mandate.available"] + and context.pypi_mandates_2fa + and not request.user.has_two_factor + ): + request.session.flash( + "This project is included in PyPI's two-factor mandate " + "for critical projects. In the future, you will be unable to " + "perform this action without enabling 2FA for your account", + queue="warning", + ) return subpolicy_permits diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -271,6 +271,29 @@ def configure(settings=None): default="5 per day", ) + # 2FA feature flags + maybe_set( + settings, + "warehouse.two_factor_requirement.enabled", + "TWOFACTORREQUIREMENT_ENABLED", + coercer=distutils.util.strtobool, + default=False, + ) + maybe_set( + settings, + "warehouse.two_factor_mandate.available", + "TWOFACTORMANDATE_AVAILABLE", + coercer=distutils.util.strtobool, + default=False, + ) + maybe_set( + settings, + "warehouse.two_factor_mandate.enabled", + "TWOFACTORMANDATE_ENABLED", + coercer=distutils.util.strtobool, + default=False, + ) + # Add the settings we use when the environment is set to development. 
if settings["warehouse.env"] == Environment.development: settings.setdefault("enforce_https", False) diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -297,3 +297,9 @@ def validate_macaroon_id(self, field): macaroon_id = field.data if self.macaroon_service.find_macaroon(macaroon_id) is None: raise wtforms.validators.ValidationError("No such macaroon") + + +class Toggle2FARequirementForm(forms.Form): + __params__ = ["two_factor_requirement_sentinel"] + + two_factor_requirement_sentinel = wtforms.HiddenField() diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -70,6 +70,7 @@ ProvisionTOTPForm, ProvisionWebAuthnForm, SaveAccountForm, + Toggle2FARequirementForm, ) from warehouse.packaging.models import ( File, @@ -975,7 +976,7 @@ def _key(project): } -@view_config( +@view_defaults( route_name="manage.project.settings", context=Project, renderer="manage/settings.html", @@ -983,13 +984,67 @@ def _key(project): permission="manage:project", has_translations=True, require_reauth=True, + require_methods=False, ) -def manage_project_settings(project, request): - return { - "project": project, - "MAX_FILESIZE": MAX_FILESIZE, - "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, - } +class ManageProjectSettingsViews: + def __init__(self, project, request): + self.project = project + self.request = request + self.toggle_2fa_requirement_form_class = Toggle2FARequirementForm + + @view_config(request_method="GET") + def manage_project_settings(self): + return { + "project": self.project, + "MAX_FILESIZE": MAX_FILESIZE, + "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, + "toggle_2fa_form": self.toggle_2fa_requirement_form_class(), + } + + @view_config( + request_method="POST", + request_param=Toggle2FARequirementForm.__params__, + require_reauth=True, + ) + def toggle_2fa_requirement(self): + if not self.request.registry.settings[ + "warehouse.two_factor_requirement.enabled" + ]: + raise HTTPNotFound + + if self.project.pypi_mandates_2fa: + self.request.session.flash( + "2FA requirement cannot be disabled for critical projects", + queue="error", + ) + elif self.project.owners_require_2fa: + self.project.owners_require_2fa = False + self.project.record_event( + tag="project:owners_require_2fa:disabled", + ip_address=self.request.remote_addr, + additional={"modified_by": self.request.user.username}, + ) + self.request.session.flash( + f"2FA requirement disabled for { self.project.name }", + queue="success", + ) + else: + self.project.owners_require_2fa = True + self.project.record_event( + tag="project:owners_require_2fa:enabled", + ip_address=self.request.remote_addr, + additional={"modified_by": self.request.user.username}, + ) + self.request.session.flash( + f"2FA requirement enabled for { self.project.name }", + queue="success", + ) + + return HTTPSeeOther( + self.request.route_path( + "manage.project.settings", project_name=self.project.name + ) + ) def get_user_role_in_project(project, user, request):
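For orientation, the three flags in the patch above are read from environment variables and coerced with `strtobool`, all defaulting to off; a staged rollout might look like this (illustrative values only, not a maintainer recommendation):

```python
# Illustrative only: the variable names are the ones registered in the patch
# above; any strtobool-accepted value ("true"/"false"/"1"/"0"/...) works.
import os

os.environ["TWOFACTORREQUIREMENT_ENABLED"] = "true"  # owners may require 2FA
os.environ["TWOFACTORMANDATE_AVAILABLE"] = "true"    # warn on critical projects
os.environ["TWOFACTORMANDATE_ENABLED"] = "false"     # hard mandate still off
```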
diff --git a/tests/unit/accounts/test_auth_policy.py b/tests/unit/accounts/test_auth_policy.py --- a/tests/unit/accounts/test_auth_policy.py +++ b/tests/unit/accounts/test_auth_policy.py @@ -161,6 +161,11 @@ def test_permits_if_non_2fa_requireable_context(self, monkeypatch): assert result == permits_result def test_permits_if_context_does_not_require_2fa(self, monkeypatch, db_request): + db_request.registry.settings = { + "warehouse.two_factor_mandate.enabled": True, + "warehouse.two_factor_mandate.available": True, + "warehouse.two_factor_requirement.enabled": True, + } get_current_request = pretend.call_recorder(lambda: db_request) monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) @@ -170,23 +175,67 @@ def test_permits_if_context_does_not_require_2fa(self, monkeypatch, db_request): ) policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy) context = ProjectFactory.create( - owners_require_2fa=False, pypi_mandates_2fa=False + owners_require_2fa=False, + pypi_mandates_2fa=False, ) result = policy.permits(context, pretend.stub(), pretend.stub()) assert result == permits_result - @pytest.mark.parametrize( - "owners_require_2fa, pypi_mandates_2fa", - [ - (True, False), - (False, True), - (True, True), - ], - ) + def test_flashes_if_context_requires_2fa_but_not_enabled( + self, monkeypatch, db_request + ): + db_request.registry.settings = { + "warehouse.two_factor_mandate.enabled": False, + "warehouse.two_factor_mandate.available": True, + "warehouse.two_factor_requirement.enabled": True, + } + db_request.session.flash = pretend.call_recorder(lambda m, queue: None) + db_request.user = pretend.stub(has_two_factor=False) + get_current_request = pretend.call_recorder(lambda: db_request) + monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) + + permits_result = Allowed("Because") + backing_policy = pretend.stub( + permits=pretend.call_recorder(lambda *a, **kw: permits_result) + ) + policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy) + context = ProjectFactory.create( + owners_require_2fa=False, + pypi_mandates_2fa=True, + ) + result = policy.permits(context, pretend.stub(), pretend.stub()) + + assert result == permits_result + assert db_request.session.flash.calls == [ + pretend.call( + "This project is included in PyPI's two-factor mandate " + "for critical projects. 
In the future, you will be unable to " + "perform this action without enabling 2FA for your account", + queue="warning", + ), + ] + + @pytest.mark.parametrize("owners_require_2fa", [True, False]) + @pytest.mark.parametrize("pypi_mandates_2fa", [True, False]) + @pytest.mark.parametrize("two_factor_requirement_enabled", [True, False]) + @pytest.mark.parametrize("two_factor_mandate_available", [True, False]) + @pytest.mark.parametrize("two_factor_mandate_enabled", [True, False]) def test_permits_if_user_has_2fa( - self, monkeypatch, owners_require_2fa, pypi_mandates_2fa, db_request + self, + monkeypatch, + owners_require_2fa, + pypi_mandates_2fa, + two_factor_requirement_enabled, + two_factor_mandate_available, + two_factor_mandate_enabled, + db_request, ): + db_request.registry.settings = { + "warehouse.two_factor_requirement.enabled": two_factor_requirement_enabled, + "warehouse.two_factor_mandate.available": two_factor_mandate_available, + "warehouse.two_factor_mandate.enabled": two_factor_mandate_enabled, + } user = pretend.stub(has_two_factor=True) db_request.user = user get_current_request = pretend.call_recorder(lambda: db_request) @@ -213,8 +262,17 @@ def test_permits_if_user_has_2fa( ], ) def test_denies_if_2fa_is_required_but_user_doesnt_have_2fa( - self, monkeypatch, owners_require_2fa, pypi_mandates_2fa, reason, db_request + self, + monkeypatch, + owners_require_2fa, + pypi_mandates_2fa, + reason, + db_request, ): + db_request.registry.settings = { + "warehouse.two_factor_requirement.enabled": owners_require_2fa, + "warehouse.two_factor_mandate.enabled": pypi_mandates_2fa, + } user = pretend.stub(has_two_factor=False) db_request.user = user get_current_request = pretend.call_recorder(lambda: db_request) diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2377,13 +2377,152 @@ class TestManageProjectSettings: def test_manage_project_settings(self): request = pretend.stub() project = pretend.stub() + view = views.ManageProjectSettingsViews(project, request) + form = pretend.stub + view.toggle_2fa_requirement_form_class = lambda: form - assert views.manage_project_settings(project, request) == { + assert view.manage_project_settings() == { "project": project, "MAX_FILESIZE": MAX_FILESIZE, "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, + "toggle_2fa_form": form, } + @pytest.mark.parametrize("enabled", [False, None]) + def test_toggle_2fa_requirement_feature_disabled(self, enabled): + request = pretend.stub( + registry=pretend.stub( + settings={"warehouse.two_factor_requirement.enabled": enabled} + ), + ) + + project = pretend.stub() + view = views.ManageProjectSettingsViews(project, request) + with pytest.raises(HTTPNotFound): + view.toggle_2fa_requirement() + + @pytest.mark.parametrize( + "owners_require_2fa, expected, expected_flash_calls", + [ + ( + False, + False, + [ + pretend.call( + "2FA requirement cannot be disabled for critical projects", + queue="error", + ) + ], + ), + ( + True, + True, + [ + pretend.call( + "2FA requirement cannot be disabled for critical projects", + queue="error", + ) + ], + ), + ], + ) + def test_toggle_2fa_requirement_critical( + self, + owners_require_2fa, + expected, + expected_flash_calls, + db_request, + ): + db_request.registry = pretend.stub( + settings={"warehouse.two_factor_requirement.enabled": True} + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda message, queue: None) + ) + db_request.route_path = 
pretend.call_recorder(lambda *a, **kw: "/foo/bar/") + db_request.user = pretend.stub(username="foo") + + project = ProjectFactory.create( + name="foo", + owners_require_2fa=owners_require_2fa, + pypi_mandates_2fa=True, + ) + view = views.ManageProjectSettingsViews(project, db_request) + + result = view.toggle_2fa_requirement() + + assert project.owners_require_2fa == expected + assert project.pypi_mandates_2fa + assert db_request.session.flash.calls == expected_flash_calls + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.status_code == 303 + assert result.headers["Location"] == "/foo/bar/" + + @pytest.mark.parametrize( + "owners_require_2fa, expected, expected_flash_calls, tag", + [ + ( + False, + True, + [pretend.call("2FA requirement enabled for foo", queue="success")], + "project:owners_require_2fa:enabled", + ), + ( + True, + False, + [pretend.call("2FA requirement disabled for foo", queue="success")], + "project:owners_require_2fa:disabled", + ), + ], + ) + def test_toggle_2fa_requirement_non_critical( + self, + owners_require_2fa, + expected, + expected_flash_calls, + tag, + db_request, + ): + db_request.registry = pretend.stub( + settings={"warehouse.two_factor_requirement.enabled": True} + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda message, queue: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/") + db_request.user = pretend.stub(username="foo") + + project = ProjectFactory.create( + name="foo", + owners_require_2fa=owners_require_2fa, + pypi_mandates_2fa=False, + ) + view = views.ManageProjectSettingsViews(project, db_request) + + result = view.toggle_2fa_requirement() + + assert project.owners_require_2fa == expected + assert not project.pypi_mandates_2fa + assert db_request.session.flash.calls == expected_flash_calls + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.status_code == 303 + assert result.headers["Location"] == "/foo/bar/" + + event = ( + db_request.db.query(ProjectEvent) + .join(ProjectEvent.project) + .filter(ProjectEvent.project_id == project.id) + .one() + ) + assert event.tag == tag + assert event.additional == {"modified_by": db_request.user.username} + def test_delete_project_no_confirm(self): project = pretend.stub(normalized_name="foo") request = pretend.stub( diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -244,6 +244,9 @@ def __init__(self): "warehouse.account.global_login_ratelimit_string": "1000 per 5 minutes", "warehouse.account.email_add_ratelimit_string": "2 per day", "warehouse.account.password_reset_ratelimit_string": "5 per day", + "warehouse.two_factor_requirement.enabled": False, + "warehouse.two_factor_mandate.available": False, + "warehouse.two_factor_mandate.enabled": False, } if environment == config.Environment.development: expected_settings.update(
Add ability to require 2FA for a given project
**What's the problem this feature will solve?**
Currently there is no way to require a certain level of security for a given project.
**Describe the solution you'd like**
Allow a project to require that 2FA is enabled before new owners/maintainers can be added. This should also prevent users that are not in compliance from performing the activities permitted by their role.
**Additional context**
~Blocked on #5790, otherwise this would allow any attacker to determine the 2FA status for a given user.~
Some UX questions to be resolved include:
* If a user wants to send an invitation and the recipient doesn't have 2FA on, what happens?
* If you get an invite and don't have 2FA on, what happens?
* In projects with multiple owners: can any owner turn that on? Maybe it's not allowed until all the existing owners/maintainers have turned on 2FA? Once turned on, can any owner turn it off by themselves?
* Will we expose anywhere, in public, the info that this is a requirement for this project? (Leaning towards no, since that allows malfeasors to discover which projects DON'T have this requirement.)
* What happens to an existing owner/maintainer of a project if they try to disable 2FA on their account? Maybe they retain maintainer/owner status but are blocked from actually performing any of their privileged actions? Would they be unable to create project-scoped API tokens, for instance?

So we'd want to research this with user tests and check prior art on other similar platforms. That, plus developing flows and templates, makes this a feature that might take something like 2 weeks of UX work. Until we get into it we won't know how much backend work is involved in accommodating flows and edge cases here, and how much PM work it'll take to document, publicize, and triage related issues, etc., but it feels to me like it might take ~6 days of backend and ~4 days of PM work.
It would also be great to have an admin panel to make it easy for a PyPI site administrator to check whether a specific project has this turned on (to help with troubleshooting and potentially with quiet outreach to improve the security of specific high-profile packages). (Doing that, including underpinnings and methodical unit & integration tests, might take about 5 days of work.)
(Roughly scoping this out as we look for additional funding.)
Has there been any further movement on the UX decisions here? Or would this need further scoping work to make those decisions?
Some answers to the above questions:

> If a user wants to send an invitation and the recipient doesn't have 2FA on, what happens?

The recipient receives the invitation regardless (so as not to reveal an individual user's 2FA status).

> If you get an invite and don't have 2FA on, what happens?

The recipient can accept the invitation, but can't manage the project until they enable 2FA.

> In projects with multiple owners: can any owner turn that on? Maybe it's not allowed until all the existing owners/maintainers have turned on 2FA?

Any owner can turn it on. Enabling it when not all collaborators have 2FA enabled will temporarily restrict their access to the project.

> Once turned on, can any owner turn it off by themselves?

Yes.

> Will we expose anywhere, in public, the info that this is a requirement for this project? (Leaning towards no, since that allows malfeasors to discover which projects DON'T have this requirement.)

Yes, we should publicly expose whether the project requires 2FA, to encourage more projects to adopt this setting. We should not expose whether an _individual_ has 2FA enabled. (Given that this feature doesn't currently exist, after it is created it would still be reasonable to assume that most projects don't have a 2FA requirement).

> What happens to an existing owner/maintainer of a project if they try to disable 2FA on their account? Maybe they retain maintainer/owner status but are blocked from actually performing any of their privileged actions? Would they be unable to create project-scoped API tokens, for instance?
They retain maintainer/owner status but are blocked from actually performing any of their privileged actions, including creating project-scoped API tokens.
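Condensing those answers (and the `permits` logic in this record's patch) into a single rule, with simplified names — a sketch for illustration, not the real Pyramid policy:

```python
# Simplified restatement: real Warehouse returns WarehouseDenied objects from
# a Pyramid authorization policy and also consults the three feature flags
# shown in the patch above.
def two_factor_gate(project, user):
    if user.has_two_factor:
        return "allowed"
    if project.pypi_mandates_2fa:
        return "denied: PyPI requires 2FA for all contributors to this project"
    if project.owners_require_2fa:
        return "denied: this project requires 2FA for all contributors"
    return "allowed"
```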
2022-02-18T22:51:31Z
[]
[]
pypi/warehouse
10,803
pypi__warehouse-10803
[ "4948", "10447", "10469" ]
b26deed0be553aade4dc929c4a115730eab81a43
diff --git a/bin/depchecker.py b/bin/depchecker.py
new file mode 100644
--- /dev/null
+++ b/bin/depchecker.py
@@ -0,0 +1,33 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from pip_api import parse_requirements
+
+left, right = sys.argv[1:3]
+left_reqs = parse_requirements(left).keys()
+right_reqs = parse_requirements(right).keys()
+
+extra_in_left = left_reqs - right_reqs
+extra_in_right = right_reqs - left_reqs
+
+if extra_in_left:
+    for dep in sorted(extra_in_left):
+        print("- {}".format(dep))
+
+if extra_in_right:
+    for dep in sorted(extra_in_right):
+        print("+ {}".format(dep))
+
+if extra_in_left or extra_in_right:
+    sys.exit(1)
diff --git a/bin/tests b/bin/tests --- a/bin/tests +++ b/bin/tests @@ -7,7 +7,7 @@ set -e export LC_ALL="${ENCODING:-en_US.UTF-8}" export LANG="${ENCODING:-en_US.UTF-8}" -COMMAND_ARGS=$@ +COMMAND_ARGS="$@" # Test the postgres connection while [ $# -gt 0 ]; do
Run everything in containers

```shellsession
$ make reformat
# Create our Python 3.6 virtual environment
rm -rf .state/env
python3.6 -m venv .state/env
make: python3.6: No such file or directory
make: *** [.state/env/pyvenv.cfg] Error 1
```

Circular dependency in Makefile

From https://github.com/pypa/warehouse/pull/10278#issuecomment-974902013:

> I believe this change has introduced a circular dependency, as perceived by these messages:
>
> ```shellsession
> $ make -v | head -1
> GNU Make 3.81
> $ make build
> make[1]: Circular requirements/docs.txt <- .state/env/pyvenv.cfg dependency dropped.
> ...
> ```
>
> For funsies, I tried the same with [remake](http://bashdb.sourceforge.net/remake/), which uses a newer version of `make`:
>
> ```shellsession
> $ remake --version | head -1
> GNU Make 4.3+dbg-1.5
> $ remake build
> remake[1]: Entering directory '/Users/miketheman/workspace/miketheman/warehouse'
> remake[1]: Circular requirements/docs.txt <- .state/env/pyvenv.cfg dependency dropped.
> remake[1]: Circular requirements/lint.txt <- .state/env/pyvenv.cfg dependency dropped.
> remake[1]: '.state/docker-build' is up to date.
> ...
> ```
>
> I don't know if this is harmful yet, but I'm having other issues getting started with development, and wondered if this is somehow related or not.

_Originally posted by @miketheman in https://github.com/pypa/warehouse/issues/10278#issuecomment-974902013_

Fix broken development setup

This removes the circular dependency in the Makefile added in #10278

Unblocks #10052
Fixes #10446
Fixes #10447
At the moment you'll need Python 3.6 installed on your machine to run this command. I'd recommend using [`pyenv`](https://github.com/pyenv/pyenv) to have multiple Python versions installed. If you're interested, we might want to look into a way to move this into a Docker container instead, to avoid this issue in the future.
Yes, it would be great! Same for linting... @di, I can work on this - should be pretty straightforward. I guess the question is whether you should be able to run stuff (especially build docs) without having to run docker. If that's the case, it makes sense to still preserve the venv on the host machine. _Or_ it could be either a separate target (IMO messy) or some kind of command line option or environment variable so you can select whether you want host or docker (or just default to docker if py3.6 is not found). Thoughts?
I think it's OK to assume that a Warehouse contributor has Docker installed. However, it's worth noting that Travis currently runs these commands (and the `test` command) without using a container -- [the way it does this is by invoking the `bin/tests` script directly instead of using the Makefile command](https://github.com/pypa/warehouse/blob/master/.travis.yml#L42), so it is necessary to "preserve" a way to run this without Docker, but it should only be necessary for CI.
I am also experiencing issues with this. I am unable to run `make lint` or `make reformat`. I manage my interpreters through `brew`. It recently moved me from 3.6 to 3.7. It would help me and other Homebrew users as well if this was in the docker container.
Now that #9083 is merged, the remaining things that aren't running in containers are:
- [ ] [`make docs`](https://github.com/pypa/warehouse/blob/961575f8405566f8d567d51a0ea928c7a9eecb63/Makefile#L114-L116)
- [ ] [`make deps`](https://github.com/pypa/warehouse/blob/961575f8405566f8d567d51a0ea928c7a9eecb63/Makefile#L122-L131)
- [ ] [`make translations`](https://github.com/pypa/warehouse/blob/961575f8405566f8d567d51a0ea928c7a9eecb63/Makefile#L175-L209) (and its subcommands)
Reposting my comment: I think we could mostly solve the problem by making the targets more granular, but at the core we need to solve something: in order to install pip-compile, we need to know which version to pin it to, which means we need to run pip-compile, and so on. If we install pip-compile without the hashes, it kinda defeats the purpose of using hashes in the first place.
So one solution could be to not disclose to `make` that the pip-compile target needs the pip-compile requirements file, considering that it doesn't often change, and the venv gets recreated once in a while anyway. So we would have:
- A target for venv creation, but not installation of dependencies
- A target for pip-tools that would depend on that one, and would actually use the requirements/pip-tools.txt file but wouldn't declare it
- requirements/*.txt would depend on that one
- The full environment target, the one everyone uses, would use the relevant .txt files and declare them.
I believe it would work (a rough Makefile sketch of this layout follows below).
Or, we could take this as an opportunity to move more towards #4948, since it doesn't make a ton of sense right now for us to compile dependencies on one platform (the host OS) to be used on a different platform (inside the container images), which removes the need to build dependencies in the virtual environment.
Why not vendorize `pip-compile`? For example with `pex`.
```python pip install pex pex pip-tools --console-script pip-compile --output-file pip-compile.pex ``` ``` -rwxr-xr-x 1 root root 4750609 Dec 7 10:53 pip-compile.pex ``` @di @ewdurbin please review. @ewjoachim addressed all comments. PTAL. @di @ewjoachim @ewdurbin ping?
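A rough sketch of the granular-target layout proposed above, with hypothetical paths and recipes — this is not Warehouse's actual Makefile, just an illustration of how the cycle could be broken:

```make
# Hypothetical sketch only; target names, paths, and recipes are illustrative.

# A target for venv creation, with no dependencies installed yet.
.state/env/pyvenv.cfg:
	python3 -m venv .state/env

# A pip-tools target that *uses* requirements/pip-tools.txt but deliberately
# does not declare it as a prerequisite -- this is what breaks the cycle.
.state/pip-tools: .state/env/pyvenv.cfg
	.state/env/bin/pip install -r requirements/pip-tools.txt
	touch $@

# Compiled requirements files depend on the pip-tools target.
requirements/%.txt: requirements/%.in .state/pip-tools
	.state/env/bin/pip-compile --output-file $@ $<

# The full-environment target everyone uses declares the .txt files it needs.
.PHONY: env
env: requirements/main.txt requirements/dev.txt
	.state/env/bin/pip install -r requirements/main.txt -r requirements/dev.txt
```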
2022-02-22T04:18:18Z
[]
[]
pypi/warehouse
10,848
pypi__warehouse-10848
[ "10847" ]
59dd616114c3d80ea00b7f77f9706b5edf3348b3
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import functools from email.headerregistry import Address @@ -65,7 +66,15 @@ def send_email(task, request, recipient, msg, success_event): task.retry(exc=exc) -def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=False): +def _send_email_to_user( + request, + user, + msg, + *, + email=None, + allow_unverified=False, + repeat_window=None, +): # If we were not given a specific email object, then we'll default to using # the User's primary email address. if email is None: @@ -78,6 +87,13 @@ def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=Fals if email is None or not (email.verified or allow_unverified): return + # If we've already sent this email within the repeat_window, don't send it. + if repeat_window is not None: + sender = request.find_service(IEmailSender) + last_sent = sender.last_sent(to=email.email, subject=msg.subject) + if last_sent and (datetime.datetime.now() - last_sent) <= repeat_window: + return + request.task(send_email).delay( _compute_recipient(user, email.email), { @@ -98,7 +114,12 @@ def _send_email_to_user(request, user, msg, *, email=None, allow_unverified=Fals ) -def _email(name, *, allow_unverified=False): +def _email( + name, + *, + allow_unverified=False, + repeat_window=None, +): """ This decorator is used to turn an e function into an email sending function! @@ -148,7 +169,12 @@ def wrapper(request, user_or_users, **kwargs): user, email = recipient, None _send_email_to_user( - request, user, msg, email=email, allow_unverified=allow_unverified + request, + user, + msg, + email=email, + allow_unverified=allow_unverified, + repeat_window=repeat_window, ) return context @@ -211,7 +237,11 @@ def send_token_compromised_email_leak(request, user, *, public_url, origin): return {"username": user.username, "public_url": public_url, "origin": origin} -@_email("basic-auth-with-2fa", allow_unverified=True) +@_email( + "basic-auth-with-2fa", + allow_unverified=True, + repeat_window=datetime.timedelta(days=1), +) def send_basic_auth_with_two_factor_email(request, user): return {} diff --git a/warehouse/email/interfaces.py b/warehouse/email/interfaces.py --- a/warehouse/email/interfaces.py +++ b/warehouse/email/interfaces.py @@ -24,3 +24,8 @@ def send(recipient, message): """ Sends an EmailMessage to the given recipient. """ + + def last_sent(to, subject): + """ + Determines when an email was last sent, if at all + """ diff --git a/warehouse/email/services.py b/warehouse/email/services.py --- a/warehouse/email/services.py +++ b/warehouse/email/services.py @@ -79,6 +79,10 @@ def send(self, recipient, message): ) ) + def last_sent(self, to, subject): + # We don't store previously sent emails, so nothing to comapre against + return None + @implementer(IEmailSender) class SESEmailSender: @@ -127,6 +131,19 @@ def send(self, recipient, message): ) ) + def last_sent(self, to, subject): + last_email = ( + self._db.query(SESEmailMessage) + .filter( + SESEmailMessage.to == to, + SESEmailMessage.subject == subject, + ) + .order_by(SESEmailMessage.created.desc()) + .first() + ) + if last_email: + return last_email.created + class ConsoleAndSMTPEmailSender(SMTPEmailSender): def send(self, recipient, message):
diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -30,6 +30,7 @@ from pyramid.i18n import TranslationString from pyramid.static import ManifestCacheBuster from pyramid_jinja2 import IJinja2Environment +from pyramid_mailer.mailer import DummyMailer from pytest_postgresql.config import get_config from pytest_postgresql.janitor import DatabaseJanitor from sqlalchemy import event @@ -38,6 +39,9 @@ from warehouse import admin, config, static from warehouse.accounts import services as account_services +from warehouse.accounts.interfaces import ITokenService +from warehouse.email import services as email_services +from warehouse.email.interfaces import IEmailSender from warehouse.macaroons import services as macaroon_services from warehouse.metrics import IMetricsService @@ -113,11 +117,14 @@ def find_service(self, iface=None, context=None, name=""): @pytest.fixture -def pyramid_services(metrics): +def pyramid_services(metrics, email_service, token_service): services = _Services() # Register our global services. services.register_service(metrics, IMetricsService, None, name="") + services.register_service(email_service, IEmailSender, None, name="") + services.register_service(token_service, ITokenService, None, name="password") + services.register_service(token_service, ITokenService, None, name="email") return services @@ -281,6 +288,13 @@ def token_service(app_config): return account_services.TokenService(secret="secret", salt="salt", max_age=21600) [email protected] +def email_service(): + return email_services.SMTPEmailSender( + mailer=DummyMailer(), sender="[email protected]" + ) + + class QueryRecorder: def __init__(self): self.queries = [] diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -19,7 +19,7 @@ from sqlalchemy.orm.exc import NoResultFound from warehouse import email -from warehouse.accounts.interfaces import ITokenService, IUserService +from warehouse.accounts.interfaces import IUserService from warehouse.email.interfaces import IEmailSender from warehouse.email.services import EmailMessage @@ -192,6 +192,30 @@ def test_doesnt_send_with_unverified(self, primary_email, address): assert request.task.calls == [] assert task.delay.calls == [] + def test_doesnt_send_within_reset_window(self, pyramid_request, pyramid_services): + email_service = pretend.stub( + last_sent=pretend.call_recorder( + lambda to, subject: datetime.datetime.now() + - datetime.timedelta(seconds=69) + ) + ) + pyramid_services.register_service(email_service, IEmailSender, None, name="") + + task = pretend.stub(delay=pretend.call_recorder(lambda *a, **kw: None)) + pyramid_request.task = pretend.call_recorder(lambda x: task) + + address = "[email protected]" + user = pretend.stub(primary_email=pretend.stub(email=address, verified=True)) + + msg = EmailMessage(subject="My Subject", body_text="My Body") + + email._send_email_to_user( + pyramid_request, user, msg, repeat_window=datetime.timedelta(seconds=420) + ) + + assert pyramid_request.task.calls == [] + assert task.delay.calls == [] + @pytest.mark.parametrize( ("username", "primary_email", "address", "expected"), [ @@ -424,9 +448,6 @@ def test_send_password_reset_email( stub_email = pretend.stub(email=email_addr, verified=verified) pyramid_request.method = "POST" token_service.dumps = pretend.call_recorder(lambda a: "TOKEN") - pyramid_request.find_service = pretend.call_recorder( - lambda *a, **kw: token_service - 
) subject_renderer = pyramid_config.testing_add_renderer( "email/password-reset/subject.txt" @@ -479,9 +500,6 @@ def test_send_password_reset_email( } ) ] - assert pyramid_request.find_service.calls == [ - pretend.call(ITokenService, name="password") - ] assert pyramid_request.task.calls == [pretend.call(send_email)] assert send_email.delay.calls == [ pretend.call( @@ -523,9 +541,6 @@ def test_email_verification_email( stub_email = pretend.stub(id="id", email="[email protected]", verified=False) pyramid_request.method = "POST" token_service.dumps = pretend.call_recorder(lambda a: "TOKEN") - pyramid_request.find_service = pretend.call_recorder( - lambda *a, **kw: token_service - ) subject_renderer = pyramid_config.testing_add_renderer( "email/verify-email/subject.txt" @@ -571,9 +586,6 @@ def test_email_verification_email( assert token_service.dumps.calls == [ pretend.call({"action": "email-verify", "email.id": str(stub_email.id)}) ] - assert pyramid_request.find_service.calls == [ - pretend.call(ITokenService, name="email") - ] assert pyramid_request.task.calls == [pretend.call(send_email)] assert send_email.delay.calls == [ pretend.call( diff --git a/tests/unit/email/test_services.py b/tests/unit/email/test_services.py --- a/tests/unit/email/test_services.py +++ b/tests/unit/email/test_services.py @@ -137,6 +137,12 @@ def test_send(self, sender_class): assert msg.recipients == ["[email protected]"] assert msg.sender == "DevPyPI <[email protected]>" + def test_last_sent(self, sender_class): + mailer = DummyMailer() + service = sender_class(mailer, sender="DevPyPI <[email protected]>") + + assert service.last_sent(to=pretend.stub(), subject=pretend.stub) is None + class TestConsoleAndSMTPEmailSender: def test_send(self, capsys): @@ -306,3 +312,53 @@ def test_send_with_unicode_and_html(self, db_session): assert em.from_ == "[email protected]" assert em.to == "[email protected]" assert em.subject == "This is a Subject" + + def test_last_sent(self, db_session): + to = "[email protected]" + subject = "I care about this" + + # Send some random emails + aws_client = pretend.stub( + send_raw_email=pretend.call_recorder( + lambda *a, **kw: {"MessageId": str(uuid.uuid4()) + "-ses"} + ) + ) + sender = SESEmailSender( + aws_client, sender="DevPyPI <[email protected]>", db=db_session + ) + for address in [to, "[email protected]"]: + for subject in [subject, "I do not care about this"]: + sender.send( + f"Foobar <{ to }>", + EmailMessage( + subject=subject, body_text="This is a plain text body" + ), + ) + + # Send the last email that we care about + resp = {"MessageId": str(uuid.uuid4()) + "-ses"} + aws_client = pretend.stub( + send_raw_email=pretend.call_recorder(lambda *a, **kw: resp) + ) + sender = SESEmailSender( + aws_client, sender="DevPyPI <[email protected]>", db=db_session + ) + sender.send( + f"Foobar <{ to }>", + EmailMessage(subject=subject, body_text="This is a plain text body"), + ) + + em = ( + db_session.query(SESEmailMessage) + .filter_by(message_id=resp["MessageId"]) + .one() + ) + + assert sender.last_sent(to, subject) == em.created + + def test_last_sent_none(self, db_session): + to = "[email protected]" + subject = "I care about this" + sender = SESEmailSender(pretend.stub(), sender=pretend.stub(), db=db_session) + + assert sender.last_sent(to, subject) is None
"Migrate to API tokens" sent many many times <!-- NOTE: This issue should be for problems with PyPI itself, including: * pypi.org * test.pypi.org * files.pythonhosted.org This issue should NOT be for a project installed from PyPI. If you are having an issue with a specific package, you should reach out to the maintainers of that project directly instead. Furthermore, this issue should NOT be for any non-PyPI properties (like python.org, docs.python.org, etc.) If your problem is related to search (a new or updated project doesn't appear in the PyPI search results), please wait for a couple of hours and check again before reporting it. The search index may take some time to be updated. --> **Describe the bug** I recently uploaded with `twine` and basic auth and was sent *12* emails . I uploaded a total of 4 files so at most I expect 4 emails and ideally only 1 **Expected behavior** not to receive 12 duplicate emails in quick succession **To Reproduce** upload a package using `twine` with basic auth **My Platform** ```console $ twine --version twine version 3.7.1 (importlib_metadata: 4.10.0, pkginfo: 1.8.2, requests: 2.27.1, requests-toolbelt: 0.9.1, tqdm: 4.62.3) $ ~/opt/venv/bin/python --version --version Python 3.8.10 (default, Nov 26 2021, 20:14:08) [GCC 9.3.0] $ lsb_release -a No LSB modules are available. Distributor ID: Ubuntu Description: Ubuntu 20.04.4 LTS Release: 20.04 Codename: focal ``` **Additional context** n/a <summary> a raw email <details> ``` Subject: [PyPI] Migrate to API tokens for uploading to PyPI From: PyPI <[email protected]> To: asottile <...> MIME-Version: 1.0 Content-Type: multipart/alternative; boundary="===============3208068886958749528==" Message-ID: <0101017f4330a776-e90c22a8-0638-478b-9aa3-cda048ea52e1-000000@us-west-2.amazonses.com> Date: Tue, 1 Mar 2022 01:55:36 +0000 Feedback-ID: 1.us-west-2.ZRFc7KXPaRh5E01FOWpg1s1VfcS7E0CO2O03ZZnJB3U=:AmazonSES X-SES-Outgoing: 2022.03.01-54.240.27.21 X-Area1Security-Origin: EXTERNAL X-Area1Security-Processed: 086babc1a6160fb7de2a5f52e72e77ec;2;NONE;2022-03-01T01:55:47 --===============3208068886958749528== Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable # What? During your recent upload or upload attempt to PyPI, we noticed you used ba= sic authentication (username & password). However, your account has two-fac= tor authentication (2FA) enabled. In the near future, PyPI will begin prohibiting uploads using basic authent= ication for accounts with two-factor authentication enabled. Instead, we wi= ll require API tokens to be used. # What should I do? First, generate an API token for your account or project at https://pypi.or= g/manage/account/token/. Then, use this token when publishing instead of yo= ur username and password. See https://pypi.org/help/#apitoken for help usin= g API tokens to publish. --===============3208068886958749528== Content-Type: text/html; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 <!DOCTYPE html> <html> <head> <meta charset=3D"utf-8"> <meta name=3D"viewport" content=3D"width=3Ddevice-width"> </head> <body> =20 <h3>What?</h3> <p> During your recent upload or upload attempt to PyPI, we noticed you used = basic authentication (username &amp; password). However, your account has t= wo-factor authentication (2FA) enabled. </p> <p> In the near future, PyPI will begin prohibiting uploads using basic authe= ntication for accounts with two-factor authentication enabled. Instead, we = will require API tokens to be used. 
</p> <h3>What should I do?</h3> <p> First, generate an API token for your account or project at https://pypi.= org/manage/account/token/. Then, use this token when publishing instead of = your username and password. See https://pypi.org/help/#apitoken for help us= ing API tokens to publish. </p> =20 </body> </html> --===============3208068886958749528==-- ``` </details> </summary>
2022-03-01T02:19:41Z
[]
[]
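The fix in this record de-duplicates on a (recipient, subject) pair against a `repeat_window` timedelta, consulting the email sender's `last_sent` lookup before dispatching the Celery task. A minimal standalone sketch of that pattern, assuming an in-memory log in place of warehouse's `SESEmailMessage` table (the address and subject below are illustrative):

```python
import datetime

# Hypothetical in-memory stand-in for the sent-email log that
# warehouse's SESEmailSender.last_sent() queries.
_sent_log = {}


def last_sent(to, subject):
    """Return when (to, subject) was last sent, or None."""
    return _sent_log.get((to, subject))


def send_once_per_window(to, subject, body, repeat_window=None):
    """Send unless the same (to, subject) went out within repeat_window."""
    now = datetime.datetime.now()
    if repeat_window is not None:
        previous = last_sent(to, subject)
        if previous is not None and (now - previous) <= repeat_window:
            return False  # suppressed as a duplicate
    _sent_log[(to, subject)] = now
    print(f"sending {subject!r} to {to}")  # stand-in for the delivery task
    return True


# Twelve uploads in quick succession now trigger a single email per day.
for _ in range(12):
    send_once_per_window(
        "[email protected]",
        "[PyPI] Migrate to API tokens for uploading to PyPI",
        "...",
        repeat_window=datetime.timedelta(days=1),
    )
```

Keying on the rendered subject rather than the template name means a localized or reworded subject starts a fresh window, which is the trade-off the patch accepts by reusing the existing sent-mail table.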
pypi/warehouse
10,859
pypi__warehouse-10859
[ "10849" ]
8921520cfee911bd6c8b29ad1160280b2043416d
diff --git a/warehouse/accounts/__init__.py b/warehouse/accounts/__init__.py --- a/warehouse/accounts/__init__.py +++ b/warehouse/accounts/__init__.py @@ -66,6 +66,13 @@ def _authenticate(userid, request): if user is None: return + if request.session.password_outdated(login_service.get_password_timestamp(userid)): + request.session.invalidate() + request.session.flash( + request._("Session invalidated by password change"), queue="error" + ) + return + principals = [] if user.is_superuser: diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py --- a/warehouse/accounts/interfaces.py +++ b/warehouse/accounts/interfaces.py @@ -246,6 +246,12 @@ def check_recovery_code(user_id, code): burned. """ + def get_password_timestamp(user_id): + """ + Returns POSIX timestamp corresponding to the datetime that the users password + was most recently updated + """ + class ITokenService(Interface): def dumps(data): diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -80,14 +80,17 @@ def __init__(self, session, *, ratelimiters=None, remote_addr, metrics): ) self.remote_addr = remote_addr self._metrics = metrics + self.cached_get_user = functools.lru_cache()(self._get_user) - @functools.lru_cache() - def get_user(self, userid): + def _get_user(self, userid): # TODO: We probably don't actually want to just return the database # object here. # TODO: We need some sort of Anonymous User. return self.db.query(User).options(joinedload(User.webauthn)).get(userid) + def get_user(self, userid): + return self.cached_get_user(userid) + @functools.lru_cache() def get_user_by_username(self, username): user_id = self.find_userid(username) @@ -579,6 +582,10 @@ def check_recovery_code(self, user_id, code): self._metrics.increment("warehouse.authentication.recovery_code.ok") return True + def get_password_timestamp(self, user_id): + user = self.get_user(user_id) + return user.password_date.timestamp() if user.password_date is not None else 0 + @implementer(ITokenService) class TokenService: diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -990,6 +990,9 @@ def _login_user(request, userid, two_factor_method=None, two_factor_label=None): }, ) request.session.record_auth_timestamp() + request.session.record_password_timestamp( + user_service.get_password_timestamp(userid) + ) return headers @@ -1064,5 +1067,8 @@ def reauthenticate(request, _form_class=ReAuthenticateForm): if request.method == "POST" and form.validate(): request.session.record_auth_timestamp() + request.session.record_password_timestamp( + user_service.get_password_timestamp(request.user.id) + ) return resp diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -361,6 +361,11 @@ def change_password(self): tag="account:password:change", ) send_password_change_email(self.request, self.request.user) + self.request.db.flush() # Ensure changes are persisted to DB + self.request.db.refresh(self.request.user) # Pickup new password_date + self.request.session.record_password_timestamp( + self.user_service.get_password_timestamp(self.request.user.id) + ) self.request.session.flash("Password updated", queue="success") return {**self.default_response, "change_password_form": form} diff --git a/warehouse/sessions.py b/warehouse/sessions.py --- a/warehouse/sessions.py +++ 
b/warehouse/sessions.py @@ -88,6 +88,7 @@ class Session(dict): _totp_secret_key = "_totp_secret" _webauthn_challenge_key = "_webauthn_challenge" _reauth_timestamp_key = "_reauth_timestamp" + _password_timestamp_key = "_password_timestamp" # A number of our methods need to be decorated so that they also call # self.changed() @@ -145,6 +146,20 @@ def record_auth_timestamp(self): self[self._reauth_timestamp_key] = datetime.datetime.now().timestamp() self.changed() + def record_password_timestamp(self, timestamp): + self[self._password_timestamp_key] = timestamp + self.changed() + + def password_outdated(self, current_password_timestamp): + stored_password_timestamp = self.get(self._password_timestamp_key) + + if stored_password_timestamp is None: + # This session predates invalidation by password reset... since + # we cannot say for sure, let it live its life. + return False + + return current_password_timestamp != stored_password_timestamp + def needs_reauthentication(self, time_to_reauth): reauth_timestamp = self.get(self._reauth_timestamp_key, 0) current_time = datetime.datetime.now().timestamp()
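A side detail of the patch above: `get_user` is no longer decorated with `@functools.lru_cache()` at class definition time; instead `_get_user` is wrapped per instance in `__init__`. A class-level `lru_cache` on a method keeps one shared cache keyed on `(self, userid)`, which pins every service instance in memory until entries are evicted; the per-instance wrapper is created and discarded with its (request-scoped) service. A sketch of the per-instance form, using a hypothetical `Service` class:

```python
import functools


class Service:
    def __init__(self, db):
        self.db = db
        # The cache lives on this instance rather than on the class,
        # so separate service instances never share cached rows.
        self.cached_get_user = functools.lru_cache()(self._get_user)

    def _get_user(self, userid):
        print(f"querying {self.db} for user {userid}")  # stand-in for a DB query
        return {"id": userid}

    def get_user(self, userid):
        return self.cached_get_user(userid)


svc = Service(db="session-1")
svc.get_user(1)  # hits the "database"
svc.get_user(1)  # answered from the per-instance cache, no second query
```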
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py --- a/tests/unit/accounts/test_core.py +++ b/tests/unit/accounts/test_core.py @@ -285,13 +285,21 @@ def test_via_basic_auth_compromised( class TestAuthenticate: @pytest.mark.parametrize( - ("is_superuser", "is_moderator", "is_psf_staff", "expected"), + ( + "is_superuser", + "is_moderator", + "is_psf_staff", + "password_out_of_date", + "expected", + ), [ - (False, False, False, []), + (False, False, False, False, []), + (False, False, False, True, None), ( True, False, False, + False, [ "group:admins", "group:moderators", @@ -303,12 +311,14 @@ class TestAuthenticate: False, True, False, + False, ["group:moderators", "group:with_admin_dashboard_access"], ), ( True, True, False, + False, [ "group:admins", "group:moderators", @@ -320,12 +330,14 @@ class TestAuthenticate: False, False, True, + False, ["group:psf_staff", "group:with_admin_dashboard_access"], ), ( False, True, True, + False, [ "group:moderators", "group:psf_staff", @@ -334,18 +346,44 @@ class TestAuthenticate: ), ], ) - def test_with_user(self, is_superuser, is_moderator, is_psf_staff, expected): + def test_with_user( + self, + pyramid_request, + pyramid_services, + is_superuser, + is_moderator, + is_psf_staff, + password_out_of_date, + expected, + ): user = pretend.stub( is_superuser=is_superuser, is_moderator=is_moderator, is_psf_staff=is_psf_staff, ) - service = pretend.stub(get_user=pretend.call_recorder(lambda userid: user)) - request = pretend.stub(find_service=lambda iface, context: service) + service = pretend.stub( + get_user=pretend.call_recorder(lambda userid: user), + get_password_timestamp=lambda userid: 0, + ) + pyramid_services.register_service(service, IUserService, None) + pyramid_request.session.password_outdated = lambda ts: password_out_of_date + pyramid_request.session.invalidate = pretend.call_recorder(lambda: None) + pyramid_request.session.flash = pretend.call_recorder( + lambda msg, queue=None: None + ) - assert accounts._authenticate(1, request) == expected + assert accounts._authenticate(1, pyramid_request) == expected assert service.get_user.calls == [pretend.call(1)] + if password_out_of_date: + assert pyramid_request.session.invalidate.calls == [pretend.call()] + assert pyramid_request.session.flash.calls == [ + pretend.call("Session invalidated by password change", queue="error") + ] + else: + assert pyramid_request.session.invalidate.calls == [] + assert pyramid_request.session.flash.calls == [] + def test_without_user(self): service = pretend.stub(get_user=pretend.call_recorder(lambda userid: None)) request = pretend.stub(find_service=lambda iface, context: service) diff --git a/tests/unit/accounts/test_services.py b/tests/unit/accounts/test_services.py --- a/tests/unit/accounts/test_services.py +++ b/tests/unit/accounts/test_services.py @@ -893,6 +893,20 @@ def test_regenerate_recovery_codes(self, user_service): assert len(new_codes) == 8 assert [c.id for c in initial_codes] != [c.id for c in new_codes] + def test_get_password_timestamp(self, user_service): + create_time = datetime.datetime.utcnow() + with freezegun.freeze_time(create_time): + user = UserFactory.create() + user.password_date = create_time + + assert user_service.get_password_timestamp(user.id) == create_time.timestamp() + + def test_get_password_timestamp_no_value(self, user_service): + user = UserFactory.create() + user.password_date = None + + assert user_service.get_password_timestamp(user.id) == 0 + class TestTokenService: def 
test_verify_service(self): diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -195,6 +195,7 @@ def test_post_validate_redirects( update_user=pretend.call_recorder(lambda *a, **kw: None), has_two_factor=lambda userid: False, record_event=pretend.call_recorder(lambda *a, **kw: None), + get_password_timestamp=lambda userid: 0, ) breach_service = pretend.stub(check_password=lambda password, tags=None: False) @@ -220,6 +221,7 @@ def test_post_validate_redirects( pyramid_request.session.record_auth_timestamp = pretend.call_recorder( lambda *args: None ) + pyramid_request.session.record_password_timestamp = lambda timestamp: None form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -287,6 +289,7 @@ def test_post_validate_no_redirects( update_user=lambda *a, **k: None, has_two_factor=lambda userid: False, record_event=pretend.call_recorder(lambda *a, **kw: None), + get_password_timestamp=lambda userid: 0, ) breach_service = pretend.stub(check_password=lambda password, tags=None: False) @@ -301,6 +304,7 @@ def test_post_validate_no_redirects( pyramid_request.session.record_auth_timestamp = pretend.call_recorder( lambda *args: None ) + pyramid_request.session.record_password_timestamp = lambda timestamp: None form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -574,6 +578,7 @@ def test_totp_auth( has_recovery_codes=lambda userid: has_recovery_codes, check_totp_value=lambda userid, totp_value: True, record_event=pretend.call_recorder(lambda *a, **kw: None), + get_password_timestamp=lambda userid: 0, ) new_session = {} @@ -589,6 +594,7 @@ def test_totp_auth( update=new_session.update, invalidate=pretend.call_recorder(lambda: None), new_csrf_token=pretend.call_recorder(lambda: None), + get_password_timestamp=lambda userid: 0, ) pyramid_request.set_property( @@ -597,6 +603,7 @@ def test_totp_auth( pyramid_request.session.record_auth_timestamp = pretend.call_recorder( lambda *args: None ) + pyramid_request.session.record_password_timestamp = lambda timestamp: None form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -1019,6 +1026,7 @@ def test_recovery_code_auth(self, monkeypatch, pyramid_request, redirect_url): has_recovery_codes=lambda userid: True, check_recovery_code=lambda userid, recovery_code_value: True, record_event=pretend.call_recorder(lambda *a, **kw: None), + get_password_timestamp=lambda userid: 0, ) new_session = {} @@ -1043,6 +1051,7 @@ def test_recovery_code_auth(self, monkeypatch, pyramid_request, redirect_url): pyramid_request.session.record_auth_timestamp = pretend.call_recorder( lambda *args: None ) + pyramid_request.session.record_password_timestamp = lambda timestamp: None form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), @@ -1266,6 +1275,7 @@ def test_register_redirect(self, db_request, monkeypatch): db_request.session.record_auth_timestamp = pretend.call_recorder( lambda *args: None ) + db_request.session.record_password_timestamp = lambda ts: None db_request.find_service = pretend.call_recorder( lambda *args, **kwargs: pretend.stub( csp_policy={}, @@ -1279,6 +1289,7 @@ def test_register_redirect(self, db_request, monkeypatch): add_email=add_email, check_password=lambda pw, tags=None: False, record_event=record_event, + get_password_timestamp=lambda uid: 0, ) ) db_request.route_path = pretend.call_recorder(lambda name: "/") @@ -2405,7 +2416,7 @@ def 
test_profile_public_email_returns_user(self): class TestReAuthentication: @pytest.mark.parametrize("next_route", [None, "/manage/accounts", "/projects/"]) def test_reauth(self, monkeypatch, pyramid_request, pyramid_services, next_route): - user_service = pretend.stub() + user_service = pretend.stub(get_password_timestamp=lambda uid: 0) response = pretend.stub() monkeypatch.setattr(views, "HTTPSeeOther", lambda url: response) @@ -2416,7 +2427,8 @@ def test_reauth(self, monkeypatch, pyramid_request, pyramid_services, next_route pyramid_request.session.record_auth_timestamp = pretend.call_recorder( lambda *args: None ) - pyramid_request.user = pretend.stub(username=pretend.stub()) + pyramid_request.session.record_password_timestamp = lambda ts: None + pyramid_request.user = pretend.stub(id=pretend.stub, username=pretend.stub()) pyramid_request.matched_route = pretend.stub(name=pretend.stub()) pyramid_request.matchdict = {"foo": "bar"} diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -597,6 +597,7 @@ def test_change_password(self, monkeypatch): user_service = pretend.stub( update_user=pretend.call_recorder(lambda *a, **kw: None), record_event=pretend.call_recorder(lambda *a, **kw: None), + get_password_timestamp=lambda uid: 0, ) request = pretend.stub( POST={ @@ -604,7 +605,10 @@ def test_change_password(self, monkeypatch): "new_password": new_password, "password_confirm": new_password, }, - session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + session=pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None), + record_password_timestamp=lambda ts: None, + ), find_service=lambda *a, **kw: user_service, user=pretend.stub( id=pretend.stub(), @@ -612,6 +616,10 @@ def test_change_password(self, monkeypatch): email=pretend.stub(), name=pretend.stub(), ), + db=pretend.stub( + flush=lambda: None, + refresh=lambda obj: None, + ), remote_addr="0.0.0.0", ) change_pwd_obj = pretend.stub( diff --git a/tests/unit/test_sessions.py b/tests/unit/test_sessions.py --- a/tests/unit/test_sessions.py +++ b/tests/unit/test_sessions.py @@ -318,6 +318,28 @@ def test_clear_webauthn_challenge(self): session.clear_webauthn_challenge() assert not session[session._webauthn_challenge_key] + def test_record_password_timestamp(self): + session = Session() + assert not session.should_save() + session.record_password_timestamp(1646230636) + + assert session[session._password_timestamp_key] == 1646230636 + assert session.should_save() + + @pytest.mark.parametrize( + ("stored", "current", "expected"), + [ + (1600000000, 0, True), + (1600000000, 1600000000, False), + (0, 1600000000, True), + (None, 1600000000, False), + ], + ) + def test_password_outdated(self, stored, current, expected): + session = Session() + session.record_password_timestamp(stored) + assert session.password_outdated(current) == expected + class TestSessionFactory: def test_initialize(self, monkeypatch):
Invalidate active sessions on password change In the event of a password change, all other active sessions should be invalidated.
@ewjoachim, perhaps you'd be interested in working on this? Let me try, but if you don't see a PR by the end of the week, consider it up for grabs :)
2022-03-02T16:31:59Z
[]
[]
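The patch in this record pins the user's `password_date` timestamp into the session at login and compares it on every authenticated request, so a password change made elsewhere fails the comparison and the session is invalidated. A minimal sketch of that comparison, using a toy session dict and user record in place of warehouse's services:

```python
import datetime


class Session(dict):
    _password_timestamp_key = "_password_timestamp"

    def record_password_timestamp(self, timestamp):
        self[self._password_timestamp_key] = timestamp

    def password_outdated(self, current_password_timestamp):
        stored = self.get(self._password_timestamp_key)
        if stored is None:
            # Session predates the feature; give it the benefit of the doubt.
            return False
        return current_password_timestamp != stored


user = {"password_date": datetime.datetime(2022, 1, 1)}  # hypothetical record

session = Session()
session.record_password_timestamp(user["password_date"].timestamp())
assert not session.password_outdated(user["password_date"].timestamp())

# A password change on another device bumps password_date...
user["password_date"] = datetime.datetime.now()
# ...so this session now fails the check and would be invalidated.
assert session.password_outdated(user["password_date"].timestamp())
```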
pypi/warehouse
11,122
pypi__warehouse-11122
[ "6255" ]
acc258d3033484829fa7cde22ecd8f7fa63100c9
diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py --- a/warehouse/macaroons/caveats.py +++ b/warehouse/macaroons/caveats.py @@ -11,6 +11,7 @@ # limitations under the License. import json +import time import pymacaroons @@ -24,43 +25,51 @@ class InvalidMacaroonError(Exception): class Caveat: def __init__(self, verifier): self.verifier = verifier + # TODO: Surface this failure reason to the user. + # See: https://github.com/pypa/warehouse/issues/9018 + self.failure_reason = None - def verify(self, predicate): - raise InvalidMacaroonError + def verify(self, predicate) -> bool: + return False def __call__(self, predicate): return self.verify(predicate) class V1Caveat(Caveat): - def verify_projects(self, projects): + def verify_projects(self, projects) -> bool: # First, ensure that we're actually operating in # the context of a package. if not isinstance(self.verifier.context, Project): - raise InvalidMacaroonError( + self.failure_reason = ( "project-scoped token used outside of a project context" ) + return False project = self.verifier.context if project.normalized_name in projects: return True - raise InvalidMacaroonError( + self.failure_reason = ( f"project-scoped token is not valid for project '{project.name}'" ) + return False - def verify(self, predicate): + def verify(self, predicate) -> bool: try: data = json.loads(predicate) except ValueError: - raise InvalidMacaroonError("malformatted predicate") + self.failure_reason = "malformatted predicate" + return False if data.get("version") != 1: - raise InvalidMacaroonError("invalidate version in predicate") + self.failure_reason = "invalid version in predicate" + return False permissions = data.get("permissions") if permissions is None: - raise InvalidMacaroonError("invalid permissions in predicate") + self.failure_reason = "invalid permissions in predicate" + return False if permissions == "user": # User-scoped tokens behave exactly like a user's normal credentials. @@ -68,11 +77,34 @@ def verify(self, predicate): projects = permissions.get("projects") if projects is None: - raise InvalidMacaroonError("invalid projects in predicate") + self.failure_reason = "invalid projects in predicate" + return False return self.verify_projects(projects) +class ExpiryCaveat(Caveat): + def verify(self, predicate): + try: + data = json.loads(predicate) + expiry = data["exp"] + not_before = data["nbf"] + except (KeyError, ValueError, TypeError): + self.failure_reason = "malformatted predicate" + return False + + if not expiry or not not_before: + self.failure_reason = "missing fields" + return False + + now = int(time.time()) + if now < not_before or now >= expiry: + self.failure_reason = "token is expired" + return False + + return True + + class Verifier: def __init__(self, macaroon, context, principals, permission): self.macaroon = macaroon @@ -83,6 +115,7 @@ def __init__(self, macaroon, context, principals, permission): def verify(self, key): self.verifier.satisfy_general(V1Caveat(self)) + self.verifier.satisfy_general(ExpiryCaveat(self)) try: return self.verifier.verify(self.macaroon, key) @@ -90,4 +123,4 @@ def verify(self, key): pymacaroons.exceptions.MacaroonInvalidSignatureException, Exception, # https://github.com/ecordell/pymacaroons/issues/50 ): - raise InvalidMacaroonError("invalid macaroon signature") + return False
diff --git a/tests/unit/macaroons/test_caveats.py b/tests/unit/macaroons/test_caveats.py --- a/tests/unit/macaroons/test_caveats.py +++ b/tests/unit/macaroons/test_caveats.py @@ -11,13 +11,16 @@ # limitations under the License. import json +import os +import time import pretend +import pymacaroons import pytest from pymacaroons.exceptions import MacaroonInvalidSignatureException -from warehouse.macaroons.caveats import Caveat, InvalidMacaroonError, V1Caveat, Verifier +from warehouse.macaroons.caveats import Caveat, ExpiryCaveat, V1Caveat, Verifier from ...common.db.packaging import ProjectFactory @@ -28,10 +31,8 @@ def test_creation(self): caveat = Caveat(verifier) assert caveat.verifier is verifier - with pytest.raises(InvalidMacaroonError): - caveat.verify(pretend.stub()) - with pytest.raises(InvalidMacaroonError): - caveat(pretend.stub()) + assert caveat.verify(pretend.stub()) is False + assert caveat(pretend.stub()) is False class TestV1Caveat: @@ -47,8 +48,7 @@ def test_verify_invalid_predicates(self, predicate, result): verifier = pretend.stub() caveat = V1Caveat(verifier) - with pytest.raises(InvalidMacaroonError): - caveat(predicate) + assert caveat(predicate) is False def test_verify_valid_predicate(self): verifier = pretend.stub() @@ -62,8 +62,8 @@ def test_verify_project_invalid_context(self): caveat = V1Caveat(verifier) predicate = {"version": 1, "permissions": {"projects": ["notfoobar"]}} - with pytest.raises(InvalidMacaroonError): - caveat(json.dumps(predicate)) + + assert caveat(json.dumps(predicate)) is False def test_verify_project_invalid_project_name(self, db_request): project = ProjectFactory.create(name="foobar") @@ -71,8 +71,8 @@ def test_verify_project_invalid_project_name(self, db_request): caveat = V1Caveat(verifier) predicate = {"version": 1, "permissions": {"projects": ["notfoobar"]}} - with pytest.raises(InvalidMacaroonError): - caveat(json.dumps(predicate)) + + assert caveat(json.dumps(predicate)) is False def test_verify_project_no_projects_object(self, db_request): project = ProjectFactory.create(name="foobar") @@ -83,8 +83,8 @@ def test_verify_project_no_projects_object(self, db_request): "version": 1, "permissions": {"somethingthatisntprojects": ["blah"]}, } - with pytest.raises(InvalidMacaroonError): - caveat(json.dumps(predicate)) + + assert caveat(json.dumps(predicate)) is False def test_verify_project(self, db_request): project = ProjectFactory.create(name="foobar") @@ -95,6 +95,56 @@ def test_verify_project(self, db_request): assert caveat(json.dumps(predicate)) is True +class TestExpiryCaveat: + @pytest.mark.parametrize( + "predicate", + [ + # invalid JSON + "invalid json", + # missing nbf and exp + '{"missing": "values"}', + # nbf and exp present, but null + '{"nbf": null, "exp": null}', + # nbf and exp present, but empty + '{"nbf": "", "exp": ""}', + # valid JSON, but wrong type + "[]", + ], + ) + def test_verify_invalid_predicates(self, predicate): + verifier = pretend.stub() + caveat = ExpiryCaveat(verifier) + + assert caveat(predicate) is False + + def test_verify_not_before(self): + verifier = pretend.stub() + caveat = ExpiryCaveat(verifier) + + not_before = int(time.time()) + 60 + expiry = not_before + 60 + predicate = json.dumps({"exp": expiry, "nbf": not_before}) + assert caveat(predicate) is False + + def test_verify_already_expired(self): + verifier = pretend.stub() + caveat = ExpiryCaveat(verifier) + + not_before = int(time.time()) - 10 + expiry = not_before - 5 + predicate = json.dumps({"exp": expiry, "nbf": not_before}) + assert 
caveat(predicate) is False + + def test_verify_ok(self): + verifier = pretend.stub() + caveat = ExpiryCaveat(verifier) + + not_before = int(time.time()) - 10 + expiry = int(time.time()) + 60 + predicate = json.dumps({"exp": expiry, "nbf": not_before}) + assert caveat(predicate) + + class TestVerifier: def test_creation(self): macaroon = pretend.stub() @@ -108,7 +158,7 @@ def test_creation(self): assert verifier.principals is principals assert verifier.permission is permission - def test_verify(self, monkeypatch): + def test_verify_invalid_signature(self, monkeypatch): verify = pretend.call_recorder( pretend.raiser(MacaroonInvalidSignatureException) ) @@ -120,6 +170,53 @@ def test_verify(self, monkeypatch): verifier = Verifier(macaroon, context, principals, permission) monkeypatch.setattr(verifier.verifier, "verify", verify) - with pytest.raises(InvalidMacaroonError): - verifier.verify(key) + assert verifier.verify(key) is False assert verify.calls == [pretend.call(macaroon, key)] + + @pytest.mark.parametrize( + ["caveats", "valid"], + [ + # Both V1 and expiry present and valid. + ( + [ + {"permissions": "user", "version": 1}, + {"exp": int(time.time()) + 3600, "nbf": int(time.time()) - 1}, + ], + True, + ), + # V1 only present and valid. + ([{"permissions": "user", "version": 1}], True), + # V1 and expiry present but V1 invalid. + ([{"permissions": "bad", "version": 1}], False), + # V1 and expiry present but expiry invalid. + ( + [ + {"permissions": "user", "version": 1}, + {"exp": int(time.time()) + 1, "nbf": int(time.time()) + 3600}, + ], + False, + ), + ], + ) + def test_verify(self, monkeypatch, caveats, valid): + key = os.urandom(32) + m = pymacaroons.Macaroon( + location="fakelocation", + identifier="fakeid", + key=key, + version=pymacaroons.MACAROON_V2, + ) + + for caveat in caveats: + m.add_first_party_caveat(json.dumps(caveat)) + + # Round-trip through serialization to ensure we're not clinging to any state. + serialized_macaroon = m.serialize() + deserialized_macaroon = pymacaroons.Macaroon.deserialize(serialized_macaroon) + + context = pretend.stub() + principals = pretend.stub() + permission = pretend.stub() + + verifier = Verifier(deserialized_macaroon, context, principals, permission) + assert verifier.verify(key) is valid
Add caveats to macaroons for expiration (time) and version **What's the problem this feature will solve?** This will allow further attenuating the permissions granted by an API key. **Describe the solution you'd like** Addition of two new types of caveat: project version (for uploads) and time (expiry).
This is a fine idea to add onto future work for #994 -- thank you! I believe it's out of scope for [our current funding for security improvements to PyPI](https://pyfound.blogspot.com/2018/12/upcoming-pypi-improvements-for-2019.html), sorry to say. Another potentially useful caveat would be IP address/range, for future consideration. @rcipkins is going to take a stab at this! @rcipkins -- how is this going? @brainwane I am almost finished, I just need to fix a couple things and add tests! @rcipkins - Great to hear! It's a good idea to push your branch to your GitHub fork and start a "work in progress" ("WIP") pull request. That way: * in case your computer breaks or is lost/stolen, you haven't lost your progress * other people can easily see that you're working on the issue, and avoid accidentally duplicating your effort * it's easy to ask "am I on the right track?" questions about your general architectural approach, before spending time polishing stuff and fixing tests To do this, push your branch to your fork, and create a pull request where the start of the PR title is "(WIP)". Looking forward to seeing it! Done! Thank you so much! NB: This is addressed by #11122. (We'll need separate UI work to make that caveat useable with user-minted macaroons, but that PR will add all of the backend logic needed.)
2022-04-05T20:43:26Z
[]
[]
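The `ExpiryCaveat` added in this record verifies a first-party caveat whose predicate is a JSON object carrying `nbf`/`exp` POSIX timestamps. A sketch of minting and verifying such a token with pymacaroons, reusing the predicate shape from the tests; `check_expiry` below is an illustrative satisfier, not warehouse's `Verifier`:

```python
import json
import os
import time

import pymacaroons

key = os.urandom(32)
m = pymacaroons.Macaroon(
    location="example.invalid",  # placeholder location
    identifier="example-token-id",
    key=key,
    version=pymacaroons.MACAROON_V2,
)
now = int(time.time())
# Token is valid from now until one hour from now.
m.add_first_party_caveat(json.dumps({"nbf": now, "exp": now + 3600}))


def check_expiry(predicate):
    # Mirrors ExpiryCaveat.verify: reject malformed, not-yet-valid, or expired.
    try:
        data = json.loads(predicate)
        expiry, not_before = data["exp"], data["nbf"]
    except (KeyError, ValueError, TypeError):
        return False
    if not expiry or not not_before:
        return False
    return not_before <= int(time.time()) < expiry


verifier = pymacaroons.Verifier()
verifier.satisfy_general(check_expiry)
# Round-trip through serialization, as the tests do, then verify.
assert verifier.verify(pymacaroons.Macaroon.deserialize(m.serialize()), key)
```

Because the caveat is first-party, the expiry travels inside the signed token itself; the server only needs the root key and a satisfier, with no extra database state per token.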
pypi/warehouse
11,184
pypi__warehouse-11184
[ "11070" ]
7da6d17deef7153177baaf06864a34db319600e3
diff --git a/warehouse/accounts/interfaces.py b/warehouse/accounts/interfaces.py --- a/warehouse/accounts/interfaces.py +++ b/warehouse/accounts/interfaces.py @@ -78,6 +78,12 @@ def get_user_by_email(email): if there is no user with that email. """ + def get_admins(): + """ + Return a list of user objects corresponding with admin users, or [] + if there is no admin users. + """ + def find_userid(username): """ Find the unique user identifier for the given username or None if there diff --git a/warehouse/accounts/services.py b/warehouse/accounts/services.py --- a/warehouse/accounts/services.py +++ b/warehouse/accounts/services.py @@ -101,6 +101,10 @@ def get_user_by_email(self, email): user_id = self.find_userid_by_email(email) return None if user_id is None else self.get_user(user_id) + @functools.lru_cache() + def get_admins(self): + return self.db.query(User).filter(User.is_superuser.is_(True)).all() + @functools.lru_cache() def find_userid(self, username): try: diff --git a/warehouse/admin/flags.py b/warehouse/admin/flags.py --- a/warehouse/admin/flags.py +++ b/warehouse/admin/flags.py @@ -18,6 +18,7 @@ class AdminFlagValue(enum.Enum): + DISABLE_ORGANIZATIONS = "disable-organizations" DISALLOW_DELETION = "disallow-deletion" DISALLOW_NEW_PROJECT_REGISTRATION = "disallow-new-project-registration" DISALLOW_NEW_UPLOAD = "disallow-new-upload" diff --git a/warehouse/admin/routes.py b/warehouse/admin/routes.py --- a/warehouse/admin/routes.py +++ b/warehouse/admin/routes.py @@ -20,6 +20,11 @@ def includeme(config): # General Admin pages config.add_route("admin.dashboard", "/admin/", domain=warehouse) + # Organization related Admin pages + config.add_route( + "admin.organization.approve", "/admin/organizations/approve/", domain=warehouse + ) + # User related Admin pages config.add_route("admin.user.list", "/admin/users/", domain=warehouse) config.add_route("admin.user.detail", "/admin/users/{user_id}/", domain=warehouse) diff --git a/warehouse/admin/views/organizations.py b/warehouse/admin/views/organizations.py new file mode 100644 --- /dev/null +++ b/warehouse/admin/views/organizations.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pyramid.view import view_config + + +# This is a placeholder so we can reference `admin.organization.approve` +# as a route in the admin-new-organization-requested email. +@view_config( + route_name="admin.organization.approve", + renderer="admin/organizations/approve.html", + permission="admin", + require_methods=False, + uses_session=True, + has_translations=True, +) +def approve(request): + # TODO + return {} diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -518,6 +518,9 @@ def configure(settings=None): # Register logged-in views config.include(".manage") + # Register our organization support. + config.include(".organizations") + # Allow the packaging app to register any services it has. 
config.include(".packaging") diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -186,6 +186,22 @@ def wrapper(request, user_or_users, **kwargs): return inner +# Email templates for administrators. + + +@_email("admin-new-organization-requested") +def send_admin_new_organization_requested_email( + request, user, *, organization_name, initiator_username +): + return { + "initiator_username": initiator_username, + "organization_name": organization_name, + } + + +# Email templates for users. + + @_email("password-reset", allow_unverified=True) def send_password_reset_email(request, user_and_email): user, _ = user_and_email @@ -267,6 +283,11 @@ def send_primary_email_change_email(request, user_and_email): } +@_email("new-organization-requested") +def send_new_organization_requested_email(request, user, *, organization_name): + return {"organization_name": organization_name} + + @_email("collaborator-added") def send_collaborator_added_email( request, email_recipients, *, user, submitter, project_name, role diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -27,6 +27,8 @@ ) from warehouse.i18n import localize as _ +# /manage/account/ forms + class RoleNameMixin: @@ -303,3 +305,100 @@ class Toggle2FARequirementForm(forms.Form): __params__ = ["two_factor_requirement_sentinel"] two_factor_requirement_sentinel = wtforms.HiddenField() + + +# /manage/organizations/ forms + + +class NewOrganizationNameMixin: + + name = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired( + message="Specify organization account name" + ), + wtforms.validators.Length( + max=50, + message=_( + "Choose an organization account name with 50 characters or less." + ), + ), + # the regexp below must match the CheckConstraint + # for the name field in organizations.model.Organization + wtforms.validators.Regexp( + r"^[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]$", + message=_( + "The organization account name is invalid. " + "Organization account names " + "must be composed of letters, numbers, " + "dots, hyphens and underscores. And must " + "also start and finish with a letter or number. " + "Choose a different organization account name." + ), + ), + ] + ) + + def validate_name(self, field): + if self.organization_service.find_organizationid(field.data) is not None: + raise wtforms.validators.ValidationError( + _( + "This organization account name is already being " + "used by another account. Choose a different " + "organization account name." + ) + ) + + +class CreateOrganizationForm(forms.Form, NewOrganizationNameMixin): + + __params__ = ["name", "display_name", "link_url", "description", "orgtype"] + + def __init__(self, *args, organization_service, **kwargs): + super().__init__(*args, **kwargs) + self.organization_service = organization_service + + display_name = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(message="Specify your organization name"), + wtforms.validators.Length( + max=100, + message=_( + "The organization name is too long. " + "Choose a organization name with 100 characters or less." + ), + ), + ] + ) + link_url = wtforms.URLField( + validators=[ + wtforms.validators.DataRequired(message="Specify your organization URL"), + wtforms.validators.Length( + max=400, + message=_( + "The organization URL is too long. " + "Choose a organization URL with 400 characters or less." 
+ ), + ), + ] + ) + description = wtforms.TextAreaField( + validators=[ + wtforms.validators.DataRequired( + message="Specify your organization description" + ), + wtforms.validators.Length( + max=400, + message=_( + "The organization description is too long. " + "Choose a organization description with 400 characters or less." + ), + ), + ] + ) + orgtype = wtforms.SelectField( + choices=[("Company", "Company"), ("Community", "Community")], + validators=[ + wtforms.validators.DataRequired(message="Select organization type"), + ], + ) diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -43,9 +43,11 @@ from warehouse.admin.flags import AdminFlagValue from warehouse.email import ( send_account_deletion_email, + send_admin_new_organization_requested_email, send_collaborator_removed_email, send_collaborator_role_changed_email, send_email_verification_email, + send_new_organization_requested_email, send_oidc_provider_added_email, send_oidc_provider_removed_email, send_password_change_email, @@ -70,6 +72,7 @@ ChangeRoleForm, ConfirmPasswordForm, CreateMacaroonForm, + CreateOrganizationForm, CreateRoleForm, DeleteMacaroonForm, DeleteTOTPForm, @@ -83,6 +86,7 @@ from warehouse.oidc.forms import DeleteProviderForm, GitHubProviderForm from warehouse.oidc.interfaces import TooManyOIDCRegistrations from warehouse.oidc.models import GitHubProvider, OIDCProvider +from warehouse.organizations.interfaces import IOrganizationService from warehouse.packaging.models import ( File, JournalEntry, @@ -968,6 +972,112 @@ def delete_macaroon(self): return HTTPSeeOther(redirect_to) +@view_defaults( + route_name="manage.organizations", + renderer="manage/organizations.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:user", + has_translations=True, +) +class ManageOrganizationsViews: + def __init__(self, request): + self.request = request + self.user_service = request.find_service(IUserService, context=None) + self.organization_service = request.find_service( + IOrganizationService, context=None + ) + + @property + def default_response(self): + return { + "create_organization_form": CreateOrganizationForm( + organization_service=self.organization_service, + ), + } + + @view_config(request_method="GET") + def manage_organizations(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + return self.default_response + + @view_config(request_method="POST", request_param=CreateOrganizationForm.__params__) + def create_organization(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + form = CreateOrganizationForm( + self.request.POST, + organization_service=self.organization_service, + ) + + if form.validate(): + data = form.data + organization = self.organization_service.add_organization(**data) + self.organization_service.record_event( + organization.id, + tag="organization:create", + additional={"created_by_user_id": str(self.request.user.id)}, + ) + self.organization_service.add_catalog_entry( + organization.name, organization.id + ) + self.organization_service.record_event( + organization.id, + tag="organization:catalog_entry:add", + additional={"submitted_by_user_id": str(self.request.user.id)}, + ) + self.organization_service.add_organization_role( + "Owner", self.request.user.id, organization.id + ) + self.organization_service.record_event( + organization.id, + 
tag="organization:organization_role:invite", + additional={ + "submitted_by_user_id": str(self.request.user.id), + "role_name": "Owner", + "target_user_id": str(self.request.user.id), + }, + ) + self.organization_service.record_event( + organization.id, + tag="organization:organization_role:accepted", + additional={ + "submitted_by_user_id": str(self.request.user.id), + "role_name": "Owner", + "target_user_id": str(self.request.user.id), + }, + ) + self.user_service.record_event( + self.request.user.id, + tag="account:organization_role:accepted", + additional={ + "submitted_by_user_id": str(self.request.user.id), + "organization_name": organization.name, + "role_name": "Owner", + }, + ) + send_admin_new_organization_requested_email( + self.request, + self.user_service.get_admins(), + organization_name=organization.name, + initiator_username=self.request.user.username, + ) + send_new_organization_requested_email( + self.request, self.request.user, organization_name=organization.name + ) + self.request.session.flash( + "Request for new organization submitted", queue="success" + ) + else: + return {"create_organization_form": form} + + return self.default_response + + @view_config( route_name="manage.projects", renderer="manage/projects.html", diff --git a/warehouse/migrations/versions/4a985d158c3c_add_organization_events_table.py b/warehouse/migrations/versions/4a985d158c3c_add_organization_events_table.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/4a985d158c3c_add_organization_events_table.py @@ -0,0 +1,78 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +add_organization_events_table + +Revision ID: 4a985d158c3c +Revises: 614a7fcb40ed +Create Date: 2022-04-14 02:25:50.805348 +""" + +import sqlalchemy as sa + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "4a985d158c3c" +down_revision = "614a7fcb40ed" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "organization_events", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("source_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("tag", sa.String(), nullable=False), + sa.Column( + "time", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("ip_address", sa.String(), nullable=False), + sa.Column("additional", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.ForeignKeyConstraint( + ["source_id"], + ["organizations.id"], + initially="DEFERRED", + deferrable=True, + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_organization_events_source_id"), + "organization_events", + ["source_id"], + unique=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index( + op.f("ix_organization_events_source_id"), table_name="organization_events" + ) + op.drop_table("organization_events") + # ### end Alembic commands ### diff --git a/warehouse/migrations/versions/614a7fcb40ed_create_organization_models.py b/warehouse/migrations/versions/614a7fcb40ed_create_organization_models.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/614a7fcb40ed_create_organization_models.py @@ -0,0 +1,271 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Create Organization models + +Revision ID: 614a7fcb40ed +Revises: 5e02c4f9f95c +Create Date: 2022-04-13 17:23:17.396325 +""" + +import sqlalchemy as sa +import sqlalchemy_utils + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "614a7fcb40ed" +down_revision = "5e02c4f9f95c" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "organizations", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("name", sa.Text(), nullable=False), + sa.Column("display_name", sa.Text(), nullable=False), + sa.Column("orgtype", sa.Text(), nullable=False), + sa.Column("link_url", sqlalchemy_utils.types.url.URLType(), nullable=False), + sa.Column("description", sa.Text(), nullable=False), + sa.Column( + "is_active", sa.Boolean(), nullable=False, server_default=sa.sql.false() + ), + sa.Column("is_approved", sa.Boolean(), nullable=True), + sa.Column( + "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("date_approved", sa.DateTime(), nullable=True), + sa.CheckConstraint( + "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'::text", + name="organizations_valid_name", + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_organizations_created"), "organizations", ["created"], unique=False + ) + op.create_table( + "organization_invitations", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("invite_status", sa.Text(), nullable=False), + sa.Column("token", sa.Text(), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organizations.id"], + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["user_id"], ["users.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "user_id", + "organization_id", + name="_organization_invitations_user_organization_uc", + ), + ) + op.create_index( + op.f("ix_organization_invitations_organization_id"), + "organization_invitations", + ["organization_id"], + unique=False, + ) + op.create_index( + op.f("ix_organization_invitations_user_id"), + "organization_invitations", + ["user_id"], + unique=False, + ) + op.create_index( + "organization_invitations_user_id_idx", + "organization_invitations", + ["user_id"], + unique=False, + ) + op.create_table( + "organization_name_catalog", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("normalized_name", sa.Text(), nullable=False), + sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organizations.id"], + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "normalized_name", + "organization_id", + name="_organization_name_catalog_normalized_name_organization_uc", + ), + ) + op.create_index( + "organization_name_catalog_normalized_name_idx", + "organization_name_catalog", + ["normalized_name"], + unique=False, + ) + op.create_index( + "organization_name_catalog_organization_id_idx", + "organization_name_catalog", + ["organization_id"], + unique=False, + ) + op.create_table( + "organization_project", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column( + "is_active", sa.Boolean(), nullable=False, server_default=sa.sql.false() + ), + sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), + 
sa.ForeignKeyConstraint( + ["organization_id"], + ["organizations.id"], + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["project_id"], ["projects.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "organization_id", + "project_id", + name="_organization_project_organization_project_uc", + ), + ) + op.create_index( + "organization_project_organization_id_idx", + "organization_project", + ["organization_id"], + unique=False, + ) + op.create_index( + "organization_project_project_id_idx", + "organization_project", + ["project_id"], + unique=False, + ) + op.create_table( + "organization_roles", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("role_name", sa.Text(), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organizations.id"], + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["user_id"], ["users.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "user_id", + "organization_id", + name="_organization_roles_user_organization_uc", + ), + ) + op.create_index( + "organization_roles_organization_id_idx", + "organization_roles", + ["organization_id"], + unique=False, + ) + op.create_index( + "organization_roles_user_id_idx", + "organization_roles", + ["user_id"], + unique=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("organization_roles_user_id_idx", table_name="organization_roles") + op.drop_index( + "organization_roles_organization_id_idx", table_name="organization_roles" + ) + op.drop_table("organization_roles") + op.drop_index( + "organization_project_project_id_idx", table_name="organization_project" + ) + op.drop_index( + "organization_project_organization_id_idx", table_name="organization_project" + ) + op.drop_table("organization_project") + op.drop_index( + "organization_name_catalog_organization_id_idx", + table_name="organization_name_catalog", + ) + op.drop_index( + "organization_name_catalog_name_idx", table_name="organization_name_catalog" + ) + op.drop_table("organization_name_catalog") + op.drop_index( + "organization_invitations_user_id_idx", table_name="organization_invitations" + ) + op.drop_index( + op.f("ix_organization_invitations_user_id"), + table_name="organization_invitations", + ) + op.drop_index( + op.f("ix_organization_invitations_organization_id"), + table_name="organization_invitations", + ) + op.drop_table("organization_invitations") + op.drop_index(op.f("ix_organizations_created"), table_name="organizations") + op.drop_table("organizations") + # ### end Alembic commands ### diff --git a/warehouse/migrations/versions/9f0f99509d92_add_disable_organizations_flag.py b/warehouse/migrations/versions/9f0f99509d92_add_disable_organizations_flag.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/9f0f99509d92_add_disable_organizations_flag.py @@ -0,0 +1,55 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add disable-organizations AdminFlag + +Revision ID: 9f0f99509d92 +Revises: 4a985d158c3c +Create Date: 2022-04-18 02:04:40.318843 +""" + +from alembic import op + +revision = "9f0f99509d92" +down_revision = "4a985d158c3c" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.execute( + """ + INSERT INTO admin_flags(id, description, enabled, notify) + VALUES ( + 'disable-organizations', + 'Disallow ALL functionality for Organizations', + TRUE, + FALSE + ) + """ + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.execute("DELETE FROM admin_flags WHERE id = 'disable-organizations'") + + # ### end Alembic commands ### diff --git a/warehouse/organizations/__init__.py b/warehouse/organizations/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/organizations/__init__.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.services import database_organization_factory + + +def includeme(config): + # Register our organization service + config.register_service_factory(database_organization_factory, IOrganizationService) diff --git a/warehouse/organizations/interfaces.py b/warehouse/organizations/interfaces.py new file mode 100644 --- /dev/null +++ b/warehouse/organizations/interfaces.py @@ -0,0 +1,67 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from zope.interface import Interface + + +class IOrganizationService(Interface): + def get_organization(organization_id): + """ + Return the organization object that represents the given organizationid, or None if + there is no organization for that ID. + """ + + def get_organization_by_name(name): + """ + Return the organization object corresponding with the given organization name, or None + if there is no organization with that name. + """ + + def find_organizationid(name): + """ + Find the unique organization identifier for the given name or None if there + is no organization with the given name. + """ + + def add_organization(name, display_name, orgtype, link_url, description): + """ + Accepts a organization object, and attempts to create an organization with those + attributes. + """ + + def add_catalog_entry(name, organization_id): + """ + Adds the organization name to the organization name catalog + """ + + def add_organization_role(role_name, user_id, organization_id): + """ + Adds the organization role to the specified user and org + """ + + def approve_organization(organization_id): + """ + Performs operations necessary to approve an organization + """ + + def decline_organization(organization_id): + """ + Performs operations necessary to reject approval of an organization + """ + + def record_event(organization_id, *, tag, additional=None): + """ + Creates a new Organization.Event for the given organization with the given + tag, IP address, and additional metadata. + + Returns the event. + """ diff --git a/warehouse/organizations/models.py b/warehouse/organizations/models.py new file mode 100644 --- /dev/null +++ b/warehouse/organizations/models.py @@ -0,0 +1,241 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import enum + +from sqlalchemy import ( + Boolean, + CheckConstraint, + Column, + DateTime, + Enum, + ForeignKey, + Index, + Text, + UniqueConstraint, + func, + orm, + sql, +) + +# from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy_utils.types.url import URLType + +from warehouse import db +from warehouse.accounts.models import User +from warehouse.events.models import HasEvents +from warehouse.utils.attrs import make_repr + + +class OrganizationRoleType(enum.Enum): + + BillingManager = "Billing Manager" + Manager = "Manager" + Member = "Member" + Owner = "Owner" + + +class OrganizationRole(db.Model): + + __tablename__ = "organization_roles" + __table_args__ = ( + Index("organization_roles_user_id_idx", "user_id"), + Index("organization_roles_organization_id_idx", "organization_id"), + UniqueConstraint( + "user_id", + "organization_id", + name="_organization_roles_user_organization_uc", + ), + ) + + __repr__ = make_repr("role_name") + + role_name = Column( + Enum(OrganizationRoleType, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + user_id = Column( + ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False + ) + organization_id = Column( + ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + + user = orm.relationship(User, lazy=False) + organization = orm.relationship("Organization", lazy=False) + + +class OrganizationProject(db.Model): + + __tablename__ = "organization_project" + __table_args__ = ( + Index("organization_project_organization_id_idx", "organization_id"), + Index("organization_project_project_id_idx", "project_id"), + UniqueConstraint( + "organization_id", + "project_id", + name="_organization_project_organization_project_uc", + ), + ) + + __repr__ = make_repr("project_id", "organization_id", "is_active") + + is_active = Column(Boolean, nullable=False, default=False) + organization_id = Column( + ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + project_id = Column( + ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + + organization = orm.relationship("Organization", lazy=False) + project = orm.relationship("Project", lazy=False) + + +class OrganizationType(enum.Enum): + + Community = "Community" + Company = "Company" + + +# TODO: For future use +# class OrganizationFactory: +# def __init__(self, request): +# self.request = request +# +# def __getitem__(self, organization): +# try: +# return ( +# self.request.db.query(Organization) +# .filter( +# Organization.normalized_name +# == func.normalize_pep426_name(organization) +# ) +# .one() +# ) +# except NoResultFound: +# raise KeyError from None + + +# TODO: Determine if this should also utilize SitemapMixin and TwoFactorRequireable +# class Organization(SitemapMixin, TwoFactorRequireable, HasEvents, db.Model): +class Organization(HasEvents, db.Model): + __tablename__ = "organizations" + __table_args__ = ( + CheckConstraint( + "name ~* '^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$'::text", + name="organizations_valid_name", + ), + ) + + __repr__ = make_repr("name") + + name = Column(Text, nullable=False) + normalized_name = orm.column_property(func.normalize_pep426_name(name)) + display_name = Column(Text, nullable=False) + orgtype = Column( + Enum(OrganizationType, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + link_url = Column(URLType, nullable=False) + description = Column(Text, nullable=False) + is_active = 
Column(Boolean, nullable=False, default=False) + is_approved = Column(Boolean) + created = Column( + DateTime(timezone=False), + nullable=False, + server_default=sql.func.now(), + index=True, + ) + date_approved = Column( + DateTime(timezone=False), + nullable=True, + onupdate=func.now(), + ) + + # TODO: Determine if cascade applies to any of these relationships + users = orm.relationship( + User, secondary=OrganizationRole.__table__, backref="organizations" # type: ignore # noqa + ) + projects = orm.relationship( + "Project", secondary=OrganizationProject.__table__, backref="organizations" # type: ignore # noqa + ) + + # TODO: + # def __acl__(self): + + +class OrganizationNameCatalog(db.Model): + + __tablename__ = "organization_name_catalog" + __table_args__ = ( + Index("organization_name_catalog_normalized_name_idx", "normalized_name"), + Index("organization_name_catalog_organization_id_idx", "organization_id"), + UniqueConstraint( + "normalized_name", + "organization_id", + name="_organization_name_catalog_normalized_name_organization_uc", + ), + ) + + __repr__ = make_repr("normalized_name", "organization_id") + + normalized_name = Column(Text, nullable=False) + organization_id = Column( + ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + + +class OrganizationInvitationStatus(enum.Enum): + + Pending = "pending" + Expired = "expired" + + +class OrganizationInvitation(db.Model): + + __tablename__ = "organization_invitations" + __table_args__ = ( + Index("organization_invitations_user_id_idx", "user_id"), + UniqueConstraint( + "user_id", + "organization_id", + name="_organization_invitations_user_organization_uc", + ), + ) + + __repr__ = make_repr("invite_status", "user", "organization") + + invite_status = Column( + Enum( + OrganizationInvitationStatus, values_callable=lambda x: [e.value for e in x] + ), + nullable=False, + ) + token = Column(Text, nullable=False) + user_id = Column( + ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + index=True, + ) + organization_id = Column( + ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + index=True, + ) + + user = orm.relationship(User, lazy=False) + organization = orm.relationship("Organization", lazy=False) diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py new file mode 100644 --- /dev/null +++ b/warehouse/organizations/services.py @@ -0,0 +1,147 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime + +from sqlalchemy.orm.exc import NoResultFound +from zope.interface import implementer + +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.models import ( + Organization, + OrganizationNameCatalog, + OrganizationRole, +) + + +@implementer(IOrganizationService) +class DatabaseOrganizationService: + def __init__(self, db_session, remote_addr): + self.db = db_session + self.remote_addr = remote_addr + + def get_organization(self, organization_id): + """ + Return the organization object that represents the given organizationid, + or None if there is no organization for that ID. + """ + return self.db.query(Organization).get(organization_id) + + def get_organization_by_name(self, name): + """ + Return the organization object corresponding with the given organization name, + or None if there is no organization with that name. + """ + organization_id = self.find_organizationid(name) + return ( + None if organization_id is None else self.get_organization(organization_id) + ) + + def find_organizationid(self, name): + """ + Find the unique organization identifier for the given normalized name or None + if there is no organization with the given name. + """ + try: + organization = ( + self.db.query(Organization.id) + .filter(Organization.normalized_name == name) + .one() + ) + except NoResultFound: + return + + return organization.id + + def add_organization(self, name, display_name, orgtype, link_url, description): + """ + Accepts a organization object, and attempts to create an organization with those + attributes. + """ + organization = Organization( + name=name, + display_name=display_name, + orgtype=orgtype, + link_url=link_url, + description=description, + ) + self.db.add(organization) + self.db.flush() + + return organization + + def add_catalog_entry(self, name, organization_id): + """ + Adds the organization name to the organization name catalog + """ + organization = self.get_organization(organization_id) + catalog_entry = OrganizationNameCatalog( + normalized_name=name, organization_id=organization.id + ) + + self.db.add(catalog_entry) + self.db.flush() + + return catalog_entry + + def add_organization_role(self, role_name, user_id, organization_id): + """ + Adds the organization role to the specified user and org + """ + organization = self.get_organization(organization_id) + role = OrganizationRole( + role_name=role_name, user_id=user_id, organization_id=organization.id + ) + + self.db.add(role) + self.db.flush() + + return role + + def approve_organization(self, organization_id): + """ + Performs operations necessary to approve an Organization + """ + organization = self.get_organization(organization_id) + organization.is_active = True + organization.is_approved = True + organization.date_approved = datetime.datetime.now() + # self.db.flush() + + return organization + + def decline_organization(self, organization_id): + """ + Performs operations necessary to reject approval of an Organization + """ + organization = self.get_organization(organization_id) + organization.is_approved = False + organization.date_approved = datetime.datetime.now() + # self.db.flush() + + return organization + + def record_event(self, organization_id, *, tag, additional=None): + """ + Creates a new Organization.Event for the given organization with the given + tag, IP address, and additional metadata. + + Returns the event. 
+ """ + organization = self.get_organization(organization_id) + return organization.record_event( + tag=tag, ip_address=self.remote_addr, additional=additional + ) + + +def database_organization_factory(context, request): + return DatabaseOrganizationService(request.db, remote_addr=request.remote_addr) diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -221,6 +221,7 @@ def includeme(config): domain=warehouse, ) config.add_route("manage.account.token", "/manage/account/token/", domain=warehouse) + config.add_route("manage.organizations", "/manage/organizations/", domain=warehouse) config.add_route("manage.projects", "/manage/projects/", domain=warehouse) config.add_route( "manage.project.settings",
diff --git a/tests/common/db/organizations.py b/tests/common/db/organizations.py new file mode 100644 --- /dev/null +++ b/tests/common/db/organizations.py @@ -0,0 +1,97 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import factory +import faker + +from warehouse.organizations.models import ( + Organization, + OrganizationInvitation, + OrganizationNameCatalog, + OrganizationProject, + OrganizationRole, +) + +from .accounts import UserFactory +from .base import WarehouseFactory +from .packaging import ProjectFactory + +fake = faker.Faker() + + +class OrganizationFactory(WarehouseFactory): + class Meta: + model = Organization + + id = factory.Faker("uuid4", cast_to=None) + name = factory.Faker("word") + normalized_name = factory.Faker("word") + display_name = factory.Faker("word") + orgtype = "Community" + link_url = factory.Faker("uri") + description = factory.Faker("sentence") + is_active = True + is_approved = False + created = factory.Faker( + "date_time_between_dates", + datetime_start=datetime.datetime(2020, 1, 1), + datetime_end=datetime.datetime(2022, 1, 1), + ) + date_approved = factory.Faker( + "date_time_between_dates", datetime_start=datetime.datetime(2020, 1, 1) + ) + + +class OrganizationEventFactory(WarehouseFactory): + class Meta: + model = Organization.Event + + source = factory.SubFactory(OrganizationFactory) + + +class OrganizationNameCatalogFactory(WarehouseFactory): + class Meta: + model = OrganizationNameCatalog + + name = factory.Faker("orgname") + organization_id = factory.Faker("uuid4", cast_to=None) + + +class OrganizationRoleFactory(WarehouseFactory): + class Meta: + model = OrganizationRole + + role_name = "Owner" + user = factory.SubFactory(UserFactory) + organization = factory.SubFactory(OrganizationFactory) + + +class OrganizationInvitationFactory(WarehouseFactory): + class Meta: + model = OrganizationInvitation + + invite_status = "pending" + token = "test_token" + user = factory.SubFactory(UserFactory) + organization = factory.SubFactory(OrganizationFactory) + + +class OrganizationProjectFactory(WarehouseFactory): + class Meta: + model = OrganizationProject + + id = factory.Faker("uuid4", cast_to=None) + is_active = True + organization = factory.SubFactory(OrganizationFactory) + project = factory.SubFactory(ProjectFactory) diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,6 +45,7 @@ from warehouse.email.interfaces import IEmailSender from warehouse.macaroons import services as macaroon_services from warehouse.metrics import IMetricsService +from warehouse.organizations import services as organization_services from .common.db import Session @@ -285,6 +286,13 @@ def macaroon_service(db_session): return macaroon_services.DatabaseMacaroonService(db_session) [email protected] +def organization_service(db_session, remote_addr): + return organization_services.DatabaseOrganizationService( + db_session, remote_addr=remote_addr + ) + + @pytest.fixture def token_service(app_config): return 
account_services.TokenService(secret="secret", salt="salt", max_age=21600) diff --git a/tests/functional/manage/test_views.py b/tests/functional/manage/test_views.py --- a/tests/functional/manage/test_views.py +++ b/tests/functional/manage/test_views.py @@ -15,7 +15,10 @@ from webob.multidict import MultiDict from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService +from warehouse.admin.flags import AdminFlagValue from warehouse.manage import views +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.models import OrganizationType from ...common.db.accounts import EmailFactory, UserFactory @@ -23,18 +26,80 @@ class TestManageAccount: def test_save_account(self, pyramid_services, user_service, db_request): breach_service = pretend.stub() + organization_service = pretend.stub() pyramid_services.register_service(user_service, IUserService, None) pyramid_services.register_service( breach_service, IPasswordBreachedService, None ) + pyramid_services.register_service( + organization_service, IOrganizationService, None + ) user = UserFactory.create(name="old name") EmailFactory.create(primary=True, verified=True, public=True, user=user) db_request.user = user db_request.method = "POST" db_request.path = "/manage/accounts/" db_request.POST = MultiDict({"name": "new name", "public_email": ""}) - views.ManageAccountViews(db_request).save_account() + views.ManageAccountViews(db_request).save_account() user = user_service.get_user(user.id) + assert user.name == "new name" assert user.public_email is None + + +class TestManageOrganizations: + def test_create_organization( + self, + pyramid_services, + user_service, + organization_service, + db_request, + monkeypatch, + ): + pyramid_services.register_service(user_service, IUserService, None) + pyramid_services.register_service( + organization_service, IOrganizationService, None + ) + user = UserFactory.create(name="old name") + EmailFactory.create(primary=True, verified=True, public=True, user=user) + db_request.user = user + db_request.method = "POST" + db_request.path = "/manage/organizations/" + db_request.POST = MultiDict( + { + "name": "psf", + "display_name": "Python Software Foundation", + "orgtype": "Community", + "link_url": "https://www.python.org/psf/", + "description": ( + "To promote, protect, and advance the Python programming " + "language, and to support and facilitate the growth of a " + "diverse and international community of Python programmers" + ), + } + ) + monkeypatch.setattr( + db_request, + "flags", + pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + ) + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr( + views, "send_admin_new_organization_requested_email", send_email + ) + monkeypatch.setattr(views, "send_new_organization_requested_email", send_email) + + views.ManageOrganizationsViews(db_request).create_organization() + organization = organization_service.get_organization_by_name( + db_request.POST["name"] + ) + + assert db_request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS), + ] + assert organization.name == db_request.POST["name"] + assert organization.display_name == db_request.POST["display_name"] + assert organization.orgtype == OrganizationType[db_request.POST["orgtype"]] + assert organization.link_url == db_request.POST["link_url"] + assert organization.description == db_request.POST["description"] diff --git a/tests/unit/accounts/test_services.py 
b/tests/unit/accounts/test_services.py --- a/tests/unit/accounts/test_services.py +++ b/tests/unit/accounts/test_services.py @@ -408,6 +408,14 @@ def test_get_user_by_email_failure(self, user_service): assert found_user is None + def test_get_admins(self, user_service): + admin = UserFactory.create(is_superuser=True) + user = UserFactory.create(is_superuser=False) + admins = user_service.get_admins() + + assert admin in admins + assert user not in admins + def test_disable_password(self, user_service): user = UserFactory.create() diff --git a/tests/unit/admin/test_routes.py b/tests/unit/admin/test_routes.py --- a/tests/unit/admin/test_routes.py +++ b/tests/unit/admin/test_routes.py @@ -26,6 +26,11 @@ def test_includeme(): assert config.add_route.calls == [ pretend.call("admin.dashboard", "/admin/", domain=warehouse), + pretend.call( + "admin.organization.approve", + "/admin/organizations/approve/", + domain=warehouse, + ), pretend.call("admin.user.list", "/admin/users/", domain=warehouse), pretend.call("admin.user.detail", "/admin/users/{user_id}/", domain=warehouse), pretend.call( diff --git a/tests/unit/admin/views/test_organizations.py b/tests/unit/admin/views/test_organizations.py new file mode 100644 --- /dev/null +++ b/tests/unit/admin/views/test_organizations.py @@ -0,0 +1,20 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pretend + +from warehouse.admin.views import organizations as views + + +class TestOrganizations: + def test_approve(self): + assert views.approve(pretend.stub()) == {} diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -413,6 +413,101 @@ def retry(exc): assert task.retry.calls == [pretend.call(exc=exc)] +class TestSendAdminNewOrganizationRequestedEmail: + def test_send_admin_new_organization_requested_email( + self, pyramid_request, pyramid_config, monkeypatch + ): + admin_user = pretend.stub( + id="admin", + username="admin", + name="PyPI Adminstrator", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + initiator_user = pretend.stub( + id="id", + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + organization_name = "example" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-requested/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-requested/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-requested/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=admin_user.id) + ) + ), + ) + pyramid_request.user = initiator_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_admin_new_organization_requested_email( + pyramid_request, + admin_user, + organization_name=organization_name, + initiator_username=initiator_user.username, + ) + + assert result == { + "organization_name": organization_name, + "initiator_username": initiator_user.username, + } + subject_renderer.assert_() + body_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + ) + html_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + ) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{admin_user.name} <{admin_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": admin_user.id, + "additional": { + "from_": "[email protected]", + "to": admin_user.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ) + ] + + class TestSendPasswordResetEmail: @pytest.mark.parametrize( ("verified", "email_addr"), @@ -1260,6 +1355,84 @@ def test_primary_email_change_email_unverified( assert send_email.delay.calls == [] +class TestSendNewOrganizationRequestedEmail: + def test_send_new_organization_requested_email( + self, pyramid_request, pyramid_config, monkeypatch + ): + initiator_user = pretend.stub( + id="id", + username="username", + name="", + email="[email 
protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + organization_name = "example" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-requested/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-requested/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-requested/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=initiator_user.id) + ) + ), + ) + pyramid_request.user = initiator_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_new_organization_requested_email( + pyramid_request, + initiator_user, + organization_name=organization_name, + ) + + assert result == {"organization_name": organization_name} + subject_renderer.assert_() + body_renderer.assert_(organization_name=organization_name) + html_renderer.assert_(organization_name=organization_name) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{initiator_user.username} <{initiator_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": initiator_user.id, + "additional": { + "from_": "[email protected]", + "to": initiator_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] + + class TestCollaboratorAddedEmail: def test_collaborator_added_email( self, pyramid_request, pyramid_config, monkeypatch diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -501,6 +501,44 @@ def test_validate_macaroon_id(self): assert form.validate() +class TestCreateOrganizationForm: + def test_creation(self): + organization_service = pretend.stub() + form = forms.CreateOrganizationForm( + organization_service=organization_service, + ) + + assert form.organization_service is organization_service + + def test_validate_name_with_no_organization(self): + organization_service = pretend.stub( + find_organizationid=pretend.call_recorder(lambda name: None) + ) + form = forms.CreateOrganizationForm(organization_service=organization_service) + field = pretend.stub(data="my_organization_name") + forms._ = lambda string: string + + form.validate_name(field) + + assert organization_service.find_organizationid.calls == [ + pretend.call("my_organization_name") + ] + + def test_validate_name_with_organization(self): + organization_service = pretend.stub( + find_organizationid=pretend.call_recorder(lambda name: 1) + ) + form = forms.CreateOrganizationForm(organization_service=organization_service) + field = pretend.stub(data="my_organization_name") + + with pytest.raises(wtforms.validators.ValidationError): + form.validate_name(field) + + assert organization_service.find_organizationid.calls == [ + 
pretend.call("my_organization_name") + ] + + class TestSaveAccountForm: def test_public_email_verified(self): email = pretend.stub(verified=True, public=False, email="[email protected]") diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -44,6 +44,7 @@ from warehouse.manage import views from warehouse.metrics.interfaces import IMetricsService from warehouse.oidc.interfaces import TooManyOIDCRegistrations +from warehouse.organizations.interfaces import IOrganizationService from warehouse.packaging.models import ( File, JournalEntry, @@ -77,12 +78,14 @@ class TestManageAccount: def test_default_response(self, monkeypatch, public_email, expected_public_email): breach_service = pretend.stub() user_service = pretend.stub() + organization_service = pretend.stub() name = pretend.stub() user_id = pretend.stub() request = pretend.stub( find_service=lambda iface, **kw: { IPasswordBreachedService: breach_service, IUserService: user_service, + IOrganizationService: organization_service, }[iface], user=pretend.stub(name=name, id=user_id, public_email=public_email), ) @@ -2302,6 +2305,296 @@ def test_delete_macaroon_records_events_for_each_project(self, monkeypatch): ] +class TestManageOrganizations: + def test_default_response(self, monkeypatch): + create_organization_obj = pretend.stub() + create_organization_cls = pretend.call_recorder( + lambda *a, **kw: create_organization_obj + ) + monkeypatch.setattr(views, "CreateOrganizationForm", create_organization_cls) + + request = pretend.stub( + user=pretend.stub(id=pretend.stub(), username=pretend.stub()), + find_service=lambda interface, **kw: { + IOrganizationService: pretend.stub(), + IUserService: pretend.stub(), + }[interface], + ) + + view = views.ManageOrganizationsViews(request) + + assert view.default_response == { + "create_organization_form": create_organization_obj, + } + + def test_manage_organizations(self, monkeypatch): + request = pretend.stub( + find_service=lambda *a, **kw: pretend.stub(), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + ) + + default_response = {"default": "response"} + monkeypatch.setattr( + views.ManageOrganizationsViews, "default_response", default_response + ) + view = views.ManageOrganizationsViews(request) + result = view.manage_organizations() + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS), + ] + assert result == default_response + + def test_manage_organizations_disallow_organizations(self, monkeypatch): + request = pretend.stub( + find_service=lambda *a, **kw: pretend.stub(), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + ) + + view = views.ManageOrganizationsViews(request) + with pytest.raises(HTTPNotFound): + view.manage_organizations() + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS), + ] + + def test_create_organization(self, monkeypatch): + admins = [] + user_service = pretend.stub( + get_admins=pretend.call_recorder(lambda *a, **kw: admins), + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + + organization = pretend.stub( + id=pretend.stub(), + name="psf", + display_name="Python Software Foundation", + orgtype="Community", + link_url="https://www.python.org/psf/", + description=( + "To promote, protect, and advance the Python programming " + "language, and to support and facilitate the growth of a " + "diverse and international community of 
Python programmers" + ), + is_active=False, + is_approved=None, + ) + catalog_entry = pretend.stub() + role = pretend.stub() + organization_service = pretend.stub( + add_organization=pretend.call_recorder(lambda *a, **kw: organization), + add_catalog_entry=pretend.call_recorder(lambda *a, **kw: catalog_entry), + add_organization_role=pretend.call_recorder(lambda *a, **kw: role), + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + + request = pretend.stub( + POST={ + "name": organization.name, + "display_name": organization.display_name, + "orgtype": organization.orgtype, + "link_url": organization.link_url, + "description": organization.description, + }, + domain=pretend.stub(), + user=pretend.stub( + id=pretend.stub(), + username=pretend.stub(), + has_primary_verified_email=True, + ), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + find_service=lambda interface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[interface], + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + remote_addr="0.0.0.0", + ) + + create_organization_obj = pretend.stub(validate=lambda: True, data=request.POST) + create_organization_cls = pretend.call_recorder( + lambda *a, **kw: create_organization_obj + ) + monkeypatch.setattr(views, "CreateOrganizationForm", create_organization_cls) + + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr( + views, "send_admin_new_organization_requested_email", send_email + ) + monkeypatch.setattr(views, "send_new_organization_requested_email", send_email) + + default_response = {"default": "response"} + monkeypatch.setattr( + views.ManageOrganizationsViews, "default_response", default_response + ) + + view = views.ManageOrganizationsViews(request) + result = view.create_organization() + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS), + ] + assert user_service.get_admins.calls == [pretend.call()] + assert organization_service.add_organization.calls == [ + pretend.call( + name=organization.name, + display_name=organization.display_name, + orgtype=organization.orgtype, + link_url=organization.link_url, + description=organization.description, + ) + ] + assert organization_service.add_catalog_entry.calls == [ + pretend.call( + organization.name, + organization.id, + ) + ] + assert organization_service.add_organization_role.calls == [ + pretend.call( + "Owner", + request.user.id, + organization.id, + ) + ] + assert organization_service.record_event.calls == [ + pretend.call( + organization.id, + tag="organization:create", + additional={"created_by_user_id": str(request.user.id)}, + ), + pretend.call( + organization.id, + tag="organization:catalog_entry:add", + additional={"submitted_by_user_id": str(request.user.id)}, + ), + pretend.call( + organization.id, + tag="organization:organization_role:invite", + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": "Owner", + "target_user_id": str(request.user.id), + }, + ), + pretend.call( + organization.id, + tag="organization:organization_role:accepted", + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": "Owner", + "target_user_id": str(request.user.id), + }, + ), + ] + assert user_service.record_event.calls == [ + pretend.call( + request.user.id, + tag="account:organization_role:accepted", + additional={ + "submitted_by_user_id": str(request.user.id), + "organization_name": organization.name, + "role_name": 
"Owner", + }, + ), + ] + assert send_email.calls == [ + pretend.call( + request, + admins, + organization_name=organization.name, + initiator_username=request.user.username, + ), + pretend.call( + request, + request.user, + organization_name=organization.name, + ), + ] + assert result == default_response + + def test_create_organization_validation_fails(self, monkeypatch): + admins = [] + user_service = pretend.stub( + get_admins=pretend.call_recorder(lambda *a, **kw: admins), + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + + organization = pretend.stub() + catalog_entry = pretend.stub() + role = pretend.stub() + organization_service = pretend.stub( + add_organization=pretend.call_recorder(lambda *a, **kw: organization), + add_catalog_entry=pretend.call_recorder(lambda *a, **kw: catalog_entry), + add_organization_role=pretend.call_recorder(lambda *a, **kw: role), + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + + request = pretend.stub( + POST={ + "name": None, + "display_name": None, + "orgtype": None, + "link_url": None, + "description": None, + }, + domain=pretend.stub(), + user=pretend.stub( + id=pretend.stub(), + username=pretend.stub(), + has_primary_verified_email=True, + ), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + find_service=lambda interface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[interface], + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + remote_addr="0.0.0.0", + ) + + create_organization_obj = pretend.stub( + validate=lambda: False, data=request.POST + ) + create_organization_cls = pretend.call_recorder( + lambda *a, **kw: create_organization_obj + ) + monkeypatch.setattr(views, "CreateOrganizationForm", create_organization_cls) + + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr( + views, "send_admin_new_organization_requested_email", send_email + ) + monkeypatch.setattr(views, "send_new_organization_requested_email", send_email) + + view = views.ManageOrganizationsViews(request) + result = view.create_organization() + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS), + ] + assert user_service.get_admins.calls == [] + assert organization_service.add_organization.calls == [] + assert organization_service.add_catalog_entry.calls == [] + assert organization_service.add_organization_role.calls == [] + assert organization_service.record_event.calls == [] + assert send_email.calls == [] + assert result == {"create_organization_form": create_organization_obj} + + def test_create_organizations_disallow_organizations(self, monkeypatch): + request = pretend.stub( + find_service=lambda *a, **kw: pretend.stub(), + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + ) + + view = views.ManageOrganizationsViews(request) + with pytest.raises(HTTPNotFound): + view.create_organization() + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS), + ] + + class TestManageProjects: def test_manage_projects(self, db_request): older_release = ReleaseFactory(created=datetime.datetime(2015, 1, 1)) diff --git a/tests/unit/organizations/__init__.py b/tests/unit/organizations/__init__.py new file mode 100644 --- /dev/null +++ b/tests/unit/organizations/__init__.py @@ -0,0 +1,18 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.services import database_organization_factory + + +def includeme(config): + config.register_service_factory(database_organization_factory, IOrganizationService) diff --git a/tests/unit/organizations/test_services.py b/tests/unit/organizations/test_services.py new file mode 100644 --- /dev/null +++ b/tests/unit/organizations/test_services.py @@ -0,0 +1,122 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from zope.interface.verify import verifyClass + +from warehouse.organizations import services +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.models import OrganizationRoleType + +from ...common.db.organizations import OrganizationFactory, UserFactory + + +def test_database_organizations_factory(): + db = pretend.stub() + remote_addr = pretend.stub() + context = pretend.stub() + request = pretend.stub(db=db, remote_addr=remote_addr) + + service = services.database_organization_factory(context, request) + assert service.db is db + assert service.remote_addr is remote_addr + + +class TestDatabaseOrganizationService: + def test_verify_service(self): + assert verifyClass(IOrganizationService, services.DatabaseOrganizationService) + + def test_service_creation(self, remote_addr): + session = pretend.stub() + service = services.DatabaseOrganizationService(session, remote_addr=remote_addr) + + assert service.db is session + assert service.remote_addr is remote_addr + + def test_get_organization(self, organization_service): + organization = OrganizationFactory.create() + assert organization_service.get_organization(organization.id) == organization + + def test_get_organization_by_name(self, organization_service): + organization = OrganizationFactory.create() + assert ( + organization_service.get_organization_by_name(organization.name) + == organization + ) + + def test_find_organizationid(self, organization_service): + organization = OrganizationFactory.create() + assert ( + organization_service.find_organizationid(organization.name) + == organization.id + ) + + def test_find_organizationid_nonexistent_org(self, organization_service): + assert organization_service.find_organizationid("a_spoon_in_the_matrix") is None + + def test_add_organization(self, organization_service): + organization = OrganizationFactory.create() + new_org = organization_service.add_organization( + name=organization.name, + display_name=organization.display_name, + orgtype=organization.orgtype, + link_url=organization.link_url, + 
description=organization.description, + ) + organization_service.db.flush() + org_from_db = organization_service.get_organization(new_org.id) + + assert org_from_db.name == organization.name + assert org_from_db.display_name == organization.display_name + assert org_from_db.orgtype == organization.orgtype + assert org_from_db.link_url == organization.link_url + assert org_from_db.description == organization.description + assert not org_from_db.is_active + + def test_add_catalog_entry(self, organization_service): + organization = OrganizationFactory.create() + + catalog_entry = organization_service.add_catalog_entry( + organization.normalized_name, organization.id + ) + assert catalog_entry.normalized_name == organization.normalized_name + assert catalog_entry.organization_id == organization.id + + def test_add_organization_role(self, organization_service, user_service): + user = UserFactory.create() + organization = OrganizationFactory.create() + + added_role = organization_service.add_organization_role( + OrganizationRoleType.Owner.value, user.id, organization.id + ) + assert added_role.role_name == OrganizationRoleType.Owner.value + assert added_role.user_id == user.id + assert added_role.organization_id == organization.id + + def test_approve_organization(self, organization_service): + organization = OrganizationFactory.create() + organization_service.approve_organization(organization.id) + + assert organization.is_active is True + assert organization.is_approved is True + assert organization.date_approved is not None + + def test_decline_organization(self, organization_service): + organization = OrganizationFactory.create() + organization_service.decline_organization(organization.id) + + assert organization.is_approved is False + assert organization.date_approved is not None + + # def test_record_event(self, organization_id, *, tag, additional=None): + # raise NotImplementedError diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -352,6 +352,7 @@ def __init__(self): pretend.call(".oidc"), pretend.call(".malware"), pretend.call(".manage"), + pretend.call(".organizations"), pretend.call(".packaging"), pretend.call(".redirects"), pretend.call(".routes"), diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -237,6 +237,9 @@ def add_policy(name, filename): pretend.call( "manage.account.token", "/manage/account/token/", domain=warehouse ), + pretend.call( + "manage.organizations", "/manage/organizations/", domain=warehouse + ), pretend.call("manage.projects", "/manage/projects/", domain=warehouse), pretend.call( "manage.project.settings",
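The tests above stub collaborators with the `pretend` library rather than mock objects: `pretend.stub()` builds an object carrying exactly the attributes named, and `pretend.call_recorder()` wraps a callable so every invocation is captured for later comparison. A self-contained sketch of that pattern, with made-up names for illustration:

```python
import pretend

# call_recorder() wraps a function and appends a pretend.call(...) entry to
# its .calls list on every invocation; assertions compare against that list.
send_email = pretend.call_recorder(lambda *a, **kw: None)

# stub() creates an object with only the attributes you name; anything else
# raises AttributeError, which keeps the test's assumptions explicit.
request = pretend.stub(
    user=pretend.stub(username="example-user"),
    session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)),
)

send_email(request, request.user, organization_name="psf")
request.session.flash("Request submitted", queue="success")

assert send_email.calls == [
    pretend.call(request, request.user, organization_name="psf")
]
assert request.session.flash.calls == [
    pretend.call("Request submitted", queue="success")
]
```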
Create an Organization account

Feature request for organization account project in PyPI.

Description | Users can create a new Organization account. The name of the account will reflect the Organization’s name.
-- | --
User value | The Organization account will allow entities to own Projects on PyPI and assign permission levels across team members and projects
Acceptance criteria | Email notification to PyPI Admin and account creator. Pending Organization visible in the PyPI Admin panel including necessary information for review. Approval and rejection actions in the PyPI Admin to either create the organization or reject it.
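An illustrative walk-through of the request → review → approve lifecycle from the acceptance criteria. The method names mirror the `IOrganizationService` interface in the patch above; the in-memory service below is a stand-in for sketching purposes (an assumption), not warehouse's SQLAlchemy-backed `DatabaseOrganizationService`:

```python
import datetime
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class Org:
    name: str
    is_active: bool = False
    is_approved: Optional[bool] = None  # None while awaiting admin review
    date_approved: Optional[datetime.datetime] = None
    events: list = field(default_factory=list)


class InMemoryOrganizationService:
    """Stand-in service; method names follow IOrganizationService."""

    def __init__(self):
        self._orgs = {}

    def add_organization(self, name):
        self._orgs[name] = Org(name)
        return self._orgs[name]

    def record_event(self, name, *, tag, additional=None):
        self._orgs[name].events.append((tag, additional or {}))

    def approve_organization(self, name):
        org = self._orgs[name]
        org.is_active = org.is_approved = True
        org.date_approved = datetime.datetime.now()
        return org


svc = InMemoryOrganizationService()
svc.add_organization("psf")
svc.record_event("psf", tag="organization:create",
                 additional={"created_by_user_id": "user-1"})
# ...emails go out to the PyPI admins and the requester, and the pending
# request appears in the admin panel for review...
svc.approve_organization("psf")
svc.record_event("psf", tag="organization:approve",
                 additional={"approved_by_user_id": "admin-1"})
assert svc._orgs["psf"].is_approved and svc._orgs["psf"].is_active
```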
2022-04-15T08:25:32Z
[]
[]
pypi/warehouse
11,208
pypi__warehouse-11208
[ "11287" ]
7cb17c9ca6dde44e0a6baf8e4a6fb5e3912f9b3d
diff --git a/warehouse/accounts/models.py b/warehouse/accounts/models.py --- a/warehouse/accounts/models.py +++ b/warehouse/accounts/models.py @@ -159,7 +159,6 @@ def recent_events(self): (User.Event.source_id == self.id) & (User.Event.time >= last_ninety) ) .order_by(User.Event.time.desc()) - .all() ) @property diff --git a/warehouse/admin/routes.py b/warehouse/admin/routes.py --- a/warehouse/admin/routes.py +++ b/warehouse/admin/routes.py @@ -22,7 +22,22 @@ def includeme(config): # Organization related Admin pages config.add_route( - "admin.organization.approve", "/admin/organizations/approve/", domain=warehouse + "admin.organization.list", "/admin/organizations/", domain=warehouse + ) + config.add_route( + "admin.organization.detail", + "/admin/organizations/{organization_id}/", + domain=warehouse, + ) + config.add_route( + "admin.organization.approve", + "/admin/organizations/{organization_id}/approve/", + domain=warehouse, + ) + config.add_route( + "admin.organization.decline", + "/admin/organizations/{organization_id}/decline/", + domain=warehouse, ) # User related Admin pages diff --git a/warehouse/admin/views/organizations.py b/warehouse/admin/views/organizations.py --- a/warehouse/admin/views/organizations.py +++ b/warehouse/admin/views/organizations.py @@ -10,19 +10,308 @@ # See the License for the specific language governing permissions and # limitations under the License. +import shlex + +from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage +from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther from pyramid.view import view_config +from sqlalchemy import or_ + +from warehouse.accounts.interfaces import IUserService +from warehouse.admin.flags import AdminFlagValue +from warehouse.email import ( + send_admin_new_organization_approved_email, + send_admin_new_organization_declined_email, + send_new_organization_approved_email, + send_new_organization_declined_email, +) +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.models import Organization +from warehouse.utils.paginate import paginate_url_factory + + +@view_config( + route_name="admin.organization.list", + renderer="admin/organizations/list.html", + permission="moderator", + uses_session=True, +) +def organization_list(request): + if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + q = request.params.get("q", "") + terms = shlex.split(q) + + try: + page_num = int(request.params.get("page", 1)) + except ValueError: + raise HTTPBadRequest("'page' must be an integer.") from None + + organizations_query = request.db.query(Organization).order_by( + Organization.normalized_name + ) + + if q: + filters = [] + for term in terms: + # Examples: + # - search individual words or "whole phrase" in any field + # - name:psf + # - organization:python + # - url:.org + # - description:word + # - description:"whole phrase" + # - is:approved + # - is:declined + # - is:submitted + # - is:active + # - is:inactive + try: + field, value = term.lower().split(":", 1) + except ValueError: + field, value = "", term + if field == "name": + # Add filter for `name` or `normalized_name` fields. + filters.append( + [ + Organization.name.ilike(f"%{value}%"), + Organization.normalized_name.ilike(f"%{value}%"), + ] + ) + elif field == "org" or field == "organization": + # Add filter for `display_name` field. 
+ filters.append(Organization.display_name.ilike(f"%{value}%")) + elif field == "url" or field == "link_url": + # Add filter for `link_url` field. + filters.append(Organization.link_url.ilike(f"%{value}%")) + elif field == "desc" or field == "description": + # Add filter for `description` field. + filters.append(Organization.description.ilike(f"%{value}%")) + elif field == "is": + # Add filter for `is_approved` or `is_active` field. + if "approved".startswith(value): + filters.append(Organization.is_approved == True) # noqa: E712 + elif "declined".startswith(value): + filters.append(Organization.is_approved == False) # noqa: E712 + elif "submitted".startswith(value): + filters.append(Organization.is_approved == None) # noqa: E711 + elif "active".startswith(value): + filters.append(Organization.is_active == True) # noqa: E712 + elif "inactive".startswith(value): + filters.append(Organization.is_active == False) # noqa: E712 + else: + # Add filter for any field. + filters.append( + [ + Organization.name.ilike(f"%{term}%"), + Organization.normalized_name.ilike(f"%{term}%"), + Organization.display_name.ilike(f"%{term}%"), + Organization.link_url.ilike(f"%{term}%"), + Organization.description.ilike(f"%{term}%"), + ] + ) + # Use AND to add each filter. Use OR to combine subfilters. + for filter_or_subfilters in filters: + if isinstance(filter_or_subfilters, list): + # Add list of subfilters combined with OR. + organizations_query = organizations_query.filter( + or_(*filter_or_subfilters) + ) + else: + # Add single filter. + organizations_query = organizations_query.filter(filter_or_subfilters) + + organizations = SQLAlchemyORMPage( + organizations_query, + page=page_num, + items_per_page=25, + url_maker=paginate_url_factory(request), + ) + + return {"organizations": organizations, "query": q, "terms": terms} + + +@view_config( + route_name="admin.organization.detail", + require_methods=False, + renderer="admin/organizations/detail.html", + permission="admin", + has_translations=True, + uses_session=True, + require_csrf=True, + require_reauth=True, +) +def organization_detail(request): + if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + + organization_id = request.matchdict["organization_id"] + organization = organization_service.get_organization(organization_id) + if organization is None: + raise HTTPNotFound + + create_event = ( + organization.events.filter(Organization.Event.tag == "organization:create") + .order_by(Organization.Event.time.desc()) + .first() + ) + user = user_service.get_user(create_event.additional["created_by_user_id"]) + + if organization.is_approved is True: + approve_event = ( + organization.events.filter(Organization.Event.tag == "organization:approve") + .order_by(Organization.Event.time.desc()) + .first() + ) + admin = user_service.get_user(approve_event.additional["approved_by_user_id"]) + elif organization.is_approved is False: + decline_event = ( + organization.events.filter(Organization.Event.tag == "organization:decline") + .order_by(Organization.Event.time.desc()) + .first() + ) + admin = user_service.get_user(decline_event.additional["declined_by_user_id"]) + else: + admin = None + + return { + "admin": admin, + "organization": organization, + "user": user, + } -# This is a placeholder so we can reference `admin.organization.approve` -# as a route in the 
admin-new-organization-requested email. @view_config( route_name="admin.organization.approve", + require_methods=["POST"], renderer="admin/organizations/approve.html", permission="admin", - require_methods=False, + has_translations=True, uses_session=True, + require_csrf=True, + require_reauth=True, +) +def organization_approve(request): + if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + + organization_id = request.matchdict["organization_id"] + organization = organization_service.get_organization(organization_id) + if organization is None: + raise HTTPNotFound + elif organization.name != request.params.get("organization_name"): + request.session.flash("Wrong confirmation input", queue="error") + return HTTPSeeOther( + request.route_path( + "admin.organization.detail", organization_id=organization.id + ) + ) + + create_event = ( + organization.events.filter(Organization.Event.tag == "organization:create") + .order_by(Organization.Event.time.desc()) + .first() + ) + user = user_service.get_user(create_event.additional["created_by_user_id"]) + + message = request.params.get("message", "") + + organization_service.approve_organization(organization.id) + organization_service.record_event( + organization.id, + tag="organization:approve", + additional={"approved_by_user_id": str(request.user.id)}, + ) + send_admin_new_organization_approved_email( + request, + user_service.get_admins(), + organization_name=organization.name, + initiator_username=user.username, + message=message, + ) + send_new_organization_approved_email( + request, + user, + organization_name=organization.name, + message=message, + ) + request.session.flash( + f'Request for "{organization.name}" organization approved', queue="success" + ) + + return HTTPSeeOther( + request.route_path("admin.organization.detail", organization_id=organization.id) + ) + + +@view_config( + route_name="admin.organization.decline", + require_methods=["POST"], + renderer="admin/organizations/decline.html", + permission="admin", has_translations=True, + uses_session=True, + require_csrf=True, + require_reauth=True, ) -def approve(request): - # TODO - return {} +def organization_decline(request): + if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + + organization_id = request.matchdict["organization_id"] + organization = organization_service.get_organization(organization_id) + if organization is None: + raise HTTPNotFound + elif organization.name != request.params.get("organization_name"): + request.session.flash("Wrong confirmation input", queue="error") + return HTTPSeeOther( + request.route_path( + "admin.organization.detail", organization_id=organization.id + ) + ) + + create_event = ( + organization.events.filter(Organization.Event.tag == "organization:create") + .order_by(Organization.Event.time.desc()) + .first() + ) + user = user_service.get_user(create_event.additional["created_by_user_id"]) + + message = request.params.get("message", "") + + organization_service.decline_organization(organization.id) + organization_service.record_event( + organization.id, + tag="organization:decline", + additional={"declined_by_user_id": str(request.user.id)}, + ) + 
send_admin_new_organization_declined_email( + request, + user_service.get_admins(), + organization_name=organization.name, + initiator_username=user.username, + message=message, + ) + send_new_organization_declined_email( + request, + user, + organization_name=organization.name, + message=message, + ) + request.session.flash( + f'Request for "{organization.name}" organization declined', queue="success" + ) + + return HTTPSeeOther( + request.route_path("admin.organization.detail", organization_id=organization.id) + ) diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -420,6 +420,7 @@ def configure(settings=None): jglobals.setdefault("now", "warehouse.utils:now") # And some enums to reuse in the templates + jglobals.setdefault("AdminFlagValue", "warehouse.admin.flags:AdminFlagValue") jglobals.setdefault( "RoleInvitationStatus", "warehouse.packaging.models:RoleInvitationStatus" ) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -191,10 +191,33 @@ def wrapper(request, user_or_users, **kwargs): @_email("admin-new-organization-requested") def send_admin_new_organization_requested_email( - request, user, *, organization_name, initiator_username + request, user, *, organization_name, initiator_username, organization_id ): return { "initiator_username": initiator_username, + "organization_id": organization_id, + "organization_name": organization_name, + } + + +@_email("admin-new-organization-approved") +def send_admin_new_organization_approved_email( + request, user, *, organization_name, initiator_username, message="" +): + return { + "initiator_username": initiator_username, + "message": message, + "organization_name": organization_name, + } + + +@_email("admin-new-organization-declined") +def send_admin_new_organization_declined_email( + request, user, *, organization_name, initiator_username, message="" +): + return { + "initiator_username": initiator_username, + "message": message, "organization_name": organization_name, } @@ -288,6 +311,26 @@ def send_new_organization_requested_email(request, user, *, organization_name): return {"organization_name": organization_name} +@_email("new-organization-approved") +def send_new_organization_approved_email( + request, user, *, organization_name, message="" +): + return { + "message": message, + "organization_name": organization_name, + } + + +@_email("new-organization-declined") +def send_new_organization_declined_email( + request, user, *, organization_name, message="" +): + return { + "message": message, + "organization_name": organization_name, + } + + @_email("collaborator-added") def send_collaborator_added_email( request, email_recipients, *, user, submitter, project_name, role diff --git a/warehouse/events/models.py b/warehouse/events/models.py --- a/warehouse/events/models.py +++ b/warehouse/events/models.py @@ -70,7 +70,7 @@ def __init_subclass__(cls, /, **kwargs): @declared_attr def events(cls): # noqa: N805 - return orm.relationship(cls.Event, cascade="all, delete-orphan", lazy=True) + return orm.relationship(cls.Event, cascade="all, delete-orphan", lazy="dynamic") def record_event(self, *, tag, ip_address, additional=None): session = orm.object_session(self) diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -1065,6 +1065,7 @@ def create_organization(self): self.user_service.get_admins(), 
organization_name=organization.name, initiator_username=self.request.user.username, + organization_id=organization.id, ) send_new_organization_requested_email( self.request, self.request.user, organization_name=organization.name diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py --- a/warehouse/organizations/services.py +++ b/warehouse/organizations/services.py @@ -124,6 +124,7 @@ def decline_organization(self, organization_id): Performs operations necessary to reject approval of an Organization """ organization = self.get_organization(organization_id) + organization.is_active = False organization.is_approved = False organization.date_approved = datetime.datetime.now() # self.db.flush()
diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -41,6 +41,7 @@ from warehouse import admin, config, static from warehouse.accounts import services as account_services from warehouse.accounts.interfaces import ITokenService +from warehouse.admin.flags import AdminFlag, AdminFlagValue from warehouse.email import services as email_services from warehouse.email.interfaces import IEmailSender from warehouse.macaroons import services as macaroon_services @@ -350,6 +351,16 @@ def db_request(pyramid_request, db_session): return pyramid_request [email protected]() +def enable_organizations(db_request): + flag = db_request.db.query(AdminFlag).get( + AdminFlagValue.DISABLE_ORGANIZATIONS.value + ) + flag.enabled = False + yield + flag.enabled = True + + class _TestApp(_webtest.TestApp): def xmlrpc(self, path, method, *args): body = xmlrpc.client.dumps(args, methodname=method) diff --git a/tests/unit/accounts/test_models.py b/tests/unit/accounts/test_models.py --- a/tests/unit/accounts/test_models.py +++ b/tests/unit/accounts/test_models.py @@ -110,10 +110,8 @@ def test_recent_events(self, db_session): time=datetime.datetime.now() - datetime.timedelta(days=91), ) - assert len(user.events) == 2 - assert len(user.recent_events) == 1 - assert user.events == [recent_event, stale_event] - assert user.recent_events == [recent_event] + assert user.events.all() == [recent_event, stale_event] + assert user.recent_events.all() == [recent_event] def test_regular_user_not_prohibited_password_reset(self, db_session): user = DBUserFactory.create() diff --git a/tests/unit/admin/test_routes.py b/tests/unit/admin/test_routes.py --- a/tests/unit/admin/test_routes.py +++ b/tests/unit/admin/test_routes.py @@ -26,9 +26,22 @@ def test_includeme(): assert config.add_route.calls == [ pretend.call("admin.dashboard", "/admin/", domain=warehouse), + pretend.call( + "admin.organization.list", "/admin/organizations/", domain=warehouse + ), + pretend.call( + "admin.organization.detail", + "/admin/organizations/{organization_id}/", + domain=warehouse, + ), pretend.call( "admin.organization.approve", - "/admin/organizations/approve/", + "/admin/organizations/{organization_id}/approve/", + domain=warehouse, + ), + pretend.call( + "admin.organization.decline", + "/admin/organizations/{organization_id}/decline/", domain=warehouse, ), pretend.call("admin.user.list", "/admin/users/", domain=warehouse), diff --git a/tests/unit/admin/views/test_organizations.py b/tests/unit/admin/views/test_organizations.py --- a/tests/unit/admin/views/test_organizations.py +++ b/tests/unit/admin/views/test_organizations.py @@ -11,10 +11,706 @@ # limitations under the License. 
import pretend +import pytest +from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound + +from warehouse.accounts.interfaces import IUserService from warehouse.admin.views import organizations as views +from warehouse.organizations.interfaces import IOrganizationService + +from ....common.db.organizations import OrganizationFactory + + +class TestOrganizationList: + def test_no_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(30)], + key=lambda o: o.normalized_name, + ) + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[:25] + assert result["query"] == "" + assert result["terms"] == [] + + def test_with_page(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(30)], + key=lambda o: o.normalized_name, + ) + db_request.GET["page"] = "2" + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[25:50] + assert result["query"] == "" + assert result["terms"] == [] + + def test_with_invalid_page(self, enable_organizations): + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + params={"page": "not an integer"}, + ) + + with pytest.raises(HTTPBadRequest): + views.organization_list(request) + + def test_basic_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + db_request.GET["q"] = organizations[0].name + result = views.organization_list(db_request) + + assert result["organizations"].items == [organizations[0]] + assert result["query"] == organizations[0].name + assert result["terms"] == [organizations[0].name] + + def test_name_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + db_request.GET["q"] = f"name:{organizations[0].name}" + result = views.organization_list(db_request) + + assert result["organizations"].items == [organizations[0]] + assert result["query"] == f"name:{organizations[0].name}" + assert result["terms"] == [f"name:{organizations[0].name}"] + + def test_organization_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + db_request.GET["q"] = f"organization:{organizations[0].display_name}" + result = views.organization_list(db_request) + + assert result["organizations"].items == [organizations[0]] + assert result["query"] == f"organization:{organizations[0].display_name}" + assert result["terms"] == [f"organization:{organizations[0].display_name}"] + + def test_url_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + db_request.GET["q"] = f"url:{organizations[0].link_url}" + result = views.organization_list(db_request) + + assert result["organizations"].items == [organizations[0]] + assert result["query"] == f"url:{organizations[0].link_url}" + assert result["terms"] == [f"url:{organizations[0].link_url}"] + + def test_description_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + db_request.GET["q"] = f"description:'{organizations[0].description}'" + result = 
views.organization_list(db_request) + + assert result["organizations"].items == [organizations[0]] + assert result["query"] == f"description:'{organizations[0].description}'" + assert result["terms"] == [f"description:{organizations[0].description}"] + + def test_is_approved_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + organizations[0].is_approved = True + organizations[1].is_approved = True + organizations[2].is_approved = False + organizations[3].is_approved = None + organizations[4].is_approved = None + db_request.GET["q"] = "is:approved" + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[:2] + assert result["query"] == "is:approved" + assert result["terms"] == ["is:approved"] + + def test_is_declined_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + organizations[0].is_approved = True + organizations[1].is_approved = True + organizations[2].is_approved = False + organizations[3].is_approved = None + organizations[4].is_approved = None + db_request.GET["q"] = "is:declined" + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[2:3] + assert result["query"] == "is:declined" + assert result["terms"] == ["is:declined"] + + def test_is_submitted_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + organizations[0].is_approved = True + organizations[1].is_approved = True + organizations[2].is_approved = False + organizations[3].is_approved = None + organizations[4].is_approved = None + db_request.GET["q"] = "is:submitted" + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[3:] + assert result["query"] == "is:submitted" + assert result["terms"] == ["is:submitted"] + + def test_is_active_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + organizations[0].is_active = True + organizations[1].is_active = True + organizations[2].is_active = False + organizations[3].is_active = False + organizations[4].is_active = False + db_request.GET["q"] = "is:active" + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[:2] + assert result["query"] == "is:active" + assert result["terms"] == ["is:active"] + + def test_is_inactive_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + organizations[0].is_active = True + organizations[1].is_active = True + organizations[2].is_active = False + organizations[3].is_active = False + organizations[4].is_active = False + db_request.GET["q"] = "is:inactive" + result = views.organization_list(db_request) + + assert result["organizations"].items == organizations[2:] + assert result["query"] == "is:inactive" + assert result["terms"] == ["is:inactive"] + + def test_is_invalid_query(self, enable_organizations, db_request): + organizations = sorted( + [OrganizationFactory.create() for _ in range(5)], + key=lambda o: o.normalized_name, + ) + db_request.GET["q"] = "is:not-actually-a-valid-query" + result = 
views.organization_list(db_request) + + assert result["organizations"].items == organizations[:25] + assert result["query"] == "is:not-actually-a-valid-query" + assert result["terms"] == ["is:not-actually-a-valid-query"] + + def test_disable_organizations(self, db_request): + with pytest.raises(HTTPNotFound): + views.organization_list(db_request) + + +class TestOrganizationDetail: + def test_detail(self, enable_organizations): + admin = pretend.stub( + id="admin-id", + username="admin", + name="Admin", + public_email="[email protected]", + ) + user = pretend.stub( + id="user-id", + username="example", + name="Example", + public_email="[email protected]", + ) + user_service = pretend.stub( + get_user=lambda userid, **kw: {admin.id: admin, user.id: user}[userid], + ) + create_event = pretend.stub( + additional={"created_by_user_id": str(user.id)}, + ) + organization = pretend.stub( + id=pretend.stub(), + name="example", + display_name="Example", + orgtype=pretend.stub(name="Company"), + link_url="https://www.example.com/", + description=( + "This company is for use in illustrative examples in documents " + "You may use this company in literature without prior " + "coordination or asking for permission." + ), + is_active=False, + is_approved=None, + events=pretend.stub( + filter=lambda *a, **kw: pretend.stub( + order_by=lambda *a, **kw: pretend.stub( + first=lambda *a, **kw: create_event, + ), + ), + ), + ) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + ) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[iface], + matchdict={"organization_id": pretend.stub()}, + ) + + assert views.organization_detail(request) == { + "admin": None, + "user": user, + "organization": organization, + } + + def test_detail_is_approved_true(self, enable_organizations): + admin = pretend.stub( + id="admin-id", + username="admin", + name="Admin", + public_email="[email protected]", + ) + user = pretend.stub( + id="user-id", + username="example", + name="Example", + public_email="[email protected]", + ) + user_service = pretend.stub( + get_user=lambda userid, **kw: {admin.id: admin, user.id: user}[userid], + ) + create_or_approve_event = pretend.stub( + additional={ + "created_by_user_id": str(user.id), + "approved_by_user_id": str(admin.id), + }, + ) + organization = pretend.stub( + id=pretend.stub(), + name="example", + display_name="Example", + orgtype=pretend.stub(name="Company"), + link_url="https://www.example.com/", + description=( + "This company is for use in illustrative examples in documents " + "You may use this company in literature without prior " + "coordination or asking for permission." 
+ ), + is_active=True, + is_approved=True, + events=pretend.stub( + filter=lambda *a, **kw: pretend.stub( + order_by=lambda *a, **kw: pretend.stub( + first=lambda *a, **kw: create_or_approve_event, + ), + ), + ), + ) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + ) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[iface], + matchdict={"organization_id": pretend.stub()}, + ) + + assert views.organization_detail(request) == { + "admin": admin, + "user": user, + "organization": organization, + } + + def test_detail_is_approved_false(self, enable_organizations): + admin = pretend.stub( + id="admin-id", + username="admin", + name="Admin", + public_email="[email protected]", + ) + user = pretend.stub( + id="user-id", + username="example", + name="Example", + public_email="[email protected]", + ) + user_service = pretend.stub( + get_user=lambda userid, **kw: {admin.id: admin, user.id: user}[userid], + ) + create_or_decline_event = pretend.stub( + additional={ + "created_by_user_id": str(user.id), + "declined_by_user_id": str(admin.id), + }, + ) + organization = pretend.stub( + id=pretend.stub(), + name="example", + display_name="Example", + orgtype=pretend.stub(name="Company"), + link_url="https://www.example.com/", + description=( + "This company is for use in illustrative examples in documents " + "You may use this company in literature without prior " + "coordination or asking for permission." + ), + is_active=False, + is_approved=False, + events=pretend.stub( + filter=lambda *a, **kw: pretend.stub( + order_by=lambda *a, **kw: pretend.stub( + first=lambda *a, **kw: create_or_decline_event, + ), + ), + ), + ) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + ) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[iface], + matchdict={"organization_id": pretend.stub()}, + ) + + assert views.organization_detail(request) == { + "admin": admin, + "user": user, + "organization": organization, + } + + def test_detail_not_found(self, enable_organizations): + organization_service = pretend.stub( + get_organization=lambda *a, **kw: None, + ) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda *a, **kw: organization_service, + matchdict={"organization_id": pretend.stub()}, + ) + + with pytest.raises(HTTPNotFound): + views.organization_detail(request) + + def test_approve(self, enable_organizations, monkeypatch): + admin = pretend.stub( + id="admin-id", + username="admin", + name="Admin", + public_email="[email protected]", + ) + user = pretend.stub( + id="user-id", + username="example", + name="Example", + public_email="[email protected]", + ) + user_service = pretend.stub( + get_admins=lambda *a, **kw: [admin], + get_user=lambda userid, **kw: {admin.id: admin, user.id: user}[userid], + ) + create_event = pretend.stub( + additional={"created_by_user_id": str(user.id)}, + ) + organization = pretend.stub( + id=pretend.stub(), + name="example", + display_name="Example", + orgtype=pretend.stub(name="Company"), + link_url="https://www.example.com/", + description=( + "This company is for use in illustrative examples in documents " + "You may use this company in literature without prior " + "coordination or 
asking for permission." + ), + is_active=False, + is_approved=None, + events=pretend.stub( + filter=lambda *a, **kw: pretend.stub( + order_by=lambda *a, **kw: pretend.stub( + first=lambda *a, **kw: create_event, + ), + ), + ), + ) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + approve_organization=pretend.call_recorder(lambda *a, **kw: None), + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + organization_detail_location = (f"/admin/organizations/{organization.id}/",) + message = pretend.stub() + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[iface], + matchdict={"organization_id": organization.id}, + params={"organization_name": organization.name, "message": message}, + route_path=lambda *a, **kw: organization_detail_location, + session=pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None), + ), + user=admin, + ) + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr( + views, "send_admin_new_organization_approved_email", send_email + ) + monkeypatch.setattr(views, "send_new_organization_approved_email", send_email) + + result = views.organization_approve(request) + + assert organization_service.approve_organization.calls == [ + pretend.call(organization.id), + ] + assert organization_service.record_event.calls == [ + pretend.call( + organization.id, + tag="organization:approve", + additional={"approved_by_user_id": str(admin.id)}, + ), + ] + assert request.session.flash.calls == [ + pretend.call( + f'Request for "{organization.name}" organization approved', + queue="success", + ), + ] + assert send_email.calls == [ + pretend.call( + request, + [admin], + organization_name=organization.name, + initiator_username=user.username, + message=message, + ), + pretend.call( + request, + user, + organization_name=organization.name, + message=message, + ), + ] + assert result.status_code == 303 + assert result.location == organization_detail_location + + def test_approve_wrong_confirmation_input(self, enable_organizations, monkeypatch): + user_service = pretend.stub() + organization = pretend.stub(id=pretend.stub(), name=pretend.stub()) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + ) + organization_detail_location = (f"/admin/organizations/{organization.id}/",) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[iface], + matchdict={"organization_id": organization.id}, + params={"organization_name": pretend.stub()}, + route_path=lambda *a, **kw: organization_detail_location, + session=pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None), + ), + ) + + result = views.organization_approve(request) + + assert request.session.flash.calls == [ + pretend.call("Wrong confirmation input", queue="error"), + ] + assert result.status_code == 303 + assert result.location == organization_detail_location + + def test_approve_not_found(self, enable_organizations): + organization_service = pretend.stub( + get_organization=lambda *a, **kw: None, + ) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda *a, **kw: organization_service, + matchdict={"organization_id": pretend.stub()}, + ) + + with pytest.raises(HTTPNotFound): + 
views.organization_approve(request) + + def test_decline(self, enable_organizations, monkeypatch): + admin = pretend.stub( + id="admin-id", + username="admin", + name="Admin", + public_email="[email protected]", + ) + user = pretend.stub( + id="user-id", + username="example", + name="Example", + public_email="[email protected]", + ) + user_service = pretend.stub( + get_admins=lambda *a, **kw: [admin], + get_user=lambda userid, **kw: {admin.id: admin, user.id: user}[userid], + ) + create_event = pretend.stub( + additional={"created_by_user_id": str(user.id)}, + ) + organization = pretend.stub( + id=pretend.stub(), + name="example", + display_name="Example", + orgtype=pretend.stub(name="Company"), + link_url="https://www.example.com/", + description=( + "This company is for use in illustrative examples in documents " + "You may use this company in literature without prior " + "coordination or asking for permission." + ), + is_active=False, + is_approved=None, + events=pretend.stub( + filter=lambda *a, **kw: pretend.stub( + order_by=lambda *a, **kw: pretend.stub( + first=lambda *a, **kw: create_event, + ), + ), + ), + ) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + decline_organization=pretend.call_recorder(lambda *a, **kw: None), + record_event=pretend.call_recorder(lambda *a, **kw: None), + ) + organization_detail_location = (f"/admin/organizations/{organization.id}/",) + message = pretend.stub() + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: organization_service, + }[iface], + matchdict={"organization_id": organization.id}, + params={"organization_name": organization.name, "message": message}, + route_path=lambda *a, **kw: organization_detail_location, + session=pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None), + ), + user=admin, + ) + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr( + views, "send_admin_new_organization_declined_email", send_email + ) + monkeypatch.setattr(views, "send_new_organization_declined_email", send_email) + + result = views.organization_decline(request) + + assert organization_service.decline_organization.calls == [ + pretend.call(organization.id), + ] + assert organization_service.record_event.calls == [ + pretend.call( + organization.id, + tag="organization:decline", + additional={"declined_by_user_id": str(admin.id)}, + ), + ] + assert request.session.flash.calls == [ + pretend.call( + f'Request for "{organization.name}" organization declined', + queue="success", + ), + ] + assert send_email.calls == [ + pretend.call( + request, + [admin], + organization_name=organization.name, + initiator_username=user.username, + message=message, + ), + pretend.call( + request, + user, + organization_name=organization.name, + message=message, + ), + ] + assert result.status_code == 303 + assert result.location == organization_detail_location + + def test_decline_wrong_confirmation_input(self, enable_organizations, monkeypatch): + user_service = pretend.stub() + organization = pretend.stub(id=pretend.stub(), name=pretend.stub()) + organization_service = pretend.stub( + get_organization=lambda *a, **kw: organization, + ) + organization_detail_location = (f"/admin/organizations/{organization.id}/",) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda iface, **kw: { + IUserService: user_service, + IOrganizationService: 
organization_service, + }[iface], + matchdict={"organization_id": organization.id}, + params={"organization_name": pretend.stub()}, + route_path=lambda *a, **kw: organization_detail_location, + session=pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None), + ), + ) + + result = views.organization_decline(request) + + assert request.session.flash.calls == [ + pretend.call("Wrong confirmation input", queue="error"), + ] + assert result.status_code == 303 + assert result.location == organization_detail_location + + def test_decline_not_found(self, enable_organizations): + organization_service = pretend.stub( + get_organization=lambda *a, **kw: None, + ) + request = pretend.stub( + flags=pretend.stub(enabled=lambda *a: False), + find_service=lambda *a, **kw: organization_service, + matchdict={"organization_id": pretend.stub()}, + ) + + with pytest.raises(HTTPNotFound): + views.organization_decline(request) + + def test_detail_disable_organizations(self, db_request): + with pytest.raises(HTTPNotFound): + views.organization_detail(db_request) + def test_approve_disable_organizations(self, db_request): + with pytest.raises(HTTPNotFound): + views.organization_approve(db_request) -class TestOrganizations: - def test_approve(self): - assert views.approve(pretend.stub()) == {} + def test_decline_disable_organizations(self, db_request): + with pytest.raises(HTTPNotFound): + views.organization_decline(db_request) diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -431,6 +431,7 @@ def test_send_admin_new_organization_requested_email( email="[email protected]", primary_email=pretend.stub(email="[email protected]", verified=True), ) + organization_id = "id" organization_name = "example" subject_renderer = pyramid_config.testing_add_renderer( @@ -467,20 +468,236 @@ def test_send_admin_new_organization_requested_email( admin_user, organization_name=organization_name, initiator_username=initiator_user.username, + organization_id=organization_id, ) assert result == { "organization_name": organization_name, "initiator_username": initiator_user.username, + "organization_id": organization_id, } - subject_renderer.assert_() + subject_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + organization_id=organization_id, + ) body_renderer.assert_( organization_name=organization_name, initiator_username=initiator_user.username, + organization_id=organization_id, ) html_renderer.assert_( organization_name=organization_name, initiator_username=initiator_user.username, + organization_id=organization_id, + ) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{admin_user.name} <{admin_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": admin_user.id, + "additional": { + "from_": "[email protected]", + "to": admin_user.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ) + ] + + +class TestSendAdminNewOrganizationApprovedEmail: + def test_send_admin_new_organization_approved_email( + self, pyramid_request, pyramid_config, monkeypatch + ): + admin_user = pretend.stub( + id="admin", + username="admin", + name="PyPI Adminstrator", + email="[email protected]", + 
primary_email=pretend.stub(email="[email protected]", verified=True), + ) + initiator_user = pretend.stub( + id="id", + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + organization_name = "example" + message = "example message" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-approved/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-approved/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-approved/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=admin_user.id) + ) + ), + ) + pyramid_request.user = initiator_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_admin_new_organization_approved_email( + pyramid_request, + admin_user, + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + + assert result == { + "organization_name": organization_name, + "initiator_username": initiator_user.username, + "message": message, + } + subject_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + body_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + html_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{admin_user.name} <{admin_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": admin_user.id, + "additional": { + "from_": "[email protected]", + "to": admin_user.email, + "subject": "Email Subject", + "redact_ip": True, + }, + }, + ) + ] + + +class TestSendAdminNewOrganizationDeclinedEmail: + def test_send_admin_new_organization_declined_email( + self, pyramid_request, pyramid_config, monkeypatch + ): + admin_user = pretend.stub( + id="admin", + username="admin", + name="PyPI Adminstrator", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + initiator_user = pretend.stub( + id="id", + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + organization_name = "example" + message = "example message" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-declined/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-declined/body.txt" + ) + body_renderer.string_response = "Email Body" + 
html_renderer = pyramid_config.testing_add_renderer( + "email/admin-new-organization-declined/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=admin_user.id) + ) + ), + ) + pyramid_request.user = initiator_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_admin_new_organization_declined_email( + pyramid_request, + admin_user, + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + + assert result == { + "organization_name": organization_name, + "initiator_username": initiator_user.username, + "message": message, + } + subject_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + body_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, + ) + html_renderer.assert_( + organization_name=organization_name, + initiator_username=initiator_user.username, + message=message, ) assert pyramid_request.task.calls == [pretend.call(send_email)] assert send_email.delay.calls == [ @@ -1404,7 +1621,7 @@ def test_send_new_organization_requested_email( ) assert result == {"organization_name": organization_name} - subject_renderer.assert_() + subject_renderer.assert_(organization_name=organization_name) body_renderer.assert_(organization_name=organization_name) html_renderer.assert_(organization_name=organization_name) assert pyramid_request.task.calls == [pretend.call(send_email)] @@ -1433,6 +1650,190 @@ def test_send_new_organization_requested_email( ] +class TestSendNewOrganizationApprovedEmail: + def test_send_new_organization_approved_email( + self, pyramid_request, pyramid_config, monkeypatch + ): + initiator_user = pretend.stub( + id="id", + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + organization_name = "example" + message = "example message" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-approved/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-approved/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-approved/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=initiator_user.id) + ) + ), + ) + pyramid_request.user = initiator_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_new_organization_approved_email( + pyramid_request, + initiator_user, + organization_name=organization_name, + message=message, + ) + + 
assert result == { + "organization_name": organization_name, + "message": message, + } + subject_renderer.assert_( + organization_name=organization_name, + message=message, + ) + body_renderer.assert_( + organization_name=organization_name, + message=message, + ) + html_renderer.assert_( + organization_name=organization_name, + message=message, + ) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{initiator_user.username} <{initiator_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": initiator_user.id, + "additional": { + "from_": "[email protected]", + "to": initiator_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] + + +class TestSendNewOrganizationDeclinedEmail: + def test_send_new_organization_declined_email( + self, pyramid_request, pyramid_config, monkeypatch + ): + initiator_user = pretend.stub( + id="id", + username="username", + name="", + email="[email protected]", + primary_email=pretend.stub(email="[email protected]", verified=True), + ) + organization_name = "example" + message = "example message" + + subject_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-declined/subject.txt" + ) + subject_renderer.string_response = "Email Subject" + body_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-declined/body.txt" + ) + body_renderer.string_response = "Email Body" + html_renderer = pyramid_config.testing_add_renderer( + "email/new-organization-declined/body.html" + ) + html_renderer.string_response = "Email HTML Body" + + send_email = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder(lambda *args, **kwargs: send_email) + monkeypatch.setattr(email, "send_email", send_email) + + pyramid_request.db = pretend.stub( + query=lambda a: pretend.stub( + filter=lambda *a: pretend.stub( + one=lambda: pretend.stub(user_id=initiator_user.id) + ) + ), + ) + pyramid_request.user = initiator_user + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + + result = email.send_new_organization_declined_email( + pyramid_request, + initiator_user, + organization_name=organization_name, + message=message, + ) + + assert result == { + "organization_name": organization_name, + "message": message, + } + subject_renderer.assert_( + organization_name=organization_name, + message=message, + ) + body_renderer.assert_( + organization_name=organization_name, + message=message, + ) + html_renderer.assert_( + organization_name=organization_name, + message=message, + ) + assert pyramid_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{initiator_user.username} <{initiator_user.email}>", + { + "subject": "Email Subject", + "body_text": "Email Body", + "body_html": ( + "<html>\n<head></head>\n" + "<body><p>Email HTML Body</p></body>\n</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": initiator_user.id, + "additional": { + "from_": "[email protected]", + "to": initiator_user.email, + "subject": "Email Subject", + "redact_ip": False, + }, + }, + ) + ] + + class TestCollaboratorAddedEmail: def test_collaborator_added_email( self, pyramid_request, pyramid_config, monkeypatch diff --git a/tests/unit/manage/test_views.py 
b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2345,7 +2345,7 @@ def test_manage_organizations(self, monkeypatch): ] assert result == default_response - def test_manage_organizations_disallow_organizations(self, monkeypatch): + def test_manage_organizations_disable_organizations(self, monkeypatch): request = pretend.stub( find_service=lambda *a, **kw: pretend.stub(), flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), @@ -2504,6 +2504,7 @@ def test_create_organization(self, monkeypatch): admins, organization_name=organization.name, initiator_username=request.user.username, + organization_id=organization.id, ), pretend.call( request, @@ -2581,7 +2582,7 @@ def test_create_organization_validation_fails(self, monkeypatch): assert send_email.calls == [] assert result == {"create_organization_form": create_organization_obj} - def test_create_organizations_disallow_organizations(self, monkeypatch): + def test_create_organizations_disable_organizations(self, monkeypatch): request = pretend.stub( find_service=lambda *a, **kw: pretend.stub(), flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), @@ -2848,7 +2849,7 @@ def test_toggle_2fa_requirement_non_critical( assert result.status_code == 303 assert result.headers["Location"] == "/foo/bar/" - events = project.events + events = project.events.all() assert len(events) == 1 event = events[0] assert event.tag == tag
List pending organization requests

As @ewdurbin described in #11208, in addition to notification emails, we want administrators to have a list of pending organization requests to approve/decline.
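A minimal sketch of the underlying query, using SQLAlchemy as warehouse does. It leans on the nullable `Organization.is_approved` column visible in the patch above (`True` = approved, `False` = declined, `NULL` = still awaiting review); the function name and session argument are assumptions:

```python
# Minimal sketch; function name and session argument are assumed.
from warehouse.organizations.models import Organization


def pending_organizations(db_session):
    """Organizations submitted but not yet approved or declined."""
    return (
        db_session.query(Organization)
        # ``is_approved`` is nullable: NULL means no admin decision yet.
        .filter(Organization.is_approved == None)  # noqa: E711 (SQL IS NULL)
        .order_by(Organization.normalized_name)
        .all()
    )
```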
2022-04-18T20:25:19Z
[]
[]
pypi/warehouse
11218
pypi__warehouse-11218
[ "11252" ]
ad9eb8f1887bd8143a13c4f2cd6fba223a032ed1
diff --git a/warehouse/accounts/__init__.py b/warehouse/accounts/__init__.py
--- a/warehouse/accounts/__init__.py
+++ b/warehouse/accounts/__init__.py
@@ -10,41 +10,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import datetime
-import enum
-
 from pyramid.authorization import ACLAuthorizationPolicy
-from pyramid.httpexceptions import HTTPUnauthorized
-from pyramid_multiauth import MultiAuthenticationPolicy
 
-from warehouse.accounts.auth_policy import (
-    BasicAuthAuthenticationPolicy,
-    SessionAuthenticationPolicy,
-    TwoFactorAuthorizationPolicy,
-)
 from warehouse.accounts.interfaces import (
     IPasswordBreachedService,
     ITokenService,
     IUserService,
 )
-from warehouse.accounts.models import DisableReason
+from warehouse.accounts.security_policy import (
+    BasicAuthSecurityPolicy,
+    SessionSecurityPolicy,
+    TwoFactorAuthorizationPolicy,
+)
 from warehouse.accounts.services import (
     HaveIBeenPwnedPasswordBreachedService,
     NullPasswordBreachedService,
     TokenServiceFactory,
     database_login_factory,
 )
-from warehouse.email import send_password_compromised_email_hibp
-from warehouse.errors import (
-    BasicAuthAccountFrozen,
-    BasicAuthBreachedPassword,
-    BasicAuthFailedPassword,
-)
-from warehouse.macaroons.auth_policy import (
-    MacaroonAuthenticationPolicy,
+from warehouse.macaroons.security_policy import (
     MacaroonAuthorizationPolicy,
+    MacaroonSecurityPolicy,
 )
 from warehouse.rate_limiting import IRateLimiter, RateLimit
+from warehouse.utils.security_policy import MultiSecurityPolicy
 
 __all__ = ["NullPasswordBreachedService", "HaveIBeenPwnedPasswordBreachedService"]
 
@@ -52,125 +41,6 @@
 REDIRECT_FIELD_NAME = "next"
 
 
-class AuthenticationMethod(enum.Enum):
-    BASIC_AUTH = "basic-auth"
-    SESSION = "session"
-    MACAROON = "macaroon"
-
-
-def _format_exc_status(exc, message):
-    exc.status = f"{exc.status_code} {message}"
-    return exc
-
-
-def _authenticate(userid, request):
-    """Apply the necessary principals to the authenticated user"""
-    login_service = request.find_service(IUserService, context=None)
-    user = login_service.get_user(userid)
-
-    if user is None:
-        return
-
-    if request.session.password_outdated(login_service.get_password_timestamp(userid)):
-        request.session.invalidate()
-        request.session.flash("Session invalidated by password change", queue="error")
-        return
-
-    principals = []
-
-    if user.is_superuser:
-        principals.append("group:admins")
-    if user.is_moderator or user.is_superuser:
-        principals.append("group:moderators")
-    if user.is_psf_staff or user.is_superuser:
-        principals.append("group:psf_staff")
-
-    # user must have base admin access if any admin permission
-    if principals:
-        principals.append("group:with_admin_dashboard_access")
-
-    return principals
-
-
-def _basic_auth_check(username, password, request):
-    request.authentication_method = AuthenticationMethod.BASIC_AUTH
-
-    # Basic authentication can only be used for uploading
-    if request.matched_route.name not in ["forklift.legacy.file_upload"]:
-        return
-
-    login_service = request.find_service(IUserService, context=None)
-    breach_service = request.find_service(IPasswordBreachedService, context=None)
-
-    userid = login_service.find_userid(username)
-    if userid is not None:
-        user = login_service.get_user(userid)
-        is_disabled, disabled_for = login_service.is_disabled(user.id)
-        if is_disabled:
-            # This technically violates the contract a little bit, this function is
-            # meant to return None if the user cannot log in. However we want to present
-            # a different error message than is normal when we're denying the log in
-            # because of a compromised password. So to do that, we'll need to raise a
-            # HTTPError that'll ultimately get returned to the client. This is OK to do
-            # here because we've already successfully authenticated the credentials, so
-            # it won't screw up the fall through to other authentication mechanisms
-            # (since we wouldn't have fell through to them anyways).
-            if disabled_for == DisableReason.CompromisedPassword:
-                raise _format_exc_status(
-                    BasicAuthBreachedPassword(), breach_service.failure_message_plain
-                )
-            elif disabled_for == DisableReason.AccountFrozen:
-                raise _format_exc_status(BasicAuthAccountFrozen(), "Account is frozen.")
-            else:
-                raise _format_exc_status(HTTPUnauthorized(), "Account is disabled.")
-        elif login_service.check_password(
-            user.id,
-            password,
-            tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"],
-        ):
-            if breach_service.check_password(
-                password, tags=["method:auth", "auth_method:basic"]
-            ):
-                send_password_compromised_email_hibp(request, user)
-                login_service.disable_password(
-                    user.id, reason=DisableReason.CompromisedPassword
-                )
-                raise _format_exc_status(
-                    BasicAuthBreachedPassword(), breach_service.failure_message_plain
-                )
-
-            login_service.update_user(user.id, last_login=datetime.datetime.utcnow())
-            return _authenticate(user.id, request)
-        else:
-            user.record_event(
-                tag="account:login:failure",
-                ip_address=request.remote_addr,
-                additional={"reason": "invalid_password", "auth_method": "basic"},
-            )
-            raise _format_exc_status(
-                BasicAuthFailedPassword(),
-                "Invalid or non-existent authentication information. "
-                "See {projecthelp} for more information.".format(
-                    projecthelp=request.help_url(_anchor="invalid-auth")
-                ),
-            )
-
-
-def _session_authenticate(userid, request):
-    request.authentication_method = AuthenticationMethod.SESSION
-
-    # Session authentication cannot be used for uploading
-    if request.matched_route.name in ["forklift.legacy.file_upload"]:
-        return
-
-    return _authenticate(userid, request)
-
-
-def _macaroon_authenticate(userid, request):
-    request.authentication_method = AuthenticationMethod.MACAROON
-    return _authenticate(userid, request)
-
-
 def _user(request):
     userid = request.authenticated_userid
 
@@ -206,19 +76,18 @@ def includeme(config):
         breached_pw_class.create_service, IPasswordBreachedService
     )
 
-    # Register our authentication and authorization policies
-    config.set_authentication_policy(
-        MultiAuthenticationPolicy(
-            [
-                SessionAuthenticationPolicy(callback=_session_authenticate),
-                BasicAuthAuthenticationPolicy(check=_basic_auth_check),
-                MacaroonAuthenticationPolicy(callback=_macaroon_authenticate),
-            ]
-        )
+    # Register our security policies (AuthN + AuthZ)
+    authz_policy = TwoFactorAuthorizationPolicy(
+        policy=MacaroonAuthorizationPolicy(policy=ACLAuthorizationPolicy())
     )
-    config.set_authorization_policy(
-        TwoFactorAuthorizationPolicy(
-            policy=MacaroonAuthorizationPolicy(policy=ACLAuthorizationPolicy())
+    config.set_security_policy(
+        MultiSecurityPolicy(
+            [
+                SessionSecurityPolicy(),
+                BasicAuthSecurityPolicy(),
+                MacaroonSecurityPolicy(),
+            ],
+            authz_policy,
         )
     )
diff --git a/warehouse/accounts/auth_policy.py b/warehouse/accounts/auth_policy.py
deleted file mode 100644
--- a/warehouse/accounts/auth_policy.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from pyramid.authentication import (
-    BasicAuthAuthenticationPolicy as _BasicAuthAuthenticationPolicy,
-    SessionAuthenticationPolicy as _SessionAuthenticationPolicy,
-)
-from pyramid.interfaces import IAuthorizationPolicy
-from pyramid.threadlocal import get_current_request
-from zope.interface import implementer
-
-from warehouse.accounts.interfaces import IUserService
-from warehouse.cache.http import add_vary_callback
-from warehouse.errors import WarehouseDenied
-from warehouse.packaging.models import TwoFactorRequireable
-
-
-class BasicAuthAuthenticationPolicy(_BasicAuthAuthenticationPolicy):
-    def unauthenticated_userid(self, request):
-        # If we're calling into this API on a request, then we want to register
-        # a callback which will ensure that the response varies based on the
-        # Authorization header.
-        request.add_response_callback(add_vary_callback("Authorization"))
-
-        # Dispatch to the real basic authentication policy
-        username = super().unauthenticated_userid(request)
-
-        # Assuming we got a username from the basic authentication policy, we
-        # want to locate the userid from the IUserService.
-        if username is not None:
-            login_service = request.find_service(IUserService, context=None)
-            return str(login_service.find_userid(username))
-
-
-class SessionAuthenticationPolicy(_SessionAuthenticationPolicy):
-    def unauthenticated_userid(self, request):
-        # If we're calling into this API on a request, then we want to register
-        # a callback which will ensure that the response varies based on the
-        # Cookie header.
-        request.add_response_callback(add_vary_callback("Cookie"))
-
-        # Dispatch to the real SessionAuthenticationPolicy
-        return super().unauthenticated_userid(request)
-
-
-@implementer(IAuthorizationPolicy)
-class TwoFactorAuthorizationPolicy:
-    def __init__(self, policy):
-        self.policy = policy
-
-    def permits(self, context, principals, permission):
-        # The Pyramid API doesn't let us access the request here, so we have to pull it
-        # out of the thread local instead.
-        # TODO: Work with Pyramid devs to figure out if there is a better way to support
-        # the worklow we are using here or not.
-        request = get_current_request()
-
-        # Our request could possibly be a None, if there isn't an active request, in
-        # that case we're going to always deny, because without a request, we can't
-        # determine if this request is authorized or not.
-        if request is None:
-            return WarehouseDenied(
-                "There was no active request.", reason="no_active_request"
-            )
-
-        # Check if the subpolicy permits authorization
-        subpolicy_permits = self.policy.permits(context, principals, permission)
-
-        # If the request is permitted by the subpolicy, check if the context is
-        # 2FA requireable, if 2FA is indeed required, and if the user has 2FA
-        # enabled
-        if subpolicy_permits and isinstance(context, TwoFactorRequireable):
-            if (
-                request.registry.settings["warehouse.two_factor_requirement.enabled"]
-                and context.owners_require_2fa
-                and not request.user.has_two_factor
-            ):
-                return WarehouseDenied(
-                    "This project requires two factor authentication to be enabled "
-                    "for all contributors.",
-                    reason="owners_require_2fa",
-                )
-            if (
-                request.registry.settings["warehouse.two_factor_mandate.enabled"]
-                and context.pypi_mandates_2fa
-                and not request.user.has_two_factor
-            ):
-                return WarehouseDenied(
-                    "PyPI requires two factor authentication to be enabled "
-                    "for all contributors to this project.",
-                    reason="pypi_mandates_2fa",
-                )
-            if (
-                request.registry.settings["warehouse.two_factor_mandate.available"]
-                and context.pypi_mandates_2fa
-                and not request.user.has_two_factor
-            ):
-                request.session.flash(
-                    "This project is included in PyPI's two-factor mandate "
-                    "for critical projects. In the future, you will be unable to "
-                    "perform this action without enabling 2FA for your account",
-                    queue="warning",
-                )
-
-        return subpolicy_permits
-
-    def principals_allowed_by_permission(self, context, permission):
-        # We just dispatch this, because this policy doesn't restrict what
-        # principals are allowed by a particular permission, it just restricts
-        # specific requests to not have that permission.
-        return self.policy.principals_allowed_by_permission(context, permission)
diff --git a/warehouse/accounts/security_policy.py b/warehouse/accounts/security_policy.py
new file mode 100644
--- /dev/null
+++ b/warehouse/accounts/security_policy.py
@@ -0,0 +1,261 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
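+
+# Pyramid 2.0-style security policies for warehouse accounts: session and
+# HTTP basic-auth identity policies, plus a two-factor-aware authorization
+# policy that wraps another AuthZ policy.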
+
+import datetime
+
+from pyramid.authentication import (
+    SessionAuthenticationHelper,
+    extract_http_basic_credentials,
+)
+from pyramid.httpexceptions import HTTPUnauthorized
+from pyramid.interfaces import IAuthorizationPolicy, ISecurityPolicy
+from pyramid.threadlocal import get_current_request
+from zope.interface import implementer
+
+from warehouse.accounts.interfaces import IPasswordBreachedService, IUserService
+from warehouse.accounts.models import DisableReason
+from warehouse.cache.http import add_vary_callback
+from warehouse.email import send_password_compromised_email_hibp
+from warehouse.errors import (
+    BasicAuthAccountFrozen,
+    BasicAuthBreachedPassword,
+    BasicAuthFailedPassword,
+    WarehouseDenied,
+)
+from warehouse.packaging.models import TwoFactorRequireable
+from warehouse.utils.security_policy import AuthenticationMethod
+
+
+def _format_exc_status(exc, message):
+    exc.status = f"{exc.status_code} {message}"
+    return exc
+
+
+def _basic_auth_check(username, password, request):
+    # Basic authentication can only be used for uploading
+    if request.matched_route.name not in ["forklift.legacy.file_upload"]:
+        return False
+
+    login_service = request.find_service(IUserService, context=None)
+    breach_service = request.find_service(IPasswordBreachedService, context=None)
+
+    userid = login_service.find_userid(username)
+    if userid is not None:
+        user = login_service.get_user(userid)
+        is_disabled, disabled_for = login_service.is_disabled(user.id)
+        if is_disabled:
+            # This technically violates the contract a little bit, this function is
+            # meant to return False if the user cannot log in. However we want to
+            # present a different error message than is normal when we're denying the
+            # log in because of a compromised password. So to do that, we'll need to
+            # raise an HTTPError that'll ultimately get returned to the client. This is
+            # OK to do here because we've already successfully authenticated the
+            # credentials, so it won't screw up the fall through to other authentication
+            # mechanisms (since we wouldn't have fallen through to them anyway).
+            if disabled_for == DisableReason.CompromisedPassword:
+                raise _format_exc_status(
+                    BasicAuthBreachedPassword(), breach_service.failure_message_plain
+                )
+            elif disabled_for == DisableReason.AccountFrozen:
+                raise _format_exc_status(BasicAuthAccountFrozen(), "Account is frozen.")
+            else:
+                raise _format_exc_status(HTTPUnauthorized(), "Account is disabled.")
+        elif login_service.check_password(
+            user.id,
+            password,
+            tags=["mechanism:basic_auth", "method:auth", "auth_method:basic"],
+        ):
+            if breach_service.check_password(
+                password, tags=["method:auth", "auth_method:basic"]
+            ):
+                send_password_compromised_email_hibp(request, user)
+                login_service.disable_password(
+                    user.id, reason=DisableReason.CompromisedPassword
+                )
+                raise _format_exc_status(
+                    BasicAuthBreachedPassword(), breach_service.failure_message_plain
+                )
+
+            login_service.update_user(user.id, last_login=datetime.datetime.utcnow())
+            return True
+        else:
+            user.record_event(
+                tag="account:login:failure",
+                ip_address=request.remote_addr,
+                additional={"reason": "invalid_password", "auth_method": "basic"},
+            )
+            raise _format_exc_status(
+                BasicAuthFailedPassword(),
+                "Invalid or non-existent authentication information. "
+                "See {projecthelp} for more information.".format(
+                    projecthelp=request.help_url(_anchor="invalid-auth")
+                ),
+            )
+
+    # No user, no authentication.
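+    # (Returning False here makes BasicAuthSecurityPolicy.identity() return
+    # None, which in turn lets MultiSecurityPolicy fall through to the other
+    # sub-policies.)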
+    return False
+
+
+@implementer(ISecurityPolicy)
+class SessionSecurityPolicy:
+    def __init__(self):
+        self._session_helper = SessionAuthenticationHelper()
+
+    def identity(self, request):
+        # If we're calling into this API on a request, then we want to register
+        # a callback which will ensure that the response varies based on the
+        # Cookie header.
+        request.add_response_callback(add_vary_callback("Cookie"))
+        request.authentication_method = AuthenticationMethod.SESSION
+
+        userid = self._session_helper.authenticated_userid(request)
+        if userid is None:
+            return None
+
+        # Session authentication cannot be used for uploading
+        if request.matched_route.name in ["forklift.legacy.file_upload"]:
+            return None
+
+        login_service = request.find_service(IUserService, context=None)
+
+        # Our session might be "valid" despite predating a password change.
+        if request.session.password_outdated(
+            login_service.get_password_timestamp(userid)
+        ):
+            request.session.invalidate()
+            request.session.flash(
+                "Session invalidated by password change", queue="error"
+            )
+            return None
+
+        # Sessions can only authenticate users, not any other type of identity.
+        return login_service.get_user(userid)
+
+    def forget(self, request, **kw):
+        return self._session_helper.forget(request, **kw)
+
+    def remember(self, request, userid, **kw):
+        return self._session_helper.remember(request, userid, **kw)
+
+    def authenticated_userid(self, request):
+        # Handled by MultiSecurityPolicy
+        return NotImplemented
+
+    def permits(self, request, context, permission):
+        # Handled by MultiSecurityPolicy
+        return NotImplemented
+
+
+@implementer(ISecurityPolicy)
+class BasicAuthSecurityPolicy:
+    def identity(self, request):
+        # If we're calling into this API on a request, then we want to register
+        # a callback which will ensure that the response varies based on the
+        # Authorization header.
+        request.add_response_callback(add_vary_callback("Authorization"))
+        request.authentication_method = AuthenticationMethod.BASIC_AUTH
+
+        credentials = extract_http_basic_credentials(request)
+        if credentials is None:
+            return None
+
+        username, password = credentials
+        if not _basic_auth_check(username, password, request):
+            return None
+
+        # Like sessions; basic auth can only authenticate users.
+        login_service = request.find_service(IUserService, context=None)
+        return login_service.get_user_by_username(username)
+
+    def forget(self, request, **kw):
+        # No-op.
+        return []
+
+    def remember(self, request, userid, **kw):
+        # NOTE: We could make realm configurable here.
+        return [("WWW-Authenticate", 'Basic realm="Realm"')]
+
+    def authenticated_userid(self, request):
+        # Handled by MultiSecurityPolicy
+        return NotImplemented
+
+    def permits(self, request, context, permission):
+        # Handled by MultiSecurityPolicy
+        return NotImplemented
+
+
+@implementer(IAuthorizationPolicy)
+class TwoFactorAuthorizationPolicy:
+    def __init__(self, policy):
+        self.policy = policy
+
+    def permits(self, context, principals, permission):
+        # The Pyramid API doesn't let us access the request here, so we have to pull it
+        # out of the thread local instead.
+        # TODO: Work with Pyramid devs to figure out if there is a better way to support
+        # the workflow we are using here or not.
+        request = get_current_request()
+
+        # Our request could possibly be None if there isn't an active request; in
+        # that case we're going to always deny, because without a request we can't
+        # determine whether this request is authorized or not.
+        if request is None:
+            return WarehouseDenied(
+                "There was no active request.", reason="no_active_request"
+            )
+
+        # Check if the subpolicy permits authorization
+        subpolicy_permits = self.policy.permits(context, principals, permission)
+
+        # If the request is permitted by the subpolicy, check if the context is
+        # 2FA requireable, if 2FA is indeed required, and if the user has 2FA
+        # enabled
+        if subpolicy_permits and isinstance(context, TwoFactorRequireable):
+            if (
+                request.registry.settings["warehouse.two_factor_requirement.enabled"]
+                and context.owners_require_2fa
+                and not request.user.has_two_factor
+            ):
+                return WarehouseDenied(
+                    "This project requires two factor authentication to be enabled "
+                    "for all contributors.",
+                    reason="owners_require_2fa",
+                )
+            if (
+                request.registry.settings["warehouse.two_factor_mandate.enabled"]
+                and context.pypi_mandates_2fa
+                and not request.user.has_two_factor
+            ):
+                return WarehouseDenied(
+                    "PyPI requires two factor authentication to be enabled "
+                    "for all contributors to this project.",
+                    reason="pypi_mandates_2fa",
+                )
+            if (
+                request.registry.settings["warehouse.two_factor_mandate.available"]
+                and context.pypi_mandates_2fa
+                and not request.user.has_two_factor
+            ):
+                request.session.flash(
+                    "This project is included in PyPI's two-factor mandate "
+                    "for critical projects. In the future, you will be unable to "
+                    "perform this action without enabling 2FA for your account",
+                    queue="warning",
+                )
+
+        return subpolicy_permits
+
+    def principals_allowed_by_permission(self, context, permission):
+        # We just dispatch this, because this policy doesn't restrict what
+        # principals are allowed by a particular permission, it just restricts
+        # specific requests to not have that permission.
+        return self.policy.principals_allowed_by_permission(context, permission)
diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py
--- a/warehouse/forklift/legacy.py
+++ b/warehouse/forklift/legacy.py
@@ -45,7 +45,6 @@
 from trove_classifiers import classifiers, deprecated_classifiers
 
 from warehouse import forms
-from warehouse.accounts import AuthenticationMethod
 from warehouse.admin.flags import AdminFlagValue
 from warehouse.classifiers.models import Classifier
 from warehouse.email import send_basic_auth_with_two_factor_email
@@ -65,6 +64,7 @@
 )
 from warehouse.packaging.tasks import update_bigquery_release_files
 from warehouse.utils import http, readme
+from warehouse.utils.security_policy import AuthenticationMethod
 
 ONE_MB = 1 * 1024 * 1024
 ONE_GB = 1 * 1024 * 1024 * 1024
diff --git a/warehouse/macaroons/auth_policy.py b/warehouse/macaroons/security_policy.py
similarity index 90%
rename from warehouse/macaroons/auth_policy.py
rename to warehouse/macaroons/security_policy.py
--- a/warehouse/macaroons/auth_policy.py
+++ b/warehouse/macaroons/security_policy.py
@@ -12,8 +12,7 @@
 
 import base64
 
-from pyramid.authentication import CallbackAuthenticationPolicy
-from pyramid.interfaces import IAuthenticationPolicy, IAuthorizationPolicy
+from pyramid.interfaces import IAuthorizationPolicy, ISecurityPolicy
 from pyramid.threadlocal import get_current_request
 from zope.interface import implementer
 
@@ -21,6 +20,7 @@
 from warehouse.errors import WarehouseDenied
 from warehouse.macaroons.interfaces import IMacaroonService
 from warehouse.macaroons.services import InvalidMacaroonError
+from warehouse.utils.security_policy import AuthenticationMethod
 
 
 def _extract_basic_macaroon(auth):
@@ -70,16 +70,14 @@ def _extract_http_macaroon(request):
     return None
 
 
-@implementer(IAuthenticationPolicy)
-class MacaroonAuthenticationPolicy(CallbackAuthenticationPolicy):
-    def __init__(self, callback=None):
-        self.callback = callback
-
-    def unauthenticated_userid(self, request):
+@implementer(ISecurityPolicy)
+class MacaroonSecurityPolicy:
+    def identity(self, request):
         # If we're calling into this API on a request, then we want to register
         # a callback which will ensure that the response varies based on the
         # Authorization header.
         request.add_response_callback(add_vary_callback("Authorization"))
+        request.authentication_method = AuthenticationMethod.MACAROON
 
         # We need to extract our Macaroon from the request.
         macaroon = _extract_http_macaroon(request)
@@ -89,9 +87,7 @@
         # Check to see if our Macaroon exists in the database, and if so
         # fetch the user that is associated with it.
         macaroon_service = request.find_service(IMacaroonService, context=None)
-        userid = macaroon_service.find_userid(macaroon)
-        if userid is not None:
-            return str(userid)
+        return macaroon_service.find_from_raw(macaroon).user
 
     def remember(self, request, userid, **kw):
         # This is a NO-OP because our Macaroon header policy doesn't allow
@@ -99,12 +95,20 @@
         # assumes it has been configured in clients somewhere out of band.
         return []
 
-    def forget(self, request):
+    def forget(self, request, **kw):
         # This is a NO-OP because our Macaroon header policy doesn't allow
         # the ability for authentication to "forget" the user id. This
         # assumes it has been configured in clients somewhere out of band.
         return []
 
+    def authenticated_userid(self, request):
+        # Handled by MultiSecurityPolicy
+        return NotImplemented
+
+    def permits(self, request, context, permission):
+        # Handled by MultiSecurityPolicy
+        return NotImplemented
+
 
 @implementer(IAuthorizationPolicy)
 class MacaroonAuthorizationPolicy:
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py
--- a/warehouse/packaging/models.py
+++ b/warehouse/packaging/models.py
@@ -230,9 +230,11 @@ def __acl__(self):
             query.all(), key=lambda x: ["Owner", "Maintainer"].index(x.role_name)
         ):
             if role.role_name == "Owner":
-                acls.append((Allow, str(role.user.id), ["manage:project", "upload"]))
+                acls.append(
+                    (Allow, f"user:{role.user.id}", ["manage:project", "upload"])
+                )
             else:
-                acls.append((Allow, str(role.user.id), ["upload"]))
+                acls.append((Allow, f"user:{role.user.id}", ["upload"]))
 
         return acls
 
     @property
diff --git a/warehouse/utils/security_policy.py b/warehouse/utils/security_policy.py
new file mode 100644
--- /dev/null
+++ b/warehouse/utils/security_policy.py
@@ -0,0 +1,109 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
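+
+# Shared building blocks for warehouse's security policies: the
+# AuthenticationMethod enum, the mapping from users to group principals, and
+# the MultiSecurityPolicy wrapper that composes several sub-policies.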
+
+import enum
+
+from pyramid.authorization import Authenticated
+from pyramid.interfaces import ISecurityPolicy
+from pyramid.security import Denied
+from zope.interface import implementer
+
+from warehouse.accounts.models import User
+
+
+class AuthenticationMethod(enum.Enum):
+    BASIC_AUTH = "basic-auth"
+    SESSION = "session"
+    MACAROON = "macaroon"
+
+
+def _principals_for_authenticated_user(user):
+    """Apply the necessary principals to the authenticated user"""
+    principals = []
+    if user.is_superuser:
+        principals.append("group:admins")
+    if user.is_moderator or user.is_superuser:
+        principals.append("group:moderators")
+    if user.is_psf_staff or user.is_superuser:
+        principals.append("group:psf_staff")
+
+    # user must have base admin access if any admin permission
+    if principals:
+        principals.append("group:with_admin_dashboard_access")
+
+    return principals
+
+
+@implementer(ISecurityPolicy)
+class MultiSecurityPolicy:
+    """
+    A wrapper for multiple Pyramid 2.0-style "security policies", which replace
+    Pyramid 1.0's separate AuthN and AuthZ APIs.
+
+    Security policies are checked in the order provided during initialization,
+    with the following semantics:
+
+    * `identity`: Selected from the first policy to return non-`None`
+    * `authenticated_userid`: Selected from the request identity, if present
+    * `forget`: Combined from all policies
+    * `remember`: Combined from all policies
+    * `permits`: Uses the AuthZ policy passed during initialization
+
+    These semantics mostly mirror those of `pyramid-multiauth`.
+    """
+
+    def __init__(self, policies, authz):
+        self._policies = policies
+        self._authz = authz
+
+    def identity(self, request):
+        for policy in self._policies:
+            if ident := policy.identity(request):
+                return ident
+
+        return None
+
+    def authenticated_userid(self, request):
+        if request.identity and isinstance(request.identity, User):
+            return str(request.identity.id)
+        return None
+
+    def forget(self, request, **kw):
+        headers = []
+        for policy in self._policies:
+            headers.extend(policy.forget(request, **kw))
+        return headers
+
+    def remember(self, request, userid, **kw):
+        headers = []
+        for policy in self._policies:
+            headers.extend(policy.remember(request, userid, **kw))
+        return headers
+
+    def permits(self, request, context, permission):
+        identity = request.identity
+        principals = []
+        if identity is not None:
+            principals.append(Authenticated)
+
+            if isinstance(identity, User):
+                principals.append(f"user:{identity.id}")
+                principals.extend(_principals_for_authenticated_user(identity))
+            else:
+                return Denied("unimplemented")
+
+        # NOTE: Observe that the parameters passed into the underlying AuthZ
+        # policy here are not the same (or in the same order) as the ones
+        # passed into `permits` above. This is because the underlying AuthZ
+        # policy is a "legacy" Pyramid 1.0 style one that implements the
+        # `IAuthorizationPolicy` interface rather than `ISecurityPolicy`.
+        return self._authz.permits(context, principals, permission)
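As a quick illustration of the `identity` semantics documented in the docstring above (the first sub-policy to return a non-`None` identity wins, in registration order), a minimal sketch in the style of the repo's `pretend`-based tests; the two stub sub-policies and the stub AuthZ argument are placeholders, not real warehouse policies:

    import pretend

    from warehouse.utils.security_policy import MultiSecurityPolicy

    # A sub-policy that never identifies anyone, and one that always does.
    anonymous = pretend.stub(identity=lambda request: None)
    session_like = pretend.stub(identity=lambda request: "some-identity")

    # MultiSecurityPolicy(policies, authz); identity() never touches the AuthZ policy.
    policy = MultiSecurityPolicy([anonymous, session_like], pretend.stub())

    # identity() returns the first non-None result, in registration order.
    assert policy.identity(pretend.stub()) == "some-identity"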
diff --git a/tests/unit/accounts/test_auth_policy.py b/tests/unit/accounts/test_auth_policy.py
deleted file mode 100644
--- a/tests/unit/accounts/test_auth_policy.py
+++ /dev/null
@@ -1,316 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import uuid
-
-import pretend
-import pytest
-
-from pyramid import authentication
-from pyramid.interfaces import IAuthenticationPolicy, IAuthorizationPolicy
-from pyramid.security import Allowed, Denied
-from zope.interface.verify import verifyClass
-
-from warehouse.accounts import auth_policy
-from warehouse.accounts.interfaces import IUserService
-from warehouse.errors import WarehouseDenied
-
-from ...common.db.packaging import ProjectFactory
-
-
-class TestBasicAuthAuthenticationPolicy:
-    def test_verify(self):
-        assert verifyClass(
-            IAuthenticationPolicy, auth_policy.BasicAuthAuthenticationPolicy
-        )
-
-    def test_unauthenticated_userid_no_userid(self, monkeypatch):
-        extract_http_basic_credentials = pretend.call_recorder(lambda request: None)
-        monkeypatch.setattr(
-            authentication,
-            "extract_http_basic_credentials",
-            extract_http_basic_credentials,
-        )
-
-        policy = auth_policy.BasicAuthAuthenticationPolicy(check=pretend.stub())
-
-        vary_cb = pretend.stub()
-        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
-        monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)
-
-        request = pretend.stub(
-            add_response_callback=pretend.call_recorder(lambda cb: None)
-        )
-
-        assert policy.unauthenticated_userid(request) is None
-        assert extract_http_basic_credentials.calls == [pretend.call(request)]
-        assert add_vary_cb.calls == [pretend.call("Authorization")]
-        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
-
-    def test_unauthenticated_userid_with_userid(self, monkeypatch):
-        extract_http_basic_credentials = pretend.call_recorder(
-            lambda request: authentication.HTTPBasicCredentials("username", "password")
-        )
-        monkeypatch.setattr(
-            authentication,
-            "extract_http_basic_credentials",
-            extract_http_basic_credentials,
-        )
-
-        policy = auth_policy.BasicAuthAuthenticationPolicy(check=pretend.stub())
-
-        vary_cb = pretend.stub()
-        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
-        monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)
-
-        userid = uuid.uuid4()
-        service = pretend.stub(
-            find_userid=pretend.call_recorder(lambda username: userid)
-        )
-        request = pretend.stub(
-            find_service=pretend.call_recorder(lambda iface, context: service),
-            add_response_callback=pretend.call_recorder(lambda cb: None),
-        )
-
-        assert policy.unauthenticated_userid(request) == str(userid)
-        assert extract_http_basic_credentials.calls == [pretend.call(request)]
-        assert request.find_service.calls == [pretend.call(IUserService, context=None)]
-        assert service.find_userid.calls == [pretend.call("username")]
-        assert add_vary_cb.calls == [pretend.call("Authorization")]
-        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
-
-
-class TestSessionAuthenticationPolicy:
-    def test_verify(self):
-        assert verifyClass(
-            IAuthenticationPolicy, auth_policy.SessionAuthenticationPolicy
-        )
-
-    def test_unauthenticated_userid(self, monkeypatch):
-        policy = auth_policy.SessionAuthenticationPolicy()
-
-        vary_cb = pretend.stub()
-        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
-        monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb)
-
-        userid = pretend.stub()
-        request = pretend.stub(
-            session={policy.helper.userid_key: userid},
-            add_response_callback=pretend.call_recorder(lambda cb: None),
-        )
-
-        assert policy.unauthenticated_userid(request) is userid
-        assert add_vary_cb.calls == [pretend.call("Cookie")]
-        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
-
-
-class TestTwoFactorAuthorizationPolicy:
-    def test_verify(self):
-        assert verifyClass(
-            IAuthorizationPolicy, auth_policy.TwoFactorAuthorizationPolicy
-        )
-
-    def test_permits_no_active_request(self, monkeypatch):
-        get_current_request = pretend.call_recorder(lambda: None)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: pretend.stub())
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
-
-        assert result == WarehouseDenied("")
-        assert result.s == "There was no active request."
-
-    def test_permits_if_context_is_not_permitted_by_backing_policy(self, monkeypatch):
-        request = pretend.stub()
-        get_current_request = pretend.call_recorder(lambda: request)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        permits_result = Denied("Because")
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
-
-        assert result == permits_result
-
-    def test_permits_if_non_2fa_requireable_context(self, monkeypatch):
-        request = pretend.stub()
-        get_current_request = pretend.call_recorder(lambda: request)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        permits_result = Allowed("Because")
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
-
-        assert result == permits_result
-
-    def test_permits_if_context_does_not_require_2fa(self, monkeypatch, db_request):
-        db_request.registry.settings = {
-            "warehouse.two_factor_mandate.enabled": True,
-            "warehouse.two_factor_mandate.available": True,
-            "warehouse.two_factor_requirement.enabled": True,
-        }
-        get_current_request = pretend.call_recorder(lambda: db_request)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        permits_result = Allowed("Because")
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        context = ProjectFactory.create(
-            owners_require_2fa=False,
-            pypi_mandates_2fa=False,
-        )
-        result = policy.permits(context, pretend.stub(), pretend.stub())
-
-        assert result == permits_result
-
-    def test_flashes_if_context_requires_2fa_but_not_enabled(
-        self, monkeypatch, db_request
-    ):
-        db_request.registry.settings = {
-            "warehouse.two_factor_mandate.enabled": False,
-            "warehouse.two_factor_mandate.available": True,
-            "warehouse.two_factor_requirement.enabled": True,
-        }
-        db_request.session.flash = pretend.call_recorder(lambda m, queue: None)
-        db_request.user = pretend.stub(has_two_factor=False)
-        get_current_request = pretend.call_recorder(lambda: db_request)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        permits_result = Allowed("Because")
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        context = ProjectFactory.create(
-            owners_require_2fa=False,
-            pypi_mandates_2fa=True,
-        )
-        result = policy.permits(context, pretend.stub(), pretend.stub())
-
-        assert result == permits_result
-        assert db_request.session.flash.calls == [
-            pretend.call(
-                "This project is included in PyPI's two-factor mandate "
-                "for critical projects. In the future, you will be unable to "
-                "perform this action without enabling 2FA for your account",
-                queue="warning",
-            ),
-        ]
-
-    @pytest.mark.parametrize("owners_require_2fa", [True, False])
-    @pytest.mark.parametrize("pypi_mandates_2fa", [True, False])
-    @pytest.mark.parametrize("two_factor_requirement_enabled", [True, False])
-    @pytest.mark.parametrize("two_factor_mandate_available", [True, False])
-    @pytest.mark.parametrize("two_factor_mandate_enabled", [True, False])
-    def test_permits_if_user_has_2fa(
-        self,
-        monkeypatch,
-        owners_require_2fa,
-        pypi_mandates_2fa,
-        two_factor_requirement_enabled,
-        two_factor_mandate_available,
-        two_factor_mandate_enabled,
-        db_request,
-    ):
-        db_request.registry.settings = {
-            "warehouse.two_factor_requirement.enabled": two_factor_requirement_enabled,
-            "warehouse.two_factor_mandate.available": two_factor_mandate_available,
-            "warehouse.two_factor_mandate.enabled": two_factor_mandate_enabled,
-        }
-        user = pretend.stub(has_two_factor=True)
-        db_request.user = user
-        get_current_request = pretend.call_recorder(lambda: db_request)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        permits_result = Allowed("Because")
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        context = ProjectFactory.create(
-            owners_require_2fa=owners_require_2fa, pypi_mandates_2fa=pypi_mandates_2fa
-        )
-        result = policy.permits(context, pretend.stub(), pretend.stub())
-
-        assert result == permits_result
-
-    @pytest.mark.parametrize(
-        "owners_require_2fa, pypi_mandates_2fa, reason",
-        [
-            (True, False, "owners_require_2fa"),
-            (False, True, "pypi_mandates_2fa"),
-            (True, True, "pypi_mandates_2fa"),
-        ],
-    )
-    def test_denies_if_2fa_is_required_but_user_doesnt_have_2fa(
-        self,
-        monkeypatch,
-        owners_require_2fa,
-        pypi_mandates_2fa,
-        reason,
-        db_request,
-    ):
-        db_request.registry.settings = {
-            "warehouse.two_factor_requirement.enabled": owners_require_2fa,
-            "warehouse.two_factor_mandate.enabled": pypi_mandates_2fa,
-        }
-        user = pretend.stub(has_two_factor=False)
-        db_request.user = user
-        get_current_request = pretend.call_recorder(lambda: db_request)
-        monkeypatch.setattr(auth_policy, "get_current_request", get_current_request)
-
-        permits_result = Allowed("Because")
-        backing_policy = pretend.stub(
-            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-        context = ProjectFactory.create(
-            owners_require_2fa=owners_require_2fa, pypi_mandates_2fa=pypi_mandates_2fa
-        )
-        result = policy.permits(context, pretend.stub(), pretend.stub())
-
-        summary = {
-            "owners_require_2fa": (
-                "This project requires two factor authentication to be enabled "
-                "for all contributors.",
-            ),
-            "pypi_mandates_2fa": (
-                "PyPI requires two factor authentication to be enabled "
-                "for all contributors to this project.",
-            ),
-        }[reason]
-
-        assert result == WarehouseDenied(summary, reason="two_factor_required")
-
-    def test_principals_allowed_by_permission(self):
-        principals = pretend.stub()
-        backing_policy = pretend.stub(
-            principals_allowed_by_permission=pretend.call_recorder(
-                lambda *a: principals
-            )
-        )
-        policy = auth_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
-
-        assert (
-            policy.principals_allowed_by_permission(pretend.stub(), pretend.stub())
-            is principals
-        )
diff --git a/tests/unit/accounts/test_core.py b/tests/unit/accounts/test_core.py
--- a/tests/unit/accounts/test_core.py
+++ b/tests/unit/accounts/test_core.py
@@ -19,13 +19,14 @@
 from pyramid.httpexceptions import HTTPUnauthorized
 
 from warehouse import accounts
-from warehouse.accounts import AuthenticationMethod
+from warehouse.accounts import security_policy
 from warehouse.accounts.interfaces import (
     IPasswordBreachedService,
     ITokenService,
     IUserService,
 )
 from warehouse.accounts.models import DisableReason
+from warehouse.accounts.security_policy import _basic_auth_check
 from warehouse.accounts.services import (
     HaveIBeenPwnedPasswordBreachedService,
     TokenServiceFactory,
@@ -43,7 +44,7 @@ def test_invalid_route(self, pyramid_request, pyramid_services):
             pretend.stub(), IPasswordBreachedService, None
         )
         pyramid_request.matched_route = pretend.stub(name="route_name")
-        assert accounts._basic_auth_check("myuser", "mypass", pyramid_request) is None
+        assert _basic_auth_check("myuser", "mypass", pyramid_request) is False
         assert service.find_userid.calls == []
 
     def test_with_no_user(self, pyramid_request, pyramid_services):
@@ -53,7 +54,7 @@ def test_with_no_user(self, pyramid_request, pyramid_services):
             pretend.stub(), IPasswordBreachedService, None
        )
         pyramid_request.matched_route = pretend.stub(name="forklift.legacy.file_upload")
-        assert accounts._basic_auth_check("myuser", "mypass", pyramid_request) is None
+        assert _basic_auth_check("myuser", "mypass", pyramid_request) is False
         assert service.find_userid.calls == [pretend.call("myuser")]
 
     def test_with_invalid_password(self, pyramid_request, pyramid_services):
@@ -77,9 +78,7 @@ def test_with_invalid_password(self, pyramid_request, pyramid_services):
         pyramid_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/")
 
         with pytest.raises(BasicAuthFailedPassword) as excinfo:
-            assert (
-                accounts._basic_auth_check("myuser", "mypass", pyramid_request) is None
-            )
+            assert _basic_auth_check("myuser", "mypass", pyramid_request) is None
 
         assert excinfo.value.status == (
             "403 Invalid or non-existent authentication information. "
@@ -124,9 +123,7 @@ def test_with_disabled_user_no_reason(self, pyramid_request, pyramid_services):
         pyramid_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/")
 
         with pytest.raises(HTTPUnauthorized) as excinfo:
-            assert (
-                accounts._basic_auth_check("myuser", "mypass", pyramid_request) is None
-            )
+            assert _basic_auth_check("myuser", "mypass", pyramid_request) is None
 
         assert excinfo.value.status == "401 Account is disabled."
         assert service.find_userid.calls == [pretend.call("myuser")]
@@ -154,9 +151,7 @@ def test_with_disabled_user_compromised_pw(self, pyramid_request, pyramid_services):
         pyramid_request.matched_route = pretend.stub(name="forklift.legacy.file_upload")
 
         with pytest.raises(BasicAuthBreachedPassword) as excinfo:
-            assert (
-                accounts._basic_auth_check("myuser", "mypass", pyramid_request) is None
-            )
+            assert _basic_auth_check("myuser", "mypass", pyramid_request) is None
 
         assert excinfo.value.status == "401 Bad Password!"
         assert service.find_userid.calls == [pretend.call("myuser")]
@@ -188,9 +183,7 @@ def test_with_disabled_user_frozen(self, pyramid_request, pyramid_services):
         pyramid_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/")
 
         with pytest.raises(HTTPUnauthorized) as excinfo:
-            assert (
-                accounts._basic_auth_check("myuser", "mypass", pyramid_request) is None
-            )
+            assert _basic_auth_check("myuser", "mypass", pyramid_request) is None
 
         assert excinfo.value.status == "401 Account is frozen."
         assert service.find_userid.calls == [pretend.call("myuser")]
@@ -198,10 +191,6 @@ def test_with_disabled_user_frozen(self, pyramid_request, pyramid_services):
         assert service.is_disabled.calls == [pretend.call(1)]
 
     def test_with_valid_password(self, monkeypatch, pyramid_request, pyramid_services):
-        principals = pretend.stub()
-        authenticate = pretend.call_recorder(lambda userid, request: principals)
-        monkeypatch.setattr(accounts, "_authenticate", authenticate)
-
         user = pretend.stub(id=2, has_two_factor=False)
         service = pretend.stub(
             get_user=pretend.call_recorder(lambda user_id: user),
@@ -226,10 +215,7 @@ def test_with_valid_password(self, monkeypatch, pyramid_request, pyramid_services):
         now = datetime.datetime.utcnow()
 
         with freezegun.freeze_time(now):
-            assert (
-                accounts._basic_auth_check("myuser", "mypass", pyramid_request)
-                is principals
-            )
+            assert _basic_auth_check("myuser", "mypass", pyramid_request) is True
 
         assert service.find_userid.calls == [pretend.call("myuser")]
         assert service.get_user.calls == [pretend.call(2)]
@@ -245,15 +231,13 @@ def test_with_valid_password(self, monkeypatch, pyramid_request, pyramid_services):
             pretend.call("mypass", tags=["method:auth", "auth_method:basic"])
         ]
         assert service.update_user.calls == [pretend.call(2, last_login=now)]
-        assert authenticate.calls == [pretend.call(2, pyramid_request)]
-        assert pyramid_request.authentication_method == AuthenticationMethod.BASIC_AUTH
 
     def test_via_basic_auth_compromised(
         self, monkeypatch, pyramid_request, pyramid_services
     ):
         send_email = pretend.call_recorder(lambda *a, **kw: None)
         monkeypatch.setattr(
-            accounts, "send_password_compromised_email_hibp", send_email
+            security_policy, "send_password_compromised_email_hibp", send_email
         )
 
         user = pretend.stub(id=2)
@@ -279,7 +263,7 @@ def test_via_basic_auth_compromised(
         pyramid_request.matched_route = pretend.stub(name="forklift.legacy.file_upload")
 
         with pytest.raises(BasicAuthBreachedPassword) as excinfo:
-            accounts._basic_auth_check("myuser", "mypass", pyramid_request)
+            _basic_auth_check("myuser", "mypass", pyramid_request)
 
         assert excinfo.value.status == "401 Bad Password!"
         assert service.find_userid.calls == [pretend.call("myuser")]
@@ -301,149 +285,6 @@ def test_via_basic_auth_compromised(
         assert send_email.calls == [pretend.call(pyramid_request, user)]
 
 
-class TestAuthenticate:
-    @pytest.mark.parametrize(
-        (
-            "is_superuser",
-            "is_moderator",
-            "is_psf_staff",
-            "password_out_of_date",
-            "expected",
-        ),
-        [
-            (False, False, False, False, []),
-            (False, False, False, True, None),
-            (
-                True,
-                False,
-                False,
-                False,
-                [
-                    "group:admins",
-                    "group:moderators",
-                    "group:psf_staff",
-                    "group:with_admin_dashboard_access",
-                ],
-            ),
-            (
-                False,
-                True,
-                False,
-                False,
-                ["group:moderators", "group:with_admin_dashboard_access"],
-            ),
-            (
-                True,
-                True,
-                False,
-                False,
-                [
-                    "group:admins",
-                    "group:moderators",
-                    "group:psf_staff",
-                    "group:with_admin_dashboard_access",
-                ],
-            ),
-            (
-                False,
-                False,
-                True,
-                False,
-                ["group:psf_staff", "group:with_admin_dashboard_access"],
-            ),
-            (
-                False,
-                True,
-                True,
-                False,
-                [
-                    "group:moderators",
-                    "group:psf_staff",
-                    "group:with_admin_dashboard_access",
-                ],
-            ),
-        ],
-    )
-    def test_with_user(
-        self,
-        pyramid_request,
-        pyramid_services,
-        is_superuser,
-        is_moderator,
-        is_psf_staff,
-        password_out_of_date,
-        expected,
-    ):
-        user = pretend.stub(
-            is_superuser=is_superuser,
-            is_moderator=is_moderator,
-            is_psf_staff=is_psf_staff,
-        )
-        service = pretend.stub(
-            get_user=pretend.call_recorder(lambda userid: user),
-            get_password_timestamp=lambda userid: 0,
-        )
-        pyramid_services.register_service(service, IUserService, None)
-        pyramid_request.session.password_outdated = lambda ts: password_out_of_date
-        pyramid_request.session.invalidate = pretend.call_recorder(lambda: None)
-        pyramid_request.session.flash = pretend.call_recorder(
-            lambda msg, queue=None: None
-        )
-
-        assert accounts._authenticate(1, pyramid_request) == expected
-        assert service.get_user.calls == [pretend.call(1)]
-
-        if password_out_of_date:
-            assert pyramid_request.session.invalidate.calls == [pretend.call()]
-            assert pyramid_request.session.flash.calls == [
-                pretend.call("Session invalidated by password change", queue="error")
-            ]
-        else:
-            assert pyramid_request.session.invalidate.calls == []
-            assert pyramid_request.session.flash.calls == []
-
-    def test_without_user(self):
-        service = pretend.stub(get_user=pretend.call_recorder(lambda userid: None))
-        request = pretend.stub(find_service=lambda iface, context: service)
-
-        assert accounts._authenticate(1, request) is None
-        assert service.get_user.calls == [pretend.call(1)]
-
-
-class TestSessionAuthenticate:
-    def test_route_matched_name_bad(self, monkeypatch):
-        authenticate_obj = pretend.call_recorder(lambda *a, **kw: True)
-        monkeypatch.setattr(accounts, "_authenticate", authenticate_obj)
-        request = pretend.stub(
-            matched_route=pretend.stub(name="forklift.legacy.file_upload")
-        )
-        assert accounts._session_authenticate(1, request) is None
-        assert authenticate_obj.calls == []
-        assert request.authentication_method == AuthenticationMethod.SESSION
-
-    def test_route_matched_name_ok(self, monkeypatch):
-        authenticate_obj = pretend.call_recorder(lambda *a, **kw: True)
-        monkeypatch.setattr(accounts, "_authenticate", authenticate_obj)
-        request = pretend.stub(
-            matched_route=pretend.stub(name="includes.current-user-indicator")
-        )
-        assert accounts._session_authenticate(1, request) is True
-        assert authenticate_obj.calls == [pretend.call(1, request)]
-        assert request.authentication_method == AuthenticationMethod.SESSION
-
-
-class TestMacaroonAuthenticate:
-    def test_macaroon_authenticate(self, monkeypatch):
-        authenticate_obj = pretend.call_recorder(lambda *a, **kw: True)
-        monkeypatch.setattr(accounts, "_authenticate", authenticate_obj)
-        request = pretend.stub(
-            matched_route=pretend.stub(name="includes.current-user-indicator")
-        )
-        assert accounts._macaroon_authenticate(1, request) is True
-        assert authenticate_obj.calls == [pretend.call(1, request)]
-        assert request.authentication_method == AuthenticationMethod.MACAROON
-
-
 class TestUser:
     def test_with_user(self):
         user = pretend.stub()
@@ -472,24 +313,28 @@ def test_without_userid(self):
 
 
 def test_includeme(monkeypatch):
-    macaroon_authn_obj = pretend.stub()
-    macaroon_authn_cls = pretend.call_recorder(lambda callback: macaroon_authn_obj)
-    basic_authn_obj = pretend.stub()
-    basic_authn_cls = pretend.call_recorder(lambda check: basic_authn_obj)
-    session_authn_obj = pretend.stub()
-    session_authn_cls = pretend.call_recorder(lambda callback: session_authn_obj)
-    authn_obj = pretend.stub()
-    authn_cls = pretend.call_recorder(lambda *a: authn_obj)
     authz_obj = pretend.stub()
     authz_cls = pretend.call_recorder(lambda *a, **kw: authz_obj)
-    monkeypatch.setattr(accounts, "BasicAuthAuthenticationPolicy", basic_authn_cls)
-    monkeypatch.setattr(accounts, "SessionAuthenticationPolicy", session_authn_cls)
-    monkeypatch.setattr(accounts, "MacaroonAuthenticationPolicy", macaroon_authn_cls)
-    monkeypatch.setattr(accounts, "MultiAuthenticationPolicy", authn_cls)
     monkeypatch.setattr(accounts, "ACLAuthorizationPolicy", authz_cls)
     monkeypatch.setattr(accounts, "MacaroonAuthorizationPolicy", authz_cls)
     monkeypatch.setattr(accounts, "TwoFactorAuthorizationPolicy", authz_cls)
 
+    multi_policy_obj = pretend.stub()
+    multi_policy_cls = pretend.call_recorder(lambda ps, authz: multi_policy_obj)
+    monkeypatch.setattr(accounts, "MultiSecurityPolicy", multi_policy_cls)
+
+    session_policy_obj = pretend.stub()
+    session_policy_cls = pretend.call_recorder(lambda: session_policy_obj)
+    monkeypatch.setattr(accounts, "SessionSecurityPolicy", session_policy_cls)
+
+    basic_policy_obj = pretend.stub()
+    basic_policy_cls = pretend.call_recorder(lambda: basic_policy_obj)
+    monkeypatch.setattr(accounts, "BasicAuthSecurityPolicy", basic_policy_cls)
+
+    macaroon_policy_obj = pretend.stub()
+    macaroon_policy_cls = pretend.call_recorder(lambda: macaroon_policy_obj)
+    monkeypatch.setattr(accounts, "MacaroonSecurityPolicy", macaroon_policy_cls)
+
     config = pretend.stub(
         registry=pretend.stub(
             settings={
@@ -504,8 +349,7 @@ def test_includeme(monkeypatch):
             lambda factory, iface, name=None: None
         ),
         add_request_method=pretend.call_recorder(lambda f, name, reify: None),
-        set_authentication_policy=pretend.call_recorder(lambda p: None),
-        set_authorization_policy=pretend.call_recorder(lambda p: None),
+        set_security_policy=pretend.call_recorder(lambda p: None),
         maybe_dotted=pretend.call_recorder(lambda path: path),
         add_route_predicate=pretend.call_recorder(lambda name, cls: None),
     )
@@ -536,17 +380,9 @@ def test_includeme(monkeypatch):
     assert config.add_request_method.calls == [
         pretend.call(accounts._user, name="user", reify=True)
     ]
-    assert config.set_authentication_policy.calls == [pretend.call(authn_obj)]
-    assert config.set_authorization_policy.calls == [pretend.call(authz_obj)]
-    assert basic_authn_cls.calls == [pretend.call(check=accounts._basic_auth_check)]
-    assert session_authn_cls.calls == [
-        pretend.call(callback=accounts._session_authenticate)
-    ]
-    assert authn_cls.calls == [
-        pretend.call([session_authn_obj, basic_authn_obj, macaroon_authn_obj])
-    ]
-    assert authz_cls.calls == [
-        pretend.call(),
-        pretend.call(policy=authz_obj),
-        pretend.call(policy=authz_obj),
+    assert config.set_security_policy.calls == [pretend.call(multi_policy_obj)]
+    assert multi_policy_cls.calls == [
+        pretend.call(
+            [session_policy_obj, basic_policy_obj, macaroon_policy_obj], authz_obj
+        )
     ]
diff --git a/tests/unit/accounts/test_security_policy.py b/tests/unit/accounts/test_security_policy.py
new file mode 100644
--- /dev/null
+++ b/tests/unit/accounts/test_security_policy.py
@@ -0,0 +1,526 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pretend
+import pytest
+
+from pyramid.interfaces import IAuthorizationPolicy, ISecurityPolicy
+from pyramid.security import Allowed, Denied
+from zope.interface.verify import verifyClass
+
+from warehouse.accounts import security_policy
+from warehouse.accounts.interfaces import IUserService
+from warehouse.errors import WarehouseDenied
+from warehouse.utils.security_policy import AuthenticationMethod
+
+from ...common.db.packaging import ProjectFactory
+
+
+class TestBasicAuthSecurityPolicy:
+    def test_verify(self):
+        assert verifyClass(
+            ISecurityPolicy,
+            security_policy.BasicAuthSecurityPolicy,
+        )
+
+    def test_noops(self):
+        policy = security_policy.BasicAuthSecurityPolicy()
+        assert policy.authenticated_userid(pretend.stub()) == NotImplemented
+        assert (
+            policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
+            == NotImplemented
+        )
+
+    def test_forget_and_remember(self):
+        policy = security_policy.BasicAuthSecurityPolicy()
+
+        assert policy.forget(pretend.stub()) == []
+        assert policy.remember(pretend.stub(), pretend.stub()) == [
+            ("WWW-Authenticate", 'Basic realm="Realm"')
+        ]
+
+    def test_identity_no_credentials(self, monkeypatch):
+        extract_http_basic_credentials = pretend.call_recorder(lambda request: None)
+        monkeypatch.setattr(
+            security_policy,
+            "extract_http_basic_credentials",
+            extract_http_basic_credentials,
+        )
+
+        policy = security_policy.BasicAuthSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None)
+        )
+
+        assert policy.identity(request) is None
+        assert extract_http_basic_credentials.calls == [pretend.call(request)]
+        assert add_vary_cb.calls == [pretend.call("Authorization")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+    def test_identity_credentials_fail(self, monkeypatch):
+        creds = (pretend.stub(), pretend.stub())
+        extract_http_basic_credentials = pretend.call_recorder(lambda request: creds)
+        monkeypatch.setattr(
+            security_policy,
+            "extract_http_basic_credentials",
+            extract_http_basic_credentials,
+        )
+
+        basic_auth_check = pretend.call_recorder(lambda u, p, r: False)
+        monkeypatch.setattr(security_policy, "_basic_auth_check", basic_auth_check)
+
+        policy = security_policy.BasicAuthSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None)
+        )
+
+        assert policy.identity(request) is None
+        assert extract_http_basic_credentials.calls == [pretend.call(request)]
+        assert basic_auth_check.calls == [pretend.call(creds[0], creds[1], request)]
+        assert add_vary_cb.calls == [pretend.call("Authorization")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+    def test_identity(self, monkeypatch):
+        creds = (pretend.stub(), pretend.stub())
+        extract_http_basic_credentials = pretend.call_recorder(lambda request: creds)
+        monkeypatch.setattr(
+            security_policy,
+            "extract_http_basic_credentials",
+            extract_http_basic_credentials,
+        )
+
+        basic_auth_check = pretend.call_recorder(lambda u, p, r: True)
+        monkeypatch.setattr(security_policy, "_basic_auth_check", basic_auth_check)
+
+        policy = security_policy.BasicAuthSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        user = pretend.stub()
+        user_service = pretend.stub(
+            get_user_by_username=pretend.call_recorder(lambda u: user)
+        )
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None),
+            find_service=pretend.call_recorder(lambda a, **kw: user_service),
+        )
+
+        assert policy.identity(request) is user
+        assert request.authentication_method == AuthenticationMethod.BASIC_AUTH
+        assert extract_http_basic_credentials.calls == [pretend.call(request)]
+        assert basic_auth_check.calls == [pretend.call(creds[0], creds[1], request)]
+        assert request.find_service.calls == [pretend.call(IUserService, context=None)]
+        assert user_service.get_user_by_username.calls == [pretend.call(creds[0])]
+
+        assert add_vary_cb.calls == [pretend.call("Authorization")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+
+class TestSessionSecurityPolicy:
+    def test_verify(self):
+        assert verifyClass(
+            ISecurityPolicy,
+            security_policy.SessionSecurityPolicy,
+        )
+
+    def test_noops(self):
+        policy = security_policy.SessionSecurityPolicy()
+        assert policy.authenticated_userid(pretend.stub()) == NotImplemented
+        assert (
+            policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
+            == NotImplemented
+        )
+
+    def test_forget_and_remember(self, monkeypatch):
+        request = pretend.stub()
+        userid = pretend.stub()
+        forgets = pretend.stub()
+        remembers = pretend.stub()
+        session_helper_obj = pretend.stub(
+            forget=pretend.call_recorder(lambda r, **kw: forgets),
+            remember=pretend.call_recorder(lambda r, uid, **kw: remembers),
+        )
+        session_helper_cls = pretend.call_recorder(lambda: session_helper_obj)
+        monkeypatch.setattr(
+            security_policy, "SessionAuthenticationHelper", session_helper_cls
+        )
+
+        policy = security_policy.SessionSecurityPolicy()
+        assert session_helper_cls.calls == [pretend.call()]
+
+        assert policy.forget(request, foo=None) == forgets
+        assert session_helper_obj.forget.calls == [pretend.call(request, foo=None)]
+
+        assert policy.remember(request, userid, foo=None) == remembers
+        assert session_helper_obj.remember.calls == [
+            pretend.call(request, userid, foo=None)
+        ]
+
+    def test_identity_no_session(self, monkeypatch):
+        session_helper_obj = pretend.stub(
+            authenticated_userid=pretend.call_recorder(lambda r: None)
+        )
+        session_helper_cls = pretend.call_recorder(lambda: session_helper_obj)
+        monkeypatch.setattr(
+            security_policy, "SessionAuthenticationHelper", session_helper_cls
+        )
+
+        policy = security_policy.SessionSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None)
+        )
+
+        assert policy.identity(request) is None
+        assert request.authentication_method == AuthenticationMethod.SESSION
+        assert session_helper_obj.authenticated_userid.calls == [pretend.call(request)]
+        assert session_helper_cls.calls == [pretend.call()]
+
+        assert add_vary_cb.calls == [pretend.call("Cookie")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+    def test_identity_invalid_route(self, monkeypatch):
+        session_helper_obj = pretend.stub(
+            authenticated_userid=pretend.call_recorder(lambda r: pretend.stub())
+        )
+        session_helper_cls = pretend.call_recorder(lambda: session_helper_obj)
+        monkeypatch.setattr(
+            security_policy, "SessionAuthenticationHelper", session_helper_cls
+        )
+
+        policy = security_policy.SessionSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None),
+            matched_route=pretend.stub(name="forklift.legacy.file_upload"),
+        )
+
+        assert policy.identity(request) is None
+        assert request.authentication_method == AuthenticationMethod.SESSION
+        assert session_helper_obj.authenticated_userid.calls == [pretend.call(request)]
+        assert session_helper_cls.calls == [pretend.call()]
+
+        assert add_vary_cb.calls == [pretend.call("Cookie")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+    def test_identity_password_outdated(self, monkeypatch):
+        userid = pretend.stub()
+        session_helper_obj = pretend.stub(
+            authenticated_userid=pretend.call_recorder(lambda r: userid)
+        )
+        session_helper_cls = pretend.call_recorder(lambda: session_helper_obj)
+        monkeypatch.setattr(
+            security_policy, "SessionAuthenticationHelper", session_helper_cls
+        )
+
+        policy = security_policy.SessionSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        timestamp = pretend.stub()
+        user_service = pretend.stub(
+            get_password_timestamp=pretend.call_recorder(lambda uid: timestamp),
+        )
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None),
+            matched_route=pretend.stub(name="a.permitted.route"),
+            find_service=pretend.call_recorder(lambda i, **kw: user_service),
+            session=pretend.stub(
+                password_outdated=pretend.call_recorder(lambda ts: True),
+                invalidate=pretend.call_recorder(lambda: None),
+                flash=pretend.call_recorder(lambda *a, **kw: None),
+            ),
+        )
+
+        assert policy.identity(request) is None
+        assert request.authentication_method == AuthenticationMethod.SESSION
+        assert session_helper_obj.authenticated_userid.calls == [pretend.call(request)]
+        assert session_helper_cls.calls == [pretend.call()]
+        assert request.find_service.calls == [pretend.call(IUserService, context=None)]
+        assert request.session.password_outdated.calls == [pretend.call(timestamp)]
+        assert user_service.get_password_timestamp.calls == [pretend.call(userid)]
+        assert request.session.invalidate.calls == [pretend.call()]
+        assert request.session.flash.calls == [
+            pretend.call("Session invalidated by password change", queue="error")
+        ]
+
+        assert add_vary_cb.calls == [pretend.call("Cookie")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+    def test_identity(self, monkeypatch):
+        userid = pretend.stub()
+        session_helper_obj = pretend.stub(
+            authenticated_userid=pretend.call_recorder(lambda r: userid)
+        )
+        session_helper_cls = pretend.call_recorder(lambda: session_helper_obj)
+        monkeypatch.setattr(
+            security_policy, "SessionAuthenticationHelper", session_helper_cls
+        )
+
+        policy = security_policy.SessionSecurityPolicy()
+
+        vary_cb = pretend.stub()
+        add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
+        monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
+
+        user = pretend.stub()
+        timestamp = pretend.stub()
+        user_service = pretend.stub(
+            get_user=pretend.call_recorder(lambda uid: user),
+            get_password_timestamp=pretend.call_recorder(lambda uid: timestamp),
+        )
+        request = pretend.stub(
+            add_response_callback=pretend.call_recorder(lambda cb: None),
+            matched_route=pretend.stub(name="a.permitted.route"),
+            find_service=pretend.call_recorder(lambda i, **kw: user_service),
+            session=pretend.stub(
+                password_outdated=pretend.call_recorder(lambda ts: False)
+            ),
+        )
+
+        assert policy.identity(request) is user
+        assert request.authentication_method == AuthenticationMethod.SESSION
+        assert session_helper_obj.authenticated_userid.calls == [pretend.call(request)]
+        assert session_helper_cls.calls == [pretend.call()]
+        assert request.find_service.calls == [pretend.call(IUserService, context=None)]
+        assert request.session.password_outdated.calls == [pretend.call(timestamp)]
+        assert user_service.get_password_timestamp.calls == [pretend.call(userid)]
+        assert user_service.get_user.calls == [pretend.call(userid)]
+
+        assert add_vary_cb.calls == [pretend.call("Cookie")]
+        assert request.add_response_callback.calls == [pretend.call(vary_cb)]
+
+
+class TestTwoFactorAuthorizationPolicy:
+    def test_verify(self):
+        assert verifyClass(
+            IAuthorizationPolicy, security_policy.TwoFactorAuthorizationPolicy
+        )
+
+    def test_permits_no_active_request(self, monkeypatch):
+        get_current_request = pretend.call_recorder(lambda: None)
+        monkeypatch.setattr(
+            security_policy, "get_current_request", get_current_request
+        )
+
+        backing_policy = pretend.stub(
+            permits=pretend.call_recorder(lambda *a, **kw: pretend.stub())
+        )
+        policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
+        result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
+
+        assert result == WarehouseDenied("")
+        assert result.s == "There was no active request."
+
+    def test_permits_if_context_is_not_permitted_by_backing_policy(self, monkeypatch):
+        request = pretend.stub()
+        get_current_request = pretend.call_recorder(lambda: request)
+        monkeypatch.setattr(
+            security_policy, "get_current_request", get_current_request
+        )
+
+        permits_result = Denied("Because")
+        backing_policy = pretend.stub(
+            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
+        )
+        policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
+        result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
+
+        assert result == permits_result
+
+    def test_permits_if_non_2fa_requireable_context(self, monkeypatch):
+        request = pretend.stub()
+        get_current_request = pretend.call_recorder(lambda: request)
+        monkeypatch.setattr(
+            security_policy, "get_current_request", get_current_request
+        )
+
+        permits_result = Allowed("Because")
+        backing_policy = pretend.stub(
+            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
+        )
+        policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
+        result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub())
+
+        assert result == permits_result
+
+    def test_permits_if_context_does_not_require_2fa(self, monkeypatch, db_request):
+        db_request.registry.settings = {
+            "warehouse.two_factor_mandate.enabled": True,
+            "warehouse.two_factor_mandate.available": True,
+            "warehouse.two_factor_requirement.enabled": True,
+        }
+        get_current_request = pretend.call_recorder(lambda: db_request)
+        monkeypatch.setattr(
+            security_policy, "get_current_request", get_current_request
+        )
+
+        permits_result = Allowed("Because")
+        backing_policy = pretend.stub(
+            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
+        )
+        policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
+        context = ProjectFactory.create(
+            owners_require_2fa=False,
+            pypi_mandates_2fa=False,
+        )
+        result = policy.permits(context, pretend.stub(), pretend.stub())
+
+        assert result == permits_result
+
+    def test_flashes_if_context_requires_2fa_but_not_enabled(
+        self, monkeypatch, db_request
+    ):
+        db_request.registry.settings = {
+            "warehouse.two_factor_mandate.enabled": False,
+            "warehouse.two_factor_mandate.available": True,
+            "warehouse.two_factor_requirement.enabled": True,
+        }
+        db_request.session.flash = pretend.call_recorder(lambda m, queue: None)
+        db_request.user = pretend.stub(has_two_factor=False)
+        get_current_request = pretend.call_recorder(lambda: db_request)
+        monkeypatch.setattr(
+            security_policy, "get_current_request", get_current_request
+        )
+
+        permits_result = Allowed("Because")
+        backing_policy = pretend.stub(
+            permits=pretend.call_recorder(lambda *a, **kw: permits_result)
+        )
+        policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy)
+        context = ProjectFactory.create(
+            owners_require_2fa=False,
+            pypi_mandates_2fa=True,
+        )
+        result = policy.permits(context, pretend.stub(), pretend.stub())
+
+        assert result == permits_result
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                "This project is included in PyPI's two-factor mandate "
+                "for critical projects. 
In the future, you will be unable to " + "perform this action without enabling 2FA for your account", + queue="warning", + ), + ] + + @pytest.mark.parametrize("owners_require_2fa", [True, False]) + @pytest.mark.parametrize("pypi_mandates_2fa", [True, False]) + @pytest.mark.parametrize("two_factor_requirement_enabled", [True, False]) + @pytest.mark.parametrize("two_factor_mandate_available", [True, False]) + @pytest.mark.parametrize("two_factor_mandate_enabled", [True, False]) + def test_permits_if_user_has_2fa( + self, + monkeypatch, + owners_require_2fa, + pypi_mandates_2fa, + two_factor_requirement_enabled, + two_factor_mandate_available, + two_factor_mandate_enabled, + db_request, + ): + db_request.registry.settings = { + "warehouse.two_factor_requirement.enabled": two_factor_requirement_enabled, + "warehouse.two_factor_mandate.available": two_factor_mandate_available, + "warehouse.two_factor_mandate.enabled": two_factor_mandate_enabled, + } + user = pretend.stub(has_two_factor=True) + db_request.user = user + get_current_request = pretend.call_recorder(lambda: db_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) + + permits_result = Allowed("Because") + backing_policy = pretend.stub( + permits=pretend.call_recorder(lambda *a, **kw: permits_result) + ) + policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy) + context = ProjectFactory.create( + owners_require_2fa=owners_require_2fa, pypi_mandates_2fa=pypi_mandates_2fa + ) + result = policy.permits(context, pretend.stub(), pretend.stub()) + + assert result == permits_result + + @pytest.mark.parametrize( + "owners_require_2fa, pypi_mandates_2fa, reason", + [ + (True, False, "owners_require_2fa"), + (False, True, "pypi_mandates_2fa"), + (True, True, "pypi_mandates_2fa"), + ], + ) + def test_denies_if_2fa_is_required_but_user_doesnt_have_2fa( + self, + monkeypatch, + owners_require_2fa, + pypi_mandates_2fa, + reason, + db_request, + ): + db_request.registry.settings = { + "warehouse.two_factor_requirement.enabled": owners_require_2fa, + "warehouse.two_factor_mandate.enabled": pypi_mandates_2fa, + } + user = pretend.stub(has_two_factor=False) + db_request.user = user + get_current_request = pretend.call_recorder(lambda: db_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) + + permits_result = Allowed("Because") + backing_policy = pretend.stub( + permits=pretend.call_recorder(lambda *a, **kw: permits_result) + ) + policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy) + context = ProjectFactory.create( + owners_require_2fa=owners_require_2fa, pypi_mandates_2fa=pypi_mandates_2fa + ) + result = policy.permits(context, pretend.stub(), pretend.stub()) + + summary = { + "owners_require_2fa": ( + "This project requires two factor authentication to be enabled " + "for all contributors.", + ), + "pypi_mandates_2fa": ( + "PyPI requires two factor authentication to be enabled " + "for all contributors to this project.", + ), + }[reason] + + assert result == WarehouseDenied(summary, reason="two_factor_required") + + def test_principals_allowed_by_permission(self): + principals = pretend.stub() + backing_policy = pretend.stub( + principals_allowed_by_permission=pretend.call_recorder( + lambda *a: principals + ) + ) + policy = security_policy.TwoFactorAuthorizationPolicy(policy=backing_policy) + + assert ( + policy.principals_allowed_by_permission(pretend.stub(), pretend.stub()) + is principals + ) diff --git 
a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -33,7 +33,6 @@ from wtforms.form import Form from wtforms.validators import ValidationError -from warehouse.accounts import AuthenticationMethod from warehouse.admin.flags import AdminFlag, AdminFlagValue from warehouse.classifiers.models import Classifier from warehouse.forklift import legacy @@ -50,6 +49,7 @@ Role, ) from warehouse.packaging.tasks import update_bigquery_release_files +from warehouse.utils.security_policy import AuthenticationMethod from ...common.db.accounts import EmailFactory, UserFactory from ...common.db.classifiers import ClassifierFactory diff --git a/tests/unit/macaroons/test_auth_policy.py b/tests/unit/macaroons/test_security_policy.py similarity index 61% rename from tests/unit/macaroons/test_auth_policy.py rename to tests/unit/macaroons/test_security_policy.py --- a/tests/unit/macaroons/test_auth_policy.py +++ b/tests/unit/macaroons/test_security_policy.py @@ -10,16 +10,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -import uuid import pretend import pytest -from pyramid.interfaces import IAuthenticationPolicy, IAuthorizationPolicy +from pyramid.interfaces import IAuthorizationPolicy, ISecurityPolicy from pyramid.security import Denied from zope.interface.verify import verifyClass -from warehouse.macaroons import auth_policy +from warehouse.macaroons import security_policy from warehouse.macaroons.interfaces import IMacaroonService from warehouse.macaroons.services import InvalidMacaroonError @@ -39,7 +38,7 @@ def test_extract_http_macaroon(auth, result): headers=pretend.stub(get=pretend.call_recorder(lambda k: auth)) ) - assert auth_policy._extract_http_macaroon(request) == result + assert security_policy._extract_http_macaroon(request) == result @pytest.mark.parametrize( @@ -53,123 +52,99 @@ def test_extract_http_macaroon(auth, result): ], ) def test_extract_basic_macaroon(auth, result): - assert auth_policy._extract_basic_macaroon(auth) == result + assert security_policy._extract_basic_macaroon(auth) == result -class TestMacaroonAuthenticationPolicy: +class TestMacaroonSecurityPolicy: def test_verify(self): assert verifyClass( - IAuthenticationPolicy, auth_policy.MacaroonAuthenticationPolicy + ISecurityPolicy, + security_policy.MacaroonSecurityPolicy, ) - def test_unauthenticated_userid_invalid_macaroon(self, monkeypatch): - _extract_http_macaroon = pretend.call_recorder(lambda r: None) - monkeypatch.setattr( - auth_policy, "_extract_http_macaroon", _extract_http_macaroon + def test_noops(self): + policy = security_policy.MacaroonSecurityPolicy() + assert policy.authenticated_userid(pretend.stub()) == NotImplemented + assert ( + policy.permits(pretend.stub(), pretend.stub(), pretend.stub()) + == NotImplemented ) - policy = auth_policy.MacaroonAuthenticationPolicy() + def test_forget_and_remember(self): + policy = security_policy.MacaroonSecurityPolicy() + + assert policy.forget(pretend.stub()) == [] + assert policy.remember(pretend.stub(), pretend.stub()) == [] + + def test_identify_no_macaroon(self, monkeypatch): + policy = security_policy.MacaroonSecurityPolicy() vary_cb = pretend.stub() add_vary_cb = pretend.call_recorder(lambda *v: vary_cb) - monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb) + monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb) + + extract_http_macaroon = pretend.call_recorder(lambda 
r: None) + monkeypatch.setattr( + security_policy, "_extract_http_macaroon", extract_http_macaroon + ) request = pretend.stub( add_response_callback=pretend.call_recorder(lambda cb: None) ) - assert policy.unauthenticated_userid(request) is None - assert _extract_http_macaroon.calls == [pretend.call(request)] + assert policy.identity(request) is None + assert extract_http_macaroon.calls == [pretend.call(request)] + assert add_vary_cb.calls == [pretend.call("Authorization")] assert request.add_response_callback.calls == [pretend.call(vary_cb)] - def test_unauthenticated_userid_valid_macaroon(self, monkeypatch): - _extract_http_macaroon = pretend.call_recorder(lambda r: b"not a real macaroon") - monkeypatch.setattr( - auth_policy, "_extract_http_macaroon", _extract_http_macaroon - ) - - policy = auth_policy.MacaroonAuthenticationPolicy() + def test_identify(self, monkeypatch): + policy = security_policy.MacaroonSecurityPolicy() vary_cb = pretend.stub() add_vary_cb = pretend.call_recorder(lambda *v: vary_cb) - monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb) + monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb) - userid = uuid.uuid4() + raw_macaroon = pretend.stub() + extract_http_macaroon = pretend.call_recorder(lambda r: raw_macaroon) + monkeypatch.setattr( + security_policy, "_extract_http_macaroon", extract_http_macaroon + ) + + user = pretend.stub() macaroon_service = pretend.stub( - find_userid=pretend.call_recorder(lambda macaroon: userid) + find_from_raw=pretend.call_recorder(lambda m: pretend.stub(user=user)) ) request = pretend.stub( - find_service=pretend.call_recorder( - lambda interface, **kw: macaroon_service - ), add_response_callback=pretend.call_recorder(lambda cb: None), + find_service=pretend.call_recorder(lambda i, **kw: macaroon_service), ) - assert policy.unauthenticated_userid(request) == str(userid) - assert _extract_http_macaroon.calls == [pretend.call(request)] + assert policy.identity(request) is user + assert extract_http_macaroon.calls == [pretend.call(request)] assert request.find_service.calls == [ pretend.call(IMacaroonService, context=None) ] - assert macaroon_service.find_userid.calls == [ - pretend.call(b"not a real macaroon") - ] - assert add_vary_cb.calls == [pretend.call("Authorization")] - assert request.add_response_callback.calls == [pretend.call(vary_cb)] - - def test_unauthenticated_userid_valid_macaroon_invalid_userid(self, monkeypatch): - _extract_http_macaroon = pretend.call_recorder(lambda r: b"not a real macaroon") - monkeypatch.setattr( - auth_policy, "_extract_http_macaroon", _extract_http_macaroon - ) - - policy = auth_policy.MacaroonAuthenticationPolicy() - - vary_cb = pretend.stub() - add_vary_cb = pretend.call_recorder(lambda *v: vary_cb) - monkeypatch.setattr(auth_policy, "add_vary_callback", add_vary_cb) - - macaroon_service = pretend.stub( - find_userid=pretend.call_recorder(lambda macaroon: None) - ) - request = pretend.stub( - find_service=pretend.call_recorder( - lambda interface, **kw: macaroon_service - ), - add_response_callback=pretend.call_recorder(lambda cb: None), - ) + assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)] - assert policy.unauthenticated_userid(request) is None - assert _extract_http_macaroon.calls == [pretend.call(request)] assert add_vary_cb.calls == [pretend.call("Authorization")] - assert macaroon_service.find_userid.calls == [ - pretend.call(b"not a real macaroon") - ] assert request.add_response_callback.calls == [pretend.call(vary_cb)] - def 
test_remember(self): - policy = auth_policy.MacaroonAuthenticationPolicy() - assert policy.remember(pretend.stub(), pretend.stub()) == [] - - def test_forget(self): - policy = auth_policy.MacaroonAuthenticationPolicy() - assert policy.forget(pretend.stub()) == [] - class TestMacaroonAuthorizationPolicy: def test_verify(self): assert verifyClass( - IAuthorizationPolicy, auth_policy.MacaroonAuthorizationPolicy + IAuthorizationPolicy, security_policy.MacaroonAuthorizationPolicy ) def test_permits_no_active_request(self, monkeypatch): get_current_request = pretend.call_recorder(lambda: None) - monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) backing_policy = pretend.stub( permits=pretend.call_recorder(lambda *a, **kw: pretend.stub()) ) - policy = auth_policy.MacaroonAuthorizationPolicy(policy=backing_policy) + policy = security_policy.MacaroonAuthorizationPolicy(policy=backing_policy) result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub()) assert result == Denied("") @@ -178,18 +153,18 @@ def test_permits_no_active_request(self, monkeypatch): def test_permits_no_macaroon(self, monkeypatch): request = pretend.stub() get_current_request = pretend.call_recorder(lambda: request) - monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) _extract_http_macaroon = pretend.call_recorder(lambda r: None) monkeypatch.setattr( - auth_policy, "_extract_http_macaroon", _extract_http_macaroon + security_policy, "_extract_http_macaroon", _extract_http_macaroon ) permits = pretend.stub() backing_policy = pretend.stub( permits=pretend.call_recorder(lambda *a, **kw: permits) ) - policy = auth_policy.MacaroonAuthorizationPolicy(policy=backing_policy) + policy = security_policy.MacaroonAuthorizationPolicy(policy=backing_policy) result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub()) assert result == permits @@ -202,18 +177,18 @@ def test_permits_invalid_macaroon(self, monkeypatch): find_service=pretend.call_recorder(lambda interface, **kw: macaroon_service) ) get_current_request = pretend.call_recorder(lambda: request) - monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) _extract_http_macaroon = pretend.call_recorder(lambda r: b"not a real macaroon") monkeypatch.setattr( - auth_policy, "_extract_http_macaroon", _extract_http_macaroon + security_policy, "_extract_http_macaroon", _extract_http_macaroon ) permits = pretend.stub() backing_policy = pretend.stub( permits=pretend.call_recorder(lambda *a, **kw: permits) ) - policy = auth_policy.MacaroonAuthorizationPolicy(policy=backing_policy) + policy = security_policy.MacaroonAuthorizationPolicy(policy=backing_policy) result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub()) assert result == Denied("") @@ -227,18 +202,18 @@ def test_permits_valid_macaroon(self, monkeypatch): find_service=pretend.call_recorder(lambda interface, **kw: macaroon_service) ) get_current_request = pretend.call_recorder(lambda: request) - monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) _extract_http_macaroon = pretend.call_recorder(lambda r: b"not a real macaroon") monkeypatch.setattr( - auth_policy, 
"_extract_http_macaroon", _extract_http_macaroon + security_policy, "_extract_http_macaroon", _extract_http_macaroon ) permits = pretend.stub() backing_policy = pretend.stub( permits=pretend.call_recorder(lambda *a, **kw: permits) ) - policy = auth_policy.MacaroonAuthorizationPolicy(policy=backing_policy) + policy = security_policy.MacaroonAuthorizationPolicy(policy=backing_policy) result = policy.permits(pretend.stub(), pretend.stub(), "upload") assert result == permits @@ -257,18 +232,18 @@ def test_denies_valid_macaroon_for_incorrect_permission( find_service=pretend.call_recorder(lambda interface, **kw: macaroon_service) ) get_current_request = pretend.call_recorder(lambda: request) - monkeypatch.setattr(auth_policy, "get_current_request", get_current_request) + monkeypatch.setattr(security_policy, "get_current_request", get_current_request) _extract_http_macaroon = pretend.call_recorder(lambda r: b"not a real macaroon") monkeypatch.setattr( - auth_policy, "_extract_http_macaroon", _extract_http_macaroon + security_policy, "_extract_http_macaroon", _extract_http_macaroon ) permits = pretend.stub() backing_policy = pretend.stub( permits=pretend.call_recorder(lambda *a, **kw: permits) ) - policy = auth_policy.MacaroonAuthorizationPolicy(policy=backing_policy) + policy = security_policy.MacaroonAuthorizationPolicy(policy=backing_policy) result = policy.permits(pretend.stub(), pretend.stub(), invalid_permission) assert result == Denied("") @@ -283,7 +258,7 @@ def test_principals_allowed_by_permission(self): lambda *a: principals ) ) - policy = auth_policy.MacaroonAuthorizationPolicy(policy=backing_policy) + policy = security_policy.MacaroonAuthorizationPolicy(policy=backing_policy) assert ( policy.principals_allowed_by_permission(pretend.stub(), pretend.stub()) diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -118,14 +118,14 @@ def test_acl(self, db_session): (Allow, "group:moderators", "moderator"), ] + sorted( [ - (Allow, str(owner1.user.id), ["manage:project", "upload"]), - (Allow, str(owner2.user.id), ["manage:project", "upload"]), + (Allow, f"user:{owner1.user.id}", ["manage:project", "upload"]), + (Allow, f"user:{owner2.user.id}", ["manage:project", "upload"]), ], key=lambda x: x[1], ) + sorted( [ - (Allow, str(maintainer1.user.id), ["upload"]), - (Allow, str(maintainer2.user.id), ["upload"]), + (Allow, f"user:{maintainer1.user.id}", ["upload"]), + (Allow, f"user:{maintainer2.user.id}", ["upload"]), ], key=lambda x: x[1], ) @@ -337,14 +337,14 @@ def test_acl(self, db_session): (Allow, "group:moderators", "moderator"), ] + sorted( [ - (Allow, str(owner1.user.id), ["manage:project", "upload"]), - (Allow, str(owner2.user.id), ["manage:project", "upload"]), + (Allow, f"user:{owner1.user.id}", ["manage:project", "upload"]), + (Allow, f"user:{owner2.user.id}", ["manage:project", "upload"]), ], key=lambda x: x[1], ) + sorted( [ - (Allow, str(maintainer1.user.id), ["upload"]), - (Allow, str(maintainer2.user.id), ["upload"]), + (Allow, f"user:{maintainer1.user.id}", ["upload"]), + (Allow, f"user:{maintainer2.user.id}", ["upload"]), ], key=lambda x: x[1], ) diff --git a/tests/unit/utils/test_security_policy.py b/tests/unit/utils/test_security_policy.py new file mode 100644 --- /dev/null +++ b/tests/unit/utils/test_security_policy.py @@ -0,0 +1,233 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend +import pytest + +from pyramid.authorization import Authenticated +from pyramid.security import Denied + +from warehouse.utils import security_policy + +from ...common.db.accounts import UserFactory + + [email protected]( + ( + "is_superuser", + "is_moderator", + "is_psf_staff", + "expected", + ), + [ + (False, False, False, []), + ( + True, + False, + False, + [ + "group:admins", + "group:moderators", + "group:psf_staff", + "group:with_admin_dashboard_access", + ], + ), + ( + False, + True, + False, + ["group:moderators", "group:with_admin_dashboard_access"], + ), + ( + True, + True, + False, + [ + "group:admins", + "group:moderators", + "group:psf_staff", + "group:with_admin_dashboard_access", + ], + ), + ( + False, + False, + True, + ["group:psf_staff", "group:with_admin_dashboard_access"], + ), + ( + False, + True, + True, + [ + "group:moderators", + "group:psf_staff", + "group:with_admin_dashboard_access", + ], + ), + ], +) +def test_principals_for_authenticated_user( + is_superuser, + is_moderator, + is_psf_staff, + expected, +): + user = pretend.stub( + id=1, + is_superuser=is_superuser, + is_moderator=is_moderator, + is_psf_staff=is_psf_staff, + ) + assert security_policy._principals_for_authenticated_user(user) == expected + + +class TestMultiSecurityPolicy: + def test_initializes(self): + subpolicies = pretend.stub() + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + assert policy._policies is subpolicies + assert policy._authz is authz + + def test_identity_none(self): + subpolicies = [pretend.stub(identity=pretend.call_recorder(lambda r: None))] + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub() + assert policy.identity(request) is None + for p in subpolicies: + assert p.identity.calls == [pretend.call(request)] + + def test_identity_first_come_first_serve(self): + identity1 = pretend.stub() + identity2 = pretend.stub() + subpolicies = [ + pretend.stub(identity=pretend.call_recorder(lambda r: None)), + pretend.stub(identity=pretend.call_recorder(lambda r: identity1)), + pretend.stub(identity=pretend.call_recorder(lambda r: identity2)), + ] + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub() + assert policy.identity(request) is identity1 + assert subpolicies[0].identity.calls == [pretend.call(request)] + assert subpolicies[1].identity.calls == [pretend.call(request)] + assert subpolicies[2].identity.calls == [] + + def test_authenticated_userid_no_identity(self): + subpolicies = pretend.stub() + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub(identity=None) + assert policy.authenticated_userid(request) is None + + def test_authenticated_userid_nonuser_identity(self, db_request): + subpolicies = pretend.stub() + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub(identity=pretend.stub(id="fakeid")) + assert 
policy.authenticated_userid(request) is None + + def test_authenticated_userid(self, db_request): + subpolicies = pretend.stub() + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + user = UserFactory.create() + request = pretend.stub(identity=user) + assert policy.authenticated_userid(request) == str(user.id) + + def test_forget(self): + header = pretend.stub() + subpolicies = [ + pretend.stub(forget=pretend.call_recorder(lambda r, **kw: [header])) + ] + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub() + assert policy.forget(request, foo=None) == [header] + assert subpolicies[0].forget.calls == [pretend.call(request, foo=None)] + + def test_remember(self): + header = pretend.stub() + subpolicies = [ + pretend.stub(remember=pretend.call_recorder(lambda r, uid, **kw: [header])) + ] + authz = pretend.stub() + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub() + userid = pretend.stub() + assert policy.remember(request, userid, foo=None) == [header] + assert subpolicies[0].remember.calls == [ + pretend.call(request, userid, foo=None) + ] + + def test_permits_user(self, db_request, monkeypatch): + subpolicies = pretend.stub() + status = pretend.stub() + authz = pretend.stub(permits=pretend.call_recorder(lambda *a: status)) + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + principals_for_authenticated_user = pretend.call_recorder( + lambda *a: ["some:principal"] + ) + monkeypatch.setattr( + security_policy, + "_principals_for_authenticated_user", + principals_for_authenticated_user, + ) + + user = UserFactory.create() + request = pretend.stub(identity=user) + context = pretend.stub() + permission = pretend.stub() + assert policy.permits(request, context, permission) is status + assert authz.permits.calls == [ + pretend.call( + context, + [Authenticated, f"user:{user.id}", "some:principal"], + permission, + ) + ] + + def test_permits_nonuser_denied(self): + subpolicies = pretend.stub() + authz = pretend.stub(permits=pretend.call_recorder(lambda *a: pretend.stub())) + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + # Anything that doesn't pass an isinstance check for User + fakeuser = pretend.stub() + request = pretend.stub(identity=fakeuser) + context = pretend.stub() + permission = pretend.stub() + assert policy.permits(request, context, permission) == Denied("unimplemented") + assert authz.permits.calls == [] + + def test_permits_no_identity(self): + subpolicies = pretend.stub() + status = pretend.stub() + authz = pretend.stub(permits=pretend.call_recorder(lambda *a: status)) + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + request = pretend.stub(identity=None) + context = pretend.stub() + permission = pretend.stub() + assert policy.permits(request, context, permission) is status + assert authz.permits.calls == [pretend.call(context, [], permission)]
Don't flash warning on session expiration for token-based sessions

As raised in https://github.com/pypa/warehouse/pull/11218#discussion_r856926749, token-based sessions currently get a flash message when the session expires, which is unnecessary.
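For context, the shape the test_patch above pins down: only the cookie-backed session policy ever touches request.session, so macaroon- and basic-auth requests never receive the expiry flash. Below is a minimal sketch of that policy, reconstructed from the test assertions in the diff; the matched-route exclusion, Vary callbacks, and authentication_method bookkeeping are omitted, and the body is illustrative rather than warehouse's actual implementation.

# Illustrative sketch only, reconstructed from the test expectations above.
# SessionAuthenticationHelper is Pyramid 2.x's session helper; IUserService
# is warehouse's user-service interface. Vary callbacks and the upload-route
# exclusion exercised by the tests are omitted for brevity.
from pyramid.authentication import SessionAuthenticationHelper

from warehouse.accounts.interfaces import IUserService


class SessionSecurityPolicy:
    def __init__(self):
        self._session_helper = SessionAuthenticationHelper()

    def identity(self, request):
        userid = self._session_helper.authenticated_userid(request)
        if userid is None:
            return None

        user_service = request.find_service(IUserService, context=None)
        timestamp = user_service.get_password_timestamp(userid)
        if request.session.password_outdated(timestamp):
            # Only this cookie-backed path flashes: the macaroon and basic-auth
            # policies never consult the session, so token-based requests no
            # longer see the expiration warning.
            request.session.invalidate()
            request.session.flash(
                "Session invalidated by password change", queue="error"
            )
            return None

        return user_service.get_user(userid)

Splitting the old monolithic policy into per-method policies is what makes this guard natural: the flash lives with the only policy that owns a session.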
2022-04-20T16:34:17Z
[]
[]
pypi/warehouse
11,272
pypi__warehouse-11272
[ "10970" ]
66dde449efc9cde46f81825477d00d4dfa77cd28
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -158,6 +158,7 @@ def configure(settings=None): maybe_set(settings, "warehouse.num_proxies", "WAREHOUSE_NUM_PROXIES", int) maybe_set(settings, "warehouse.domain", "WAREHOUSE_DOMAIN") maybe_set(settings, "forklift.domain", "FORKLIFT_DOMAIN") + maybe_set(settings, "auth.domain", "AUTH_DOMAIN") maybe_set(settings, "warehouse.legacy_domain", "WAREHOUSE_LEGACY_DOMAIN") maybe_set(settings, "site.name", "SITE_NAME", default="Warehouse") maybe_set(settings, "aws.key_id", "AWS_ACCESS_KEY_ID") @@ -237,6 +238,12 @@ def configure(settings=None): maybe_set_compound(settings, "metrics", "backend", "METRICS_BACKEND") maybe_set_compound(settings, "breached_passwords", "backend", "BREACHED_PASSWORDS") maybe_set_compound(settings, "malware_check", "backend", "MALWARE_CHECK_BACKEND") + maybe_set( + settings, + "oidc.backend", + "OIDC_BACKEND", + default="warehouse.oidc.services.OIDCProviderService", + ) # Pythondotorg integration settings maybe_set(settings, "pythondotorg.host", "PYTHONDOTORG_HOST", default="python.org") @@ -430,6 +437,7 @@ def configure(settings=None): filters.setdefault("format_package_type", "warehouse.filters:format_package_type") filters.setdefault("parse_version", "warehouse.filters:parse_version") filters.setdefault("localize_datetime", "warehouse.filters:localize_datetime") + filters.setdefault("ctime", "warehouse.filters:ctime") filters.setdefault("is_recent", "warehouse.filters:is_recent") filters.setdefault("canonicalize_name", "packaging.utils:canonicalize_name") filters.setdefault("format_author_email", "warehouse.filters:format_author_email") diff --git a/warehouse/events/tags.py b/warehouse/events/tags.py --- a/warehouse/events/tags.py +++ b/warehouse/events/tags.py @@ -109,6 +109,7 @@ class Project(EventTagEnum): """ # Name = "source_type:subject_type:action" + ShortLivedAPITokenAdded = "account:short_lived_api_token:added" APITokenAdded = "project:api_token:added" APITokenRemoved = "project:api_token:removed" OIDCProviderAdded = "project:oidc:provider-added" diff --git a/warehouse/filters.py b/warehouse/filters.py --- a/warehouse/filters.py +++ b/warehouse/filters.py @@ -159,6 +159,10 @@ def localize_datetime(timestamp): return pytz.utc.localize(timestamp) +def ctime(timestamp): + return datetime.datetime.fromtimestamp(timestamp) + + def is_recent(timestamp): if timestamp: return timestamp + datetime.timedelta(days=30) > datetime.datetime.now() diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -782,9 +782,9 @@ def file_upload(request): metrics = request.find_service(IMetricsService, context=None) metrics.increment("warehouse.upload.attempt") - # Before we do anything, if there isn't an authenticated user with this - # request, then we'll go ahead and bomb out. - if request.authenticated_userid is None: + # Before we do anything, if there isn't an authenticated identity with + # this request, then we'll go ahead and bomb out. + if request.identity is None: raise _exc_with_message( HTTPForbidden, "Invalid or non-existent authentication information. " @@ -793,24 +793,27 @@ def file_upload(request): ), ) - # Ensure that user has a verified, primary email address. This should both - # reduce the ease of spam account creation and activity, as well as act as - # a forcing function for https://github.com/pypi/warehouse/issues/3632. 
- # TODO: Once https://github.com/pypi/warehouse/issues/3632 has been solved, - # we might consider a different condition, possibly looking at - # User.is_active instead. - if not (request.user.primary_email and request.user.primary_email.verified): - raise _exc_with_message( - HTTPBadRequest, - ( - "User {!r} does not have a verified primary email address. " - "Please add a verified primary email before attempting to " - "upload to PyPI. See {project_help} for more information." - ).format( - request.user.username, - project_help=request.help_url(_anchor="verified-email"), - ), - ) from None + # These checks only make sense when our authenticated identity is a user, + # not a project identity (like OIDC-minted tokens.) + if request.user: + # Ensure that user has a verified, primary email address. This should both + # reduce the ease of spam account creation and activity, as well as act as + # a forcing function for https://github.com/pypa/warehouse/issues/3632. + # TODO: Once https://github.com/pypa/warehouse/issues/3632 has been solved, + # we might consider a different condition, possibly looking at + # User.is_active instead. + if not (request.user.primary_email and request.user.primary_email.verified): + raise _exc_with_message( + HTTPBadRequest, + ( + "User {!r} does not have a verified primary email address. " + "Please add a verified primary email before attempting to " + "upload to PyPI. See {project_help} for more information." + ).format( + request.user.username, + project_help=request.help_url(_anchor="verified-email"), + ), + ) from None # Do some cleanup of the various form fields for key in list(request.POST): @@ -892,6 +895,19 @@ def file_upload(request): ) if project is None: + # Another sanity check: we should be preventing non-user identities + # from creating projects in the first place with scoped tokens, + # but double-check anyways. + if not request.user: + raise _exc_with_message( + HTTPBadRequest, + ( + "Non-user identities cannot create new projects. " + "You must first create a project as a user, and then " + "configure the project to use OpenID Connect." + ), + ) + # We attempt to create the project. try: validate_project_name(form.name.data, request) @@ -923,27 +939,42 @@ def file_upload(request): }, ) - # Check that the user has permission to do things to this project, if this + # Check that the identity has permission to do things to this project, if this # is a new project this will act as a sanity check for the role we just # added above. allowed = request.has_permission("upload", project) if not allowed: reason = getattr(allowed, "reason", None) - msg = ( - ( - "The user '{0}' isn't allowed to upload to project '{1}'. " - "See {2} for more information." - ).format( - request.user.username, - project.name, - request.help_url(_anchor="project-name"), + if request.user: + msg = ( + ( + "The user '{0}' isn't allowed to upload to project '{1}'. " + "See {2} for more information." + ).format( + request.user.username, + project.name, + request.help_url(_anchor="project-name"), + ) + if reason is None + else allowed.msg + ) + else: + msg = ( + ( + "The given token isn't allowed to upload to project '{0}'. " + "See {1} for more information." 
+ ).format( + project.name, + request.help_url(_anchor="project-name"), + ) + if reason is None + else allowed.msg ) - if reason is None - else allowed.msg - ) raise _exc_with_message(HTTPForbidden, msg) # Check if the user has 2FA and used basic auth + # NOTE: We don't need to guard request.user here because basic auth + # can only be used with user identities. if ( request.authentication_method == AuthenticationMethod.BASIC_AUTH and request.user.has_two_factor @@ -1072,10 +1103,11 @@ def file_upload(request): "requires_python", } }, - uploader=request.user, + uploader=request.user if request.user else None, uploaded_via=request.user_agent, ) request.db.add(release) + # TODO: This should be handled by some sort of database trigger or # a SQLAlchemy hook or the like instead of doing it inline in # this view. @@ -1084,7 +1116,7 @@ def file_upload(request): name=release.project.name, version=release.version, action="new release", - submitted_by=request.user, + submitted_by=request.user if request.user else None, submitted_from=request.remote_addr, ) ) @@ -1093,7 +1125,10 @@ def file_upload(request): tag=EventTag.Project.ReleaseAdd, ip_address=request.remote_addr, additional={ - "submitted_by": request.user.username, + "ephemeral": request.user is None, + "submitted_by": request.user.username + if request.user + else "OpenID created token", "canonical_version": release.canonical_version, }, ) @@ -1342,7 +1377,7 @@ def file_upload(request): action="add {python_version} file {filename}".format( python_version=file_.python_version, filename=file_.filename ), - submitted_by=request.user, + submitted_by=request.user if request.user else None, submitted_from=request.remote_addr, ) ) diff --git a/warehouse/macaroons/caveats/__init__.py b/warehouse/macaroons/caveats/__init__.py --- a/warehouse/macaroons/caveats/__init__.py +++ b/warehouse/macaroons/caveats/__init__.py @@ -33,6 +33,7 @@ deserialize, serialize, ) +from warehouse.oidc import models as oidc_models from warehouse.packaging.models import Project __all__ = ["deserialize", "serialize", "verify"] @@ -100,6 +101,38 @@ def verify(self, request: Request, context: Any, permission: str) -> Result: return Success() +@as_caveat(tag=4) +@dataclass(frozen=True) +class OIDCProvider(Caveat): + oidc_provider_id: StrictStr + + def verify(self, request: Request, context: Any, permission: str) -> Result: + # If the identity associated with this macaroon is not an OpenID provider, + # then it doesn't make sense to restrict it with an `OIDCProvider` caveat. + if not isinstance(request.identity, oidc_models.OIDCProvider): + return Failure( + "OIDC scoped token used outside of an OIDC identified request" + ) + + if str(request.identity.id) != self.oidc_provider_id: + return Failure( + "current OIDC provider does not match provider restriction in token" + ) + + # OpenID-scoped tokens are only valid against projects. + if not isinstance(context, Project): + return Failure("OIDC scoped token used outside of a project context") + + # Specifically, they are only valid against projects that are registered + # to the current identifying OpenID provider. 
+ if context not in request.identity.projects: + return Failure( + f"OIDC scoped token is not valid for project '{context.name}'" + ) + + return Success() + + def verify( macaroon: Macaroon, key: bytes, request: Request, context: Any, permission: str ) -> Allowed | WarehouseDenied: diff --git a/warehouse/macaroons/interfaces.py b/warehouse/macaroons/interfaces.py --- a/warehouse/macaroons/interfaces.py +++ b/warehouse/macaroons/interfaces.py @@ -14,15 +14,6 @@ class IMacaroonService(Interface): - def _extract_raw_macaroon(raw_macaroon): - """ - Returns the base64-encoded macaroon component of a PyPI macaroon, - dropping the prefix. - - Returns None if the macaroon is None, has no prefix, or has the - wrong prefix. - """ - def find_from_raw(raw_macaroon): """ Returns a macaroon model from the DB from a raw macaroon, or raises @@ -46,13 +37,18 @@ def verify(raw_macaroon, request, context, permission): Returns True if the given raw (serialized) macaroon is valid for the request, context, and requested permission. - Raises InvalidMacaroon if the macaroon is not valid. + Raises InvalidMacaroonError if the macaroon is not valid. """ - def create_macaroon(location, user_id, description, scopes): + def create_macaroon( + location, description, scopes, *, user_id=None, oidc_provider_id=None + ): """ - Returns a new raw (serialized) macaroon. The description provided - is not embedded into the macaroon, only stored in the DB model. + Returns a tuple of a new raw (serialized) macaroon and its DB model. + The description provided is not embedded into the macaroon, only stored + in the DB model. + + An associated identity (either a user or macaroon, by ID) must be specified. """ def delete_macaroon(macaroon_id): diff --git a/warehouse/macaroons/models.py b/warehouse/macaroons/models.py --- a/warehouse/macaroons/models.py +++ b/warehouse/macaroons/models.py @@ -13,6 +13,7 @@ import os from sqlalchemy import ( + CheckConstraint, Column, DateTime, ForeignKey, @@ -37,13 +38,25 @@ class Macaroon(db.Model): UniqueConstraint( "description", "user_id", name="_user_macaroons_description_uc" ), + CheckConstraint( + "(user_id::text IS NULL) <> (oidc_provider_id::text IS NULL)", + name="_user_xor_oidc_provider_macaroon", + ), ) - # All of our Macaroons belong to a specific user, because a caveat-less - # Macaroon should act the same as their password does, instead of as a - # global permission to upload files. + # Macaroons come in two forms: they either belong to a user, or they + # authenticate for one or more projects. + # * In the user case, a Macaroon has an associated user, and *might* have + # additional project scope restrictions as part of its caveats. + # * In the project case, a Macaroon does *not* have an explicit associated + # project. Instead, depending on how its used (its request context), + # it identifies one of the projects scoped in its caveats. 
user_id = Column( - UUID(as_uuid=True), ForeignKey("users.id"), nullable=False, index=True + UUID(as_uuid=True), ForeignKey("users.id"), nullable=True, index=True + ) + + oidc_provider_id = Column( + UUID(as_uuid=True), ForeignKey("oidc_providers.id"), nullable=True, index=True ) # Store some information about the Macaroon to give users some mechanism diff --git a/warehouse/macaroons/security_policy.py b/warehouse/macaroons/security_policy.py --- a/warehouse/macaroons/security_policy.py +++ b/warehouse/macaroons/security_policy.py @@ -16,7 +16,6 @@ from pyramid.threadlocal import get_current_request from zope.interface import implementer -from warehouse.accounts.interfaces import IUserService from warehouse.cache.http import add_vary_callback from warehouse.errors import WarehouseDenied from warehouse.macaroons import InvalidMacaroonError @@ -88,13 +87,16 @@ def identity(self, request): # Check to see if our Macaroon exists in the database, and if so # fetch the user that is associated with it. macaroon_service = request.find_service(IMacaroonService, context=None) - userid = macaroon_service.find_userid(macaroon) - if userid is None: + try: + dm = macaroon_service.find_from_raw(macaroon) + except InvalidMacaroonError: return None - user_service = request.find_service(IUserService, context=None) - return user_service.get_user(userid) + # Every Macaroon is either associated with a user or an OIDC provider. + if dm.user is not None: + return dm.user + return dm.oidc_provider def remember(self, request, userid, **kw): # This is a NO-OP because our Macaroon header policy doesn't allow diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -11,7 +11,6 @@ # limitations under the License. import datetime -import uuid import pymacaroons @@ -20,7 +19,6 @@ from sqlalchemy.orm.exc import NoResultFound from zope.interface import implementer -from warehouse.accounts.models import User from warehouse.macaroons import caveats from warehouse.macaroons.errors import InvalidMacaroonError from warehouse.macaroons.interfaces import IMacaroonService @@ -54,17 +52,9 @@ def find_macaroon(self, macaroon_id): Returns a macaroon model from the DB by its identifier. Returns None if no macaroon has the given ID. """ - try: - dm = ( - self.db.query(Macaroon) - .options(joinedload("user")) - .filter(Macaroon.id == uuid.UUID(macaroon_id)) - .one() - ) - except NoResultFound: - return None - - return dm + return self.db.get( + Macaroon, macaroon_id, (joinedload("user"), joinedload("oidc_provider")) + ) def _deserialize_raw_macaroon(self, raw_macaroon): raw_macaroon = self._extract_raw_macaroon(raw_macaroon) @@ -95,11 +85,16 @@ def find_userid(self, raw_macaroon): if dm is None: return None + # This can be None if the macaroon has no associated user + # (e.g., an OIDC-minted macaroon). 
+ if dm.user is None: + return None + return dm.user.id def find_from_raw(self, raw_macaroon): """ - Returns a DB macaroon matching the imput, or raises InvalidMacaroonError + Returns a DB macaroon matching the input, or raises InvalidMacaroonError """ m = self._deserialize_raw_macaroon(raw_macaroon) dm = self.find_macaroon(m.identifier.decode()) @@ -127,14 +122,16 @@ def verify(self, raw_macaroon, request, context, permission): raise InvalidMacaroonError(verified.msg) - def create_macaroon(self, location, user_id, description, scopes): + def create_macaroon( + self, location, description, scopes, *, user_id=None, oidc_provider_id=None + ): """ Returns a tuple of a new raw (serialized) macaroon and its DB model. The description provided is not embedded into the macaroon, only stored in the DB model. - """ - user = self.db.query(User).filter(User.id == user_id).one() + An associated identity (either a user or macaroon, by ID) must be specified. + """ if not all(isinstance(c, caveats.Caveat) for c in scopes): raise TypeError("scopes must be a list of Caveat instances") @@ -151,7 +148,8 @@ def create_macaroon(self, location, user_id, description, scopes): break dm = Macaroon( - user=user, + user_id=user_id, + oidc_provider_id=oidc_provider_id, description=description, permissions_caveat={"permissions": permissions}, ) diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -1045,9 +1045,9 @@ def create_macaroon(self): serialized_macaroon, macaroon = self.macaroon_service.create_macaroon( location=self.request.domain, - user_id=self.request.user.id, description=form.description.data, scopes=macaroon_caveats, + user_id=self.request.user.id, ) self.user_service.record_event( self.request.user.id, @@ -3221,10 +3221,12 @@ def delete_oidc_provider(self): provider=provider, ) - # NOTE: We remove the provider from the project, but we don't actually - # delete the provider model itself (since it might be associated - # with other projects). + # We remove this provider from the project's list of providers + # and, if there are no projects left associated with the provider, + # we delete it entirely. self.project.oidc_providers.remove(provider) + if len(provider.projects) == 0: + self.request.db.delete(provider) self.project.record_event( tag=EventTag.Project.OIDCProviderRemoved, diff --git a/warehouse/migrations/versions/43bf0b6badcb_make_users_optional_with_macaroons.py b/warehouse/migrations/versions/43bf0b6badcb_make_users_optional_with_macaroons.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/43bf0b6badcb_make_users_optional_with_macaroons.py @@ -0,0 +1,67 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +Make users optional with Macaroons + +Revision ID: 43bf0b6badcb +Revises: ef0a77c48089 +Create Date: 2022-04-19 14:57:54.765006 +""" + +import sqlalchemy as sa + +from alembic import op +from citext import CIText +from sqlalchemy.dialects import postgresql + +revision = "43bf0b6badcb" +down_revision = "ef0a77c48089" + + +def upgrade(): + # Macaroon users are now optional. + op.alter_column( + "macaroons", "user_id", existing_type=postgresql.UUID(), nullable=True + ) + + # Macaroons might have an associated OIDCProvider (if not user-associated). + op.add_column( + "macaroons", + sa.Column("oidc_provider_id", postgresql.UUID(as_uuid=True), nullable=True), + ) + op.create_index( + op.f("ix_macaroons_oidc_provider_id"), + "macaroons", + ["oidc_provider_id"], + unique=False, + ) + op.create_foreign_key( + None, "macaroons", "oidc_providers", ["oidc_provider_id"], ["id"] + ) + + # JournalEvent users are now optional. + op.alter_column("journals", "submitted_by", existing_type=CIText(), nullable=True) + + # Macaroon -> (User XOR OIDCProvider) + op.create_check_constraint( + "_user_xor_oidc_provider_macaroon", + table_name="macaroons", + condition="(user_id::text IS NULL) <> (oidc_provider_id::text IS NULL)", + ) + + +def downgrade(): + op.alter_column( + "macaroons", "user_id", existing_type=postgresql.UUID(), nullable=False + ) + + op.alter_column("journals", "submitted_by", existing_type=CIText(), nullable=False) diff --git a/warehouse/oidc/__init__.py b/warehouse/oidc/__init__.py --- a/warehouse/oidc/__init__.py +++ b/warehouse/oidc/__init__.py @@ -12,13 +12,26 @@ from warehouse.oidc.interfaces import IOIDCProviderService from warehouse.oidc.services import OIDCProviderServiceFactory +from warehouse.oidc.utils import GITHUB_OIDC_ISSUER_URL def includeme(config): + oidc_provider_service_class = config.maybe_dotted( + config.registry.settings["oidc.backend"] + ) + config.register_service_factory( OIDCProviderServiceFactory( - provider="github", issuer_url="https://token.actions.githubusercontent.com" + provider="github", + issuer_url=GITHUB_OIDC_ISSUER_URL, + service_class=oidc_provider_service_class, ), IOIDCProviderService, name="github", ) + + # During deployments, we separate auth routes into their own subdomain + # to simplify caching exclusion. + auth = config.get_settings().get("auth.domain") + + config.add_route("oidc.mint_token", "/_/oidc/github/mint-token", domain=auth) diff --git a/warehouse/oidc/interfaces.py b/warehouse/oidc/interfaces.py --- a/warehouse/oidc/interfaces.py +++ b/warehouse/oidc/interfaces.py @@ -10,41 +10,36 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from typing import Any, NewType from zope.interface import Interface from warehouse.rate_limiting.interfaces import RateLimiterException +SignedClaims = NewType("SignedClaims", dict[str, Any]) + class IOIDCProviderService(Interface): - def get_key(key_id): + def verify_jwt_signature(unverified_token: str): """ - Return the JWK identified by the given KID, - fetching it if not already cached locally. + Verify the given JWT's signature, returning its signed claims if + valid. If the signature is invalid, `None` is returned. - Returns None if the JWK does not exist or the access pattern is - invalid (i.e., exceeds our internal limit on JWK requests to - each provider). + This method does **not** verify the claim set itself -- the API + consumer is responsible for evaluating the claim set. 
""" pass - def verify_signature_only(token): - """ - Verify the given JWT's signature and basic claims, returning - the decoded JWT, or `None` if invalid. - - This function **does not** verify the token's suitability - for a particular action; subsequent checks on the decoded token's - third party claims must be done to ensure that. + def find_provider(signed_claims: SignedClaims): """ + Given a mapping of signed claims produced by `verify_jwt_signature`, + attempt to find and return an `OIDCProvider` that matches them. - def verify_for_project(token, project): - """ - Verify the given JWT's signature and basic claims in the same - manner as `verify_signature_only`, but *also* verify that the JWT's - claims are consistent with at least one of the project's registered - OIDC providers. + If no `OIDCProvider` matches the claims, `None` is returned. """ + pass class TooManyOIDCRegistrations(RateLimiterException): diff --git a/warehouse/oidc/models.py b/warehouse/oidc/models.py --- a/warehouse/oidc/models.py +++ b/warehouse/oidc/models.py @@ -19,6 +19,8 @@ from sqlalchemy.dialects.postgresql import UUID from warehouse import db +from warehouse.macaroons.models import Macaroon +from warehouse.oidc.interfaces import SignedClaims from warehouse.packaging.models import Project @@ -77,6 +79,9 @@ class OIDCProvider(db.Model): secondary=OIDCProviderProjectAssociation.__table__, # type: ignore backref="oidc_providers", ) + macaroons = orm.relationship( + Macaroon, backref="oidc_provider", cascade="all, delete-orphan", lazy=True + ) __mapper_args__ = { "polymorphic_identity": "oidc_providers", @@ -115,7 +120,7 @@ def all_known_claims(cls): | cls.__unchecked_claims__ ) - def verify_claims(self, signed_claims): + def verify_claims(self, signed_claims: SignedClaims): """ Given a JWT that has been successfully decoded (checked for a valid signature and basic claims), verify it against the more specific @@ -159,6 +164,11 @@ def provider_name(self): # pragma: no cover # Only concrete subclasses of OIDCProvider are constructed. return NotImplemented + @property + def provider_url(self): # pragma: no cover + # Only concrete subclasses of OIDCProvider are constructed. + return NotImplemented + class GitHubProvider(OIDCProvider): __tablename__ = "github_oidc_providers" @@ -203,17 +213,27 @@ class GitHubProvider(OIDCProvider): "workflow", } + @property + def _workflow_slug(self): + return f".github/workflows/{self.workflow_filename}" + @property def provider_name(self): return "GitHub" + @property + def provider_url(self): + # NOTE: Until we embed the SHA, this URL is not guaranteed to contain + # the exact contents of the workflow that their OIDC provider corresponds to. + return f"https://github.com/{self.repository}/blob/HEAD/{self._workflow_slug}" + @property def repository(self): return f"{self.repository_owner}/{self.repository_name}" @property def job_workflow_ref(self): - return f"{self.repository}/.github/workflows/{self.workflow_filename}" + return f"{self.repository}/{self._workflow_slug}" def __str__(self): return f"{self.workflow_filename} @ {self.repository}" diff --git a/warehouse/oidc/services.py b/warehouse/oidc/services.py --- a/warehouse/oidc/services.py +++ b/warehouse/oidc/services.py @@ -11,6 +11,7 @@ # limitations under the License. 
import json +import warnings import jwt import redis @@ -20,12 +21,60 @@ from zope.interface import implementer from warehouse.metrics.interfaces import IMetricsService -from warehouse.oidc.interfaces import IOIDCProviderService +from warehouse.oidc.interfaces import IOIDCProviderService, SignedClaims +from warehouse.oidc.models import OIDCProvider +from warehouse.oidc.utils import find_provider_by_issuer + + +class InsecureOIDCProviderWarning(UserWarning): + pass + + +@implementer(IOIDCProviderService) +class NullOIDCProviderService: + def __init__(self, session, provider, issuer_url, cache_url, metrics): + warnings.warn( + "NullOIDCProviderService is intended only for use in development, " + "you should not use it in production due to the lack of actual " + "JWT verification.", + InsecureOIDCProviderWarning, + ) + + self.db = session + self.issuer_url = issuer_url + + def verify_jwt_signature(self, unverified_token: str) -> SignedClaims | None: + try: + return SignedClaims( + jwt.decode( + unverified_token, + options=dict( + verify_signature=False, + # We require all of these to be present, but for the + # null provider we only actually verify the audience. + require=["iss", "iat", "nbf", "exp", "aud"], + verify_iss=False, + verify_iat=False, + verify_nbf=False, + verify_exp=False, + verify_aud=True, + ), + audience="pypi", + ) + ) + except jwt.PyJWTError: + return None + + def find_provider(self, signed_claims: SignedClaims) -> OIDCProvider | None: + # NOTE: We do NOT verify the claims against the provider, since this + # service is for development purposes only. + return find_provider_by_issuer(self.db, self.issuer_url, signed_claims) @implementer(IOIDCProviderService) class OIDCProviderService: - def __init__(self, provider, issuer_url, cache_url, metrics): + def __init__(self, session, provider, issuer_url, cache_url, metrics): + self.db = session self.provider = provider self.issuer_url = issuer_url self.cache_url = cache_url @@ -133,7 +182,7 @@ def _refresh_keyset(self): return keys - def get_key(self, key_id): + def _get_key(self, key_id): """ Return a JWK for the given key ID, or None if the key can't be found in this provider's keyset. @@ -158,77 +207,75 @@ def _get_key_for_token(self, token): prior to any verification. """ unverified_header = jwt.get_unverified_header(token) - return self.get_key(unverified_header["kid"]) + return self._get_key(unverified_header["kid"]) - def verify_signature_only(self, token): - key = self._get_key_for_token(token) + def verify_jwt_signature(self, unverified_token: str) -> SignedClaims | None: + key = self._get_key_for_token(unverified_token) try: # NOTE: Many of the keyword arguments here are defaults, but we # set them explicitly to assert the intended verification behavior. signed_payload = jwt.decode( - token, + unverified_token, key=key, algorithms=["RS256"], - verify_signature=True, - # "require" only checks for the presence of these claims, not - # their validity. Each has a corresponding "verify_" kwarg - # that enforces their actual validity. - require=["iss", "iat", "nbf", "exp", "aud"], - verify_iss=True, - verify_iat=True, - verify_nbf=True, - verify_exp=True, - verify_aud=True, + options=dict( + verify_signature=True, + # "require" only checks for the presence of these claims, not + # their validity. Each has a corresponding "verify_" kwarg + # that enforces their actual validity. 
+ require=["iss", "iat", "nbf", "exp", "aud"], + verify_iss=True, + verify_iat=True, + verify_nbf=True, + verify_exp=True, + verify_aud=True, + ), issuer=self.issuer_url, audience="pypi", leeway=30, ) - return signed_payload - except jwt.PyJWTError: - return None + return SignedClaims(signed_payload) except Exception as e: - # We expect pyjwt to only raise subclasses of PyJWTError, but - # we can't enforce this. Other exceptions indicate an abstraction - # leak, so we log them for upstream reporting. - sentry_sdk.capture_message(f"JWT verify raised generic error: {e}") + self.metrics.increment( + "warehouse.oidc.verify_jwt_signature.invalid_signature", + tags=[f"provider:{self.provider}"], + ) + if not isinstance(e, jwt.PyJWTError): + # We expect pyjwt to only raise subclasses of PyJWTError, but + # we can't enforce this. Other exceptions indicate an abstraction + # leak, so we log them for upstream reporting. + sentry_sdk.capture_message(f"JWT verify raised generic error: {e}") return None - def verify_for_project(self, token, project): - signed_payload = self.verify_signature_only(token) - - metrics_tags = [f"project:{project.name}", f"provider:{self.provider}"] + def find_provider(self, signed_claims: SignedClaims) -> OIDCProvider | None: + metrics_tags = [f"provider:{self.provider}"] self.metrics.increment( - "warehouse.oidc.verify_for_project.attempt", + "warehouse.oidc.find_provider.attempt", tags=metrics_tags, ) - if signed_payload is None: + provider = find_provider_by_issuer(self.db, self.issuer_url, signed_claims) + if provider is None: self.metrics.increment( - "warehouse.oidc.verify_for_project.invalid_signature", + "warehouse.oidc.find_provider.provider_not_found", tags=metrics_tags, ) - return False - - # In order for a signed JWT to be valid for a particular PyPI project, - # it must match at least one of the OIDC providers registered to - # the project. - verified = any( - provider.verify_claims(signed_payload) - for provider in project.oidc_providers - ) - if not verified: + return None + + if not provider.verify_claims(signed_claims): self.metrics.increment( - "warehouse.oidc.verify_for_project.invalid_claims", + "warehouse.oidc.find_provider.invalid_claims", tags=metrics_tags, ) + return None else: self.metrics.increment( - "warehouse.oidc.verify_for_project.ok", + "warehouse.oidc.find_provider.ok", tags=metrics_tags, ) - return verified + return provider class OIDCProviderServiceFactory: @@ -241,7 +288,9 @@ def __call__(self, _context, request): cache_url = request.registry.settings["oidc.jwk_cache_url"] metrics = request.find_service(IMetricsService, context=None) - return self.service_class(self.provider, self.issuer_url, cache_url, metrics) + return self.service_class( + request.db, self.provider, self.issuer_url, cache_url, metrics + ) def __eq__(self, other): if not isinstance(other, OIDCProviderServiceFactory): diff --git a/warehouse/oidc/utils.py b/warehouse/oidc/utils.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/utils.py @@ -0,0 +1,64 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from sqlalchemy.sql.expression import func, literal + +from warehouse.oidc.interfaces import SignedClaims +from warehouse.oidc.models import GitHubProvider, OIDCProvider + +GITHUB_OIDC_ISSUER_URL = "https://token.actions.githubusercontent.com" + +OIDC_ISSUER_URLS = {GITHUB_OIDC_ISSUER_URL} + + +def find_provider_by_issuer( + session, issuer_url: str, signed_claims: SignedClaims +) -> OIDCProvider | None: + """ + Given an OIDC issuer URL and a dictionary of claims that have been verified + for a token from that OIDC issuer, retrieve a concrete `OIDCProvider` registered + to one or more projects. + """ + + if issuer_url not in OIDC_ISSUER_URLS: + # This indicates a logic error, since we shouldn't have verified + # claims for an issuer that we don't recognize and support. + return None + + # This is the ugly part: OIDCProvider is polymorphic, and retrieving + # the correct provider requires us to query based on provider-specific + # claims. + if issuer_url == GITHUB_OIDC_ISSUER_URL: + repository = signed_claims["repository"] + repository_owner, repository_name = repository.split("/", 1) + workflow_prefix = f"{repository}/.github/workflows/" + workflow_ref = signed_claims["job_workflow_ref"].removeprefix(workflow_prefix) + + return ( + session.query(GitHubProvider) + .filter_by( + repository_name=repository_name, + repository_owner=repository_owner, + repository_owner_id=signed_claims["repository_owner_id"], + ) + .filter( + literal(workflow_ref).like( + func.concat(GitHubProvider.workflow_filename, "%") + ) + ) + .one_or_none() + ) + else: + # Unreachable; same logic error as above. + return None # pragma: no cover diff --git a/warehouse/oidc/views.py b/warehouse/oidc/views.py new file mode 100644 --- /dev/null +++ b/warehouse/oidc/views.py @@ -0,0 +1,129 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
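
Before the token-minting view below, a short sketch of the claim handling in the GitHub branch of `find_provider_by_issuer` above, using assumed claim values. It shows why the query matches `workflow_filename` with a LIKE prefix rather than strict equality: GitHub's `job_workflow_ref` claim carries a trailing `@<ref>` component.

# Assumed claim values, for illustration only.
signed_claims = {
    "repository": "octo-org/octo-repo",
    "repository_owner_id": "66653113",
    "job_workflow_ref": "octo-org/octo-repo/.github/workflows/release.yml@refs/heads/main",
}

# Derive the lookup keys the same way the query above does.
repository_owner, repository_name = signed_claims["repository"].split("/", 1)
workflow_prefix = f"{signed_claims['repository']}/.github/workflows/"
workflow_ref = signed_claims["job_workflow_ref"].removeprefix(workflow_prefix)

# workflow_ref is now "release.yml@refs/heads/main", which matches a
# registered provider whose workflow_filename is "release.yml" via the
# LIKE "release.yml%" filter.
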
+ +import time + +from pyramid.view import view_config + +from warehouse.admin.flags import AdminFlagValue +from warehouse.events.tags import EventTag +from warehouse.macaroons import caveats +from warehouse.macaroons.interfaces import IMacaroonService +from warehouse.oidc.interfaces import IOIDCProviderService + + +@view_config( + route_name="oidc.mint_token", + require_methods=["POST"], + renderer="json", + require_csrf=False, + has_translations=False, +) +def mint_token_from_oidc(request): + def _invalid(errors): + request.response.status = 422 + return {"message": "Token request failed", "errors": errors} + + oidc_enabled = request.registry.settings[ + "warehouse.oidc.enabled" + ] and not request.flags.enabled(AdminFlagValue.DISALLOW_OIDC) + if not oidc_enabled: + return _invalid( + errors=[ + { + "code": "not-enabled", + "description": "OIDC functionality not enabled", + } + ] + ) + + try: + body = request.json_body + except ValueError: + return _invalid( + errors=[{"code": "invalid-json", "description": "missing JSON body"}] + ) + + # `json_body` can return any valid top-level JSON type, so we have + # to make sure we're actually receiving a dictionary. + if not isinstance(body, dict): + return _invalid( + errors=[ + { + "code": "invalid-payload", + "description": "payload is not a JSON dictionary", + } + ] + ) + + unverified_jwt = body.get("token") + if unverified_jwt is None: + return _invalid( + errors=[{"code": "invalid-token", "description": "token is missing"}] + ) + + if not isinstance(unverified_jwt, str): + return _invalid( + errors=[{"code": "invalid-token", "description": "token is not a string"}] + ) + + # For the time being, GitHub is our only OIDC provider. + # In the future, this should locate the correct service based on an + # identifier in the request body. + oidc_service = request.find_service(IOIDCProviderService, name="github") + claims = oidc_service.verify_jwt_signature(unverified_jwt) + if not claims: + return _invalid( + errors=[ + {"code": "invalid-token", "description": "malformed or invalid token"} + ] + ) + + provider = oidc_service.find_provider(claims) + if not provider: + return _invalid( + errors=[ + { + "code": "invalid-provider", + "description": "valid token, but no corresponding provider", + } + ] + ) + + # At this point, we've verified that the given JWT is valid for the given + # project. All we need to do is mint a new token. + # NOTE: For OIDC-minted API tokens, the Macaroon's description string + # is purely an implementation detail and is not displayed to the user. 
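+    # The caveats attached below jointly scope the minted token: the
+    # OIDCProvider caveat pins it to this exact provider identity, the
+    # ProjectID caveat restricts it to the provider's registered projects,
+    # and the Expiration caveat bounds it to a 15-minute validity window.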
+ macaroon_service = request.find_service(IMacaroonService, context=None) + not_before = int(time.time()) + expires_at = not_before + 900 + serialized, dm = macaroon_service.create_macaroon( + request.domain, + f"OpenID token: {provider.provider_url} ({not_before})", + [ + caveats.OIDCProvider(oidc_provider_id=str(provider.id)), + caveats.ProjectID(project_ids=[str(p.id) for p in provider.projects]), + caveats.Expiration(expires_at=expires_at, not_before=not_before), + ], + oidc_provider_id=provider.id, + ) + for project in provider.projects: + project.record_event( + tag=EventTag.Project.ShortLivedAPITokenAdded, + ip_address=request.remote_addr, + additional={ + "expires": expires_at, + "provider_name": provider.provider_name, + "provider_url": provider.provider_url, + }, + ) + return {"success": True, "token": serialized} diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -258,6 +258,11 @@ def __acl__(self): (Allow, "group:moderators", "moderator"), ] + # The project has zero or more OIDC "providers" registered to it, + # each of which serves as an identity with the ability to upload releases. + for provider in self.oidc_providers: + acls.append((Allow, f"oidc:{provider.id}", ["upload"])) + # Get all of the users for this project. query = session.query(Role).filter(Role.project == self) query = query.options(orm.lazyload("project")) @@ -712,7 +717,10 @@ def __table_args__(cls): # noqa DateTime(timezone=False), nullable=False, server_default=sql.func.now() ) _submitted_by = Column( - "submitted_by", CIText, ForeignKey("users.username", onupdate="CASCADE") + "submitted_by", + CIText, + ForeignKey("users.username", onupdate="CASCADE"), + nullable=True, ) submitted_by = orm.relationship(User, lazy="raise_on_sql") submitted_from = Column(Text) diff --git a/warehouse/utils/security_policy.py b/warehouse/utils/security_policy.py --- a/warehouse/utils/security_policy.py +++ b/warehouse/utils/security_policy.py @@ -18,6 +18,7 @@ from zope.interface import implementer from warehouse.accounts.models import User +from warehouse.oidc.models import OIDCProvider class AuthenticationMethod(enum.Enum): @@ -103,8 +104,10 @@ def permits(self, request, context, permission): if isinstance(identity, User): principals.append(f"user:{identity.id}") principals.extend(_principals_for_authenticated_user(identity)) + elif isinstance(identity, OIDCProvider): + principals.append(f"oidc:{identity.id}") else: - return Denied("unimplemented") + return Denied("unknown identity") # NOTE: Observe that the parameters passed into the underlying AuthZ # policy here are not the same (or in the same order) as the ones
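
Taken together, the view above defines a simple exchange: POST a JSON body containing the OIDC JWT, and receive either a short-lived upload macaroon or a structured error list (HTTP 422). A sketch of the client side, with a placeholder JWT; the route name is `oidc.mint_token`, but its concrete URL path is not part of this diff, so the path used here is a hypothetical example.

import requests

# Hypothetical deployment path for the "oidc.mint_token" route.
resp = requests.post(
    "https://pypi.example.org/_/oidc/github/mint-token",
    json={"token": "<OIDC JWT obtained from the CI provider>"},
)
body = resp.json()
if body.get("success"):
    api_token = body["token"]  # upload macaroon, valid for 15 minutes
else:
    # On failure the view responds with HTTP 422 and a list of
    # {"code": ..., "description": ...} error objects.
    print(body["errors"])
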
diff --git a/tests/common/db/oidc.py b/tests/common/db/oidc.py new file mode 100644 --- /dev/null +++ b/tests/common/db/oidc.py @@ -0,0 +1,28 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import factory + +from warehouse.oidc.models import GitHubProvider + +from .base import WarehouseFactory + + +class GitHubProviderFactory(WarehouseFactory): + class Meta: + model = GitHubProvider + + id = factory.Faker("uuid4", cast_to=None) + repository_name = "foo" + repository_owner = "bar" + repository_owner_id = 123 + workflow_filename = "example.yml" diff --git a/tests/functional/test_templates.py b/tests/functional/test_templates.py --- a/tests/functional/test_templates.py +++ b/tests/functional/test_templates.py @@ -32,6 +32,7 @@ "format_package_type": "warehouse.filters:format_package_type", "parse_version": "warehouse.filters:parse_version", "localize_datetime": "warehouse.filters:localize_datetime", + "ctime": "warehouse.filters:ctime", "canonicalize_name": "packaging.utils:canonicalize_name", } diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -53,6 +53,7 @@ from ...common.db.accounts import EmailFactory, UserFactory from ...common.db.classifiers import ClassifierFactory +from ...common.db.oidc import GitHubProviderFactory from ...common.db.packaging import ( FileFactory, ProjectFactory, @@ -640,7 +641,6 @@ def test_wheel_has_wheel_file(self, tmpdir): class TestIsDuplicateFile: def test_is_duplicate_true(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -673,7 +673,6 @@ def test_is_duplicate_true(self, pyramid_config, db_request): assert legacy._is_duplicate_file(db_request.db, filename, hashes) def test_is_duplicate_none(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -712,7 +711,6 @@ def test_is_duplicate_none(self, pyramid_config, db_request): ) def test_is_duplicate_false_same_blake2(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -749,7 +747,6 @@ def test_is_duplicate_false_same_blake2(self, pyramid_config, db_request): ) def test_is_duplicate_false(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -787,7 +784,6 @@ def test_is_duplicate_false(self, pyramid_config, db_request): class TestFileUpload: def test_fails_disallow_new_upload(self, pyramid_config, pyramid_request): - pyramid_config.testing_securitypolicy(userid=1) pyramid_request.flags = pretend.stub( enabled=lambda value: value == AdminFlagValue.DISALLOW_NEW_UPLOAD ) @@ -807,11 +803,12 @@ def test_fails_disallow_new_upload(self, pyramid_config, pyramid_request): @pytest.mark.parametrize("version", ["2", "3", "-1", "0", "dog", 
"cat"]) def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): - pyramid_config.testing_securitypolicy(userid=1) pyramid_request.POST["protocol_version"] = version pyramid_request.flags = pretend.stub(enabled=lambda *a: False) - pyramid_request.user = pretend.stub(primary_email=pretend.stub(verified=True)) + user = pretend.stub(primary_email=pretend.stub(verified=True)) + pyramid_config.testing_securitypolicy(identity=user) + pyramid_request.user = user with pytest.raises(HTTPBadRequest) as excinfo: legacy.file_upload(pyramid_request) @@ -1001,9 +998,9 @@ def test_fails_invalid_version(self, pyramid_config, pyramid_request, version): def test_fails_invalid_post_data( self, pyramid_config, db_request, post_data, message ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict(post_data) @@ -1017,9 +1014,9 @@ def test_fails_invalid_post_data( @pytest.mark.parametrize("name", ["requirements.txt", "rrequirements.txt"]) def test_fails_with_invalid_names(self, pyramid_config, db_request, name): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict( @@ -1067,10 +1064,10 @@ def test_fails_with_invalid_names(self, pyramid_config, db_request, name): def test_fails_with_ultranormalized_names( self, pyramid_config, db_request, conflicting_name ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) ProjectFactory.create(name="toasting") + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.db.flush() @@ -1125,9 +1122,9 @@ def test_fails_with_ultranormalized_names( def test_fails_invalid_render( self, pyramid_config, db_request, description_content_type, description, message ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" @@ -1192,8 +1189,8 @@ def test_fails_invalid_render( def test_fails_with_stdlib_names(self, pyramid_config, db_request, name): user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user - pyramid_config.testing_securitypolicy(userid=1) db_request.POST = MultiDict( { "metadata_version": "1.2", @@ -1237,9 +1234,9 @@ def test_fails_with_admin_flag_set(self, pyramid_config, db_request): .first() ) admin_flag.enabled = True - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user name = "fails-with-admin-flag" db_request.POST = MultiDict( @@ -1273,9 +1270,9 @@ def test_fails_with_admin_flag_set(self, pyramid_config, db_request): ) def test_upload_fails_without_file(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict( { @@ -1297,10 +1294,10 @@ def test_upload_fails_without_file(self, pyramid_config, db_request): @pytest.mark.parametrize("value", [("UNKNOWN"), ("UNKNOWN\n\n")]) 
def test_upload_cleans_unknown_values(self, pyramid_config, db_request, value): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() - EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user + EmailFactory.create(user=user) db_request.POST = MultiDict( { "metadata_version": "1.2", @@ -1317,9 +1314,9 @@ def test_upload_cleans_unknown_values(self, pyramid_config, db_request, value): assert "name" not in db_request.POST def test_upload_escapes_nul_characters(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict( { @@ -1366,7 +1363,6 @@ def test_successful_upload( ): monkeypatch.setattr(tempfile, "tempdir", str(tmpdir)) - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1377,6 +1373,7 @@ def test_successful_upload( filename = "{}-{}.tar.gz".format(project.name, release.version) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" @@ -1521,14 +1518,14 @@ def storage_service_store(path, file_path, *, meta): ] @pytest.mark.parametrize("content_type", [None, "image/foobar"]) - def test_upload_fails_invlaid_content_type( + def test_upload_fails_invalid_content_type( self, tmpdir, monkeypatch, pyramid_config, db_request, content_type ): monkeypatch.setattr(tempfile, "tempdir", str(tmpdir)) - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user project = ProjectFactory.create() release = ReleaseFactory.create(project=project, version="1.0") @@ -1564,10 +1561,10 @@ def test_upload_fails_invlaid_content_type( assert resp.status == "400 Invalid distribution file." 
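
A recurring edit throughout these upload tests replaces the old `userid`-based test security policy with an identity-based one; condensed, the new setup pattern is:

user = UserFactory.create()
EmailFactory.create(user=user)
pyramid_config.testing_securitypolicy(identity=user)  # was: userid=1
db_request.user = user
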
def test_upload_fails_with_legacy_type(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user project = ProjectFactory.create() release = ReleaseFactory.create(project=project, version="1.0") @@ -1603,10 +1600,10 @@ def test_upload_fails_with_legacy_type(self, pyramid_config, db_request): ) def test_upload_fails_with_legacy_ext(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user project = ProjectFactory.create() release = ReleaseFactory.create(project=project, version="1.0") @@ -1642,9 +1639,9 @@ def test_upload_fails_with_legacy_ext(self, pyramid_config, db_request): ) def test_upload_fails_for_second_sdist(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1683,9 +1680,9 @@ def test_upload_fails_for_second_sdist(self, pyramid_config, db_request): @pytest.mark.parametrize("sig", [b"lol nope"]) def test_upload_fails_with_invalid_signature(self, pyramid_config, db_request, sig): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1721,9 +1718,9 @@ def test_upload_fails_with_invalid_signature(self, pyramid_config, db_request, s assert resp.status == "400 PGP signature isn't ASCII armored." def test_upload_fails_with_invalid_classifier(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1778,9 +1775,9 @@ def test_upload_fails_with_invalid_classifier(self, pyramid_config, db_request): def test_upload_fails_with_deprecated_classifier( self, pyramid_config, db_request, monkeypatch, deprecated_classifiers, expected ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1853,9 +1850,9 @@ def test_upload_fails_with_deprecated_classifier( def test_upload_fails_with_invalid_digest( self, pyramid_config, db_request, digests ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1891,9 +1888,9 @@ def test_upload_fails_with_invalid_digest( ) def test_upload_fails_with_invalid_file(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -1924,7 +1921,6 @@ def test_upload_fails_with_invalid_file(self, pyramid_config, db_request): assert resp.status == "400 Invalid distribution file." 
def test_upload_fails_end_of_file_error(self, pyramid_config, db_request, metrics): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -1934,6 +1930,7 @@ def test_upload_fails_end_of_file_error(self, pyramid_config, db_request, metric # Malformed tar.gz, triggers EOF error file_contents = b"\x8b\x08\x00\x00\x00\x00\x00\x00\xff" + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -1968,9 +1965,9 @@ def test_upload_fails_end_of_file_error(self, pyramid_config, db_request, metric assert resp.status == "400 Invalid distribution file." def test_upload_fails_with_too_large_file(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create(name="foobar", upload_limit=(100 * 1024 * 1024)) @@ -2010,9 +2007,9 @@ def test_upload_fails_with_too_large_file(self, pyramid_config, db_request): def test_upload_fails_with_too_large_project_size_default_limit( self, pyramid_config, db_request ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create( @@ -2057,9 +2054,9 @@ def test_upload_fails_with_too_large_project_size_default_limit( def test_upload_fails_with_too_large_project_size_custom_limit( self, pyramid_config, db_request ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) one_megabyte = 1 * 1024 * 1024 @@ -2107,9 +2104,9 @@ def test_upload_fails_with_too_large_project_size_custom_limit( def test_upload_succeeds_custom_project_size_limit( self, pyramid_config, db_request, metrics ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) one_megabyte = 1 * 1024 * 1024 @@ -2209,9 +2206,9 @@ def test_upload_succeeds_custom_project_size_limit( ] def test_upload_fails_with_too_large_signature(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2250,9 +2247,9 @@ def test_upload_fails_with_too_large_signature(self, pyramid_config, db_request) def test_upload_fails_with_previously_used_filename( self, pyramid_config, db_request ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2294,9 +2291,9 @@ def test_upload_fails_with_previously_used_filename( def test_upload_noop_with_existing_filename_same_content( self, pyramid_config, db_request ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2341,9 +2338,9 @@ def test_upload_noop_with_existing_filename_same_content( def 
test_upload_fails_with_existing_filename_diff_content( self, pyramid_config, db_request ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2395,9 +2392,9 @@ def test_upload_fails_with_existing_filename_diff_content( def test_upload_fails_with_diff_filename_same_blake2( self, pyramid_config, db_request ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2450,9 +2447,9 @@ def test_upload_fails_with_diff_filename_same_blake2( ) def test_upload_fails_with_wrong_filename(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2489,9 +2486,9 @@ def test_upload_fails_with_wrong_filename(self, pyramid_config, db_request): ) def test_upload_fails_with_invalid_extension(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2531,9 +2528,9 @@ def test_upload_fails_with_invalid_extension(self, pyramid_config, db_request): def test_upload_fails_with_unsafe_filename( self, pyramid_config, db_request, character ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2565,9 +2562,7 @@ def test_upload_fails_with_unsafe_filename( assert resp.status_code == 400 assert resp.status == "400 Cannot upload a file with '/' or '\\' in the name." - def test_upload_fails_without_permission(self, pyramid_config, db_request): - pyramid_config.testing_securitypolicy(userid=1, permissive=False) - + def test_upload_fails_without_user_permission(self, pyramid_config, db_request): user1 = UserFactory.create() EmailFactory.create(user=user1) user2 = UserFactory.create() @@ -2578,6 +2573,7 @@ def test_upload_fails_without_permission(self, pyramid_config, db_request): filename = "{}-{}.tar.wat".format(project.name, release.version) + pyramid_config.testing_securitypolicy(identity=user2, permissive=False) db_request.user = user2 db_request.POST = MultiDict( { @@ -2609,16 +2605,57 @@ def test_upload_fails_without_permission(self, pyramid_config, db_request): "See /the/help/url/ for more information." 
).format(user2.username, project.name) + def test_upload_fails_without_oidc_provider_permission( + self, pyramid_config, db_request + ): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") + + provider = GitHubProviderFactory.create(projects=[project]) + + filename = "{}-{}.tar.wat".format(project.name, release.version) + + pyramid_config.testing_securitypolicy(identity=provider, permissive=False) + db_request.user = None + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": project.name, + "version": release.version, + "filetype": "sdist", + "md5_digest": "nope!", + "content": pretend.stub( + filename=filename, + file=io.BytesIO(b"a" * (legacy.MAX_FILESIZE + 1)), + type="application/tar", + ), + } + ) + + db_request.help_url = pretend.call_recorder(lambda **kw: "/the/help/url/") + + with pytest.raises(HTTPForbidden) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + + assert db_request.help_url.calls == [pretend.call(_anchor="project-name")] + assert resp.status_code == 403 + assert resp.status == ( + "403 The given token isn't allowed to upload to project '{0}'. " + "See /the/help/url/ for more information." + ).format(project.name) + def test_upload_succeeds_with_2fa_enabled( self, pyramid_config, db_request, metrics, monkeypatch ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create(totp_secret=b"secret") EmailFactory.create(user=user) project = ProjectFactory.create() RoleFactory.create(user=user, project=project) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -2696,8 +2733,6 @@ def test_upload_succeeds_with_wheel( ): monkeypatch.setattr(tempfile, "tempdir", str(tmpdir)) - pyramid_config.testing_securitypolicy(userid=1) - user = UserFactory.create() EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2706,6 +2741,7 @@ def test_upload_succeeds_with_wheel( filename = "{}-{}-cp34-none-{}.whl".format(project.name, release.version, plat) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -2805,8 +2841,6 @@ def test_upload_succeeds_with_wheel_after_sdist( ): monkeypatch.setattr(tempfile, "tempdir", str(tmpdir)) - pyramid_config.testing_securitypolicy(userid=1) - user = UserFactory.create() EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2820,6 +2854,7 @@ def test_upload_succeeds_with_wheel_after_sdist( filename = "{}-{}-cp34-none-any.whl".format(project.name, release.version) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -2921,9 +2956,9 @@ def storage_service_store(path, file_path, *, meta): def test_upload_fails_with_unsupported_wheel_plat( self, monkeypatch, pyramid_config, db_request, plat ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user EmailFactory.create(user=user) project = ProjectFactory.create() @@ -2963,7 +2998,6 @@ def test_upload_fails_with_unsupported_wheel_plat( def test_upload_updates_existing_project_name( self, pyramid_config, db_request, metrics ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -2973,6 +3007,7 @@ def 
test_upload_updates_existing_project_name( new_project_name = "package-name" filename = "{}-{}.tar.gz".format(new_project_name, "1.1") + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -3027,7 +3062,6 @@ def test_upload_updates_existing_project_name( def test_upload_succeeds_creates_release( self, pyramid_config, db_request, metrics, version, expected_version ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -3039,6 +3073,7 @@ def test_upload_succeeds_creates_release( filename = "{}-{}.tar.gz".format(project.name, "1.0") + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -3151,7 +3186,6 @@ def test_equivalent_version_one_release(self, pyramid_config, db_request, metric upload with an equivalent version like '1.0.0' will not make a second release """ - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -3159,6 +3193,7 @@ def test_equivalent_version_one_release(self, pyramid_config, db_request, metric release = ReleaseFactory.create(project=project, version="1.0") RoleFactory.create(user=user, project=project) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -3198,7 +3233,6 @@ def test_equivalent_canonical_versions(self, pyramid_config, db_request, metrics Test that if more than one release with equivalent canonical versions exists, we use the one that is an exact match """ - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) @@ -3207,6 +3241,7 @@ def test_equivalent_canonical_versions(self, pyramid_config, db_request, metrics release_b = ReleaseFactory.create(project=project, version="1.0.0") RoleFactory.create(user=user, project=project) + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.user_agent = "warehouse-tests/6.6.6" db_request.POST = MultiDict( @@ -3236,14 +3271,57 @@ def test_equivalent_canonical_versions(self, pyramid_config, db_request, metrics assert len(release_a.files.all()) == 0 assert len(release_b.files.all()) == 1 + def test_upload_fails_nonuser_identity_cannot_create_project( + self, pyramid_config, db_request, metrics + ): + provider = GitHubProviderFactory.create() + + filename = "{}-{}.tar.gz".format("example", "1.0") + + pyramid_config.testing_securitypolicy(identity=provider) + db_request.user = None + db_request.POST = MultiDict( + { + "metadata_version": "1.2", + "name": "example", + "version": "1.0", + "filetype": "sdist", + "md5_digest": _TAR_GZ_PKG_MD5, + "content": pretend.stub( + filename=filename, + file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), + type="application/tar", + ), + } + ) + + storage_service = pretend.stub(store=lambda path, filepath, meta: None) + db_request.find_service = lambda svc, name=None, context=None: { + IFileStorage: storage_service, + IMetricsService: metrics, + }.get(svc) + db_request.user_agent = "warehouse-tests/6.6.6" + + with pytest.raises(HTTPBadRequest) as excinfo: + legacy.file_upload(db_request) + + resp = excinfo.value + + assert resp.status_code == 400 + assert resp.status == ( + "400 Non-user identities cannot create new projects. 
" + "You must first create a project as a user, and then " + "configure the project to use OpenID Connect." + ) + def test_upload_succeeds_creates_project(self, pyramid_config, db_request, metrics): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) filename = "{}-{}.tar.gz".format("example", "1.0") + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict( { @@ -3343,7 +3421,6 @@ def test_upload_succeeds_creates_project(self, pyramid_config, db_request, metri def test_upload_requires_verified_email( self, pyramid_config, db_request, emails_verified, expected_success, metrics ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() for i, verified in enumerate(emails_verified): @@ -3351,6 +3428,7 @@ def test_upload_requires_verified_email( filename = "{}-{}.tar.gz".format("example", "1.0") + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict( { @@ -3399,13 +3477,13 @@ def test_upload_requires_verified_email( def test_upload_purges_legacy( self, pyramid_config, db_request, monkeypatch, metrics ): - pyramid_config.testing_securitypolicy(userid=1) user = UserFactory.create() EmailFactory.create(user=user) filename = "{}-{}.tar.gz".format("example", "1.0") + pyramid_config.testing_securitypolicy(identity=user) db_request.user = user db_request.POST = MultiDict( { diff --git a/tests/unit/macaroons/test_caveats.py b/tests/unit/macaroons/test_caveats.py --- a/tests/unit/macaroons/test_caveats.py +++ b/tests/unit/macaroons/test_caveats.py @@ -25,6 +25,7 @@ CaveatError, Expiration, Failure, + OIDCProvider, ProjectID, ProjectName, RequestUser, @@ -36,6 +37,7 @@ from warehouse.macaroons.caveats._core import _CaveatRegistry from ...common.db.accounts import UserFactory +from ...common.db.oidc import GitHubProviderFactory from ...common.db.packaging import ProjectFactory @@ -273,6 +275,65 @@ def test_verify_ok(self, db_request): assert result == Success() +class TestOIDCProviderCaveat: + def test_verify_no_identity(self): + caveat = OIDCProvider(oidc_provider_id="invalid") + result = caveat.verify( + pretend.stub(identity=None), pretend.stub(), pretend.stub() + ) + + assert result == Failure( + "OIDC scoped token used outside of an OIDC identified request" + ) + + def test_verify_invalid_provider_id(self, db_request): + provider = GitHubProviderFactory.create() + + caveat = OIDCProvider(oidc_provider_id="invalid") + result = caveat.verify( + pretend.stub(identity=provider), pretend.stub(), pretend.stub() + ) + + assert result == Failure( + "current OIDC provider does not match provider restriction in token" + ) + + def test_verify_invalid_context(self, db_request): + provider = GitHubProviderFactory.create() + + caveat = OIDCProvider(oidc_provider_id=str(provider.id)) + result = caveat.verify( + pretend.stub(identity=provider), pretend.stub(), pretend.stub() + ) + + assert result == Failure("OIDC scoped token used outside of a project context") + + def test_verify_invalid_project(self, db_request): + foobar = ProjectFactory.create(name="foobar") + foobaz = ProjectFactory.create(name="foobaz") + + # This OIDC provider is only registered to "foobar", so it should + # not verify a caveat presented for "foobaz". 
+ provider = GitHubProviderFactory.create(projects=[foobar]) + caveat = OIDCProvider(oidc_provider_id=str(provider.id)) + + result = caveat.verify(pretend.stub(identity=provider), foobaz, pretend.stub()) + + assert result == Failure("OIDC scoped token is not valid for project 'foobaz'") + + def test_verify_ok(self, db_request): + foobar = ProjectFactory.create(name="foobar") + + # This OIDC provider is only registered to "foobar", so it should + # not verify a caveat presented for "foobaz". + provider = GitHubProviderFactory.create(projects=[foobar]) + caveat = OIDCProvider(oidc_provider_id=str(provider.id)) + + result = caveat.verify(pretend.stub(identity=provider), foobar, pretend.stub()) + + assert result == Success() + + class TestCaveatRegistry: def test_cannot_reuse_tag(self): registry = _CaveatRegistry() diff --git a/tests/unit/macaroons/test_security_policy.py b/tests/unit/macaroons/test_security_policy.py --- a/tests/unit/macaroons/test_security_policy.py +++ b/tests/unit/macaroons/test_security_policy.py @@ -18,7 +18,6 @@ from pyramid.security import Denied from zope.interface.verify import verifyClass -from warehouse.accounts.interfaces import IUserService from warehouse.macaroons import security_policy from warehouse.macaroons.interfaces import IMacaroonService from warehouse.macaroons.services import InvalidMacaroonError @@ -77,7 +76,7 @@ def test_forget_and_remember(self): assert policy.forget(pretend.stub()) == [] assert policy.remember(pretend.stub(), pretend.stub()) == [] - def test_identify_no_macaroon(self, monkeypatch): + def test_identity_no_http_macaroon(self, monkeypatch): policy = security_policy.MacaroonSecurityPolicy() vary_cb = pretend.stub() @@ -99,7 +98,7 @@ def test_identify_no_macaroon(self, monkeypatch): assert add_vary_cb.calls == [pretend.call("Authorization")] assert request.add_response_callback.calls == [pretend.call(vary_cb)] - def test_identify_no_userid(self, monkeypatch): + def test_identity_no_db_macaroon(self, monkeypatch): policy = security_policy.MacaroonSecurityPolicy() vary_cb = pretend.stub() @@ -113,11 +112,12 @@ def test_identify_no_userid(self, monkeypatch): ) macaroon_service = pretend.stub( - find_userid=pretend.call_recorder(lambda m: None) + find_from_raw=pretend.call_recorder(pretend.raiser(InvalidMacaroonError)), ) + request = pretend.stub( add_response_callback=pretend.call_recorder(lambda cb: None), - find_service=pretend.call_recorder(lambda i, **kw: macaroon_service), + find_service=pretend.call_recorder(lambda iface, **kw: macaroon_service), ) assert policy.identity(request) is None @@ -125,12 +125,12 @@ def test_identify_no_userid(self, monkeypatch): assert request.find_service.calls == [ pretend.call(IMacaroonService, context=None), ] - assert macaroon_service.find_userid.calls == [pretend.call(raw_macaroon)] + assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)] assert add_vary_cb.calls == [pretend.call("Authorization")] assert request.add_response_callback.calls == [pretend.call(vary_cb)] - def test_identify(self, monkeypatch): + def test_identity_user(self, monkeypatch): policy = security_policy.MacaroonSecurityPolicy() vary_cb = pretend.stub() @@ -143,32 +143,57 @@ def test_identify(self, monkeypatch): security_policy, "_extract_http_macaroon", extract_http_macaroon ) - userid = pretend.stub() user = pretend.stub() + macaroon = pretend.stub(user=user) macaroon_service = pretend.stub( - find_userid=pretend.call_recorder(lambda m: userid) + find_from_raw=pretend.call_recorder(lambda rm: macaroon), ) - 
user_service = pretend.stub(get_user=pretend.call_recorder(lambda uid: user)) - - def find_service(interface, **kw): - if interface == IMacaroonService: - return macaroon_service - else: - return user_service request = pretend.stub( add_response_callback=pretend.call_recorder(lambda cb: None), - find_service=pretend.call_recorder(find_service), + find_service=pretend.call_recorder(lambda iface, **kw: macaroon_service), ) assert policy.identity(request) is user assert extract_http_macaroon.calls == [pretend.call(request)] assert request.find_service.calls == [ pretend.call(IMacaroonService, context=None), - pretend.call(IUserService, context=None), ] - assert macaroon_service.find_userid.calls == [pretend.call(raw_macaroon)] - assert user_service.get_user.calls == [pretend.call(userid)] + assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)] + + assert add_vary_cb.calls == [pretend.call("Authorization")] + assert request.add_response_callback.calls == [pretend.call(vary_cb)] + + def test_identity_oidc_provider(self, monkeypatch): + policy = security_policy.MacaroonSecurityPolicy() + + vary_cb = pretend.stub() + add_vary_cb = pretend.call_recorder(lambda *v: vary_cb) + monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb) + + raw_macaroon = pretend.stub() + extract_http_macaroon = pretend.call_recorder(lambda r: raw_macaroon) + monkeypatch.setattr( + security_policy, "_extract_http_macaroon", extract_http_macaroon + ) + + oidc_provider = pretend.stub() + macaroon = pretend.stub(user=None, oidc_provider=oidc_provider) + macaroon_service = pretend.stub( + find_from_raw=pretend.call_recorder(lambda rm: macaroon), + ) + + request = pretend.stub( + add_response_callback=pretend.call_recorder(lambda cb: None), + find_service=pretend.call_recorder(lambda iface, **kw: macaroon_service), + ) + + assert policy.identity(request) is oidc_provider + assert extract_http_macaroon.calls == [pretend.call(request)] + assert request.find_service.calls == [ + pretend.call(IMacaroonService, context=None), + ] + assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)] assert add_vary_cb.calls == [pretend.call("Authorization")] assert request.add_response_callback.calls == [pretend.call(vary_cb)] diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -27,6 +27,7 @@ from warehouse.macaroons.models import Macaroon from ...common.db.accounts import UserFactory +from ...common.db.oidc import GitHubProviderFactory def test_database_macaroon_factory(): @@ -63,29 +64,31 @@ def test_find_macaroon(self, user_service, macaroon_service): user = UserFactory.create() _, macaroon = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.RequestUser(user_id=str(user.id))], + user_id=user.id, ) dm = macaroon_service.find_macaroon(str(macaroon.id)) assert isinstance(dm, Macaroon) assert macaroon.id == dm.id + assert macaroon.user == user def test_find_from_raw(self, user_service, macaroon_service): user = UserFactory.create() serialized, macaroon = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.RequestUser(user_id=str(user.id))], + user_id=user.id, ) dm = macaroon_service.find_from_raw(serialized) assert isinstance(dm, Macaroon) assert macaroon.id == dm.id + assert macaroon.user == user @pytest.mark.parametrize( "raw_macaroon", @@ -124,19 +127,29 @@ def 
test_find_userid_valid_macaroon_trailinglinebreak(self, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.ProjectName(normalized_names=["foo"])], + user_id=user.id, ) assert macaroon_service.find_userid(f"{raw_macaroon}\n") is None + def test_find_userid_oidc_macaroon(self, macaroon_service): + provider = GitHubProviderFactory.create() + raw_macaroon, _, = macaroon_service.create_macaroon( + "fake location", + "fake description", + [caveats.OIDCProvider(oidc_provider_id=str(provider.id))], + oidc_provider_id=provider.id, + ) + assert macaroon_service.find_userid(raw_macaroon) is None + def test_find_userid(self, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.RequestUser(user_id=str(user.id))], + user_id=user.id, ) user_id = macaroon_service.find_userid(raw_macaroon) @@ -177,9 +190,9 @@ def test_verify_invalid_macaroon(self, monkeypatch, user_service, macaroon_servi user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.RequestUser(user_id=str(user.id))], + user_id=user.id, ) verify = pretend.call_recorder(lambda m, k, r, c, p: WarehouseDenied("foo")) @@ -241,9 +254,9 @@ def test_verify_valid_macaroon(self, monkeypatch, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.RequestUser(user_id=str(user.id))], + user_id=user.id, ) verify = pretend.call_recorder(lambda m, k, r, c, p: True) @@ -262,9 +275,9 @@ def test_delete_macaroon(self, user_service, macaroon_service): user = UserFactory.create() _, macaroon = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [caveats.RequestUser(user_id=str(user.id))], + user_id=user.id, ) macaroon_id = str(macaroon.id) @@ -283,12 +296,12 @@ def test_get_macaroon_by_description(self, macaroon_service): user = UserFactory.create() _, macaroon = macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [ caveats.ProjectName(normalized_names=["foo", "bar"]), caveats.Expiration(expires_at=10, not_before=5), ], + user_id=user.id, ) dm = macaroon_service.find_macaroon(str(macaroon.id)) @@ -301,10 +314,12 @@ def test_get_macaroon_by_description(self, macaroon_service): def test_errors_with_wrong_caveats(self, macaroon_service): user = UserFactory.create() - with pytest.raises(TypeError): + with pytest.raises( + TypeError, match="scopes must be a list of Caveat instances" + ): macaroon_service.create_macaroon( "fake location", - user.id, "fake description", [{"version": 1, "permissions": "user"}], + user_id=user.id, ) diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -10031,11 +10031,13 @@ def test_add_github_oidc_provider_invalid_form(self, monkeypatch): assert view._check_ratelimits.calls == [pretend.call()] assert github_provider_form_obj.validate.calls == [pretend.call()] - def test_delete_oidc_provider(self, monkeypatch): + def test_delete_oidc_provider_registered_to_multiple_projects(self, monkeypatch): provider = pretend.stub( provider_name="fakeprovider", id="fakeid", + projects=[pretend.stub(), pretend.stub()], ) + # NOTE: Can't set __str__ using pretend.stub() monkeypatch.setattr(provider.__class__, 
"__str__", lambda s: "fakespecifier") @@ -10055,6 +10057,7 @@ def test_delete_oidc_provider(self, monkeypatch): POST=pretend.stub(), db=pretend.stub( query=lambda *a: pretend.stub(get=lambda id: provider), + delete=pretend.call_recorder(lambda o: None), ), remote_addr="0.0.0.0", path="request-path", @@ -10110,6 +10113,105 @@ def test_delete_oidc_provider(self, monkeypatch): assert request.session.flash.calls == [ pretend.call("Removed fakespecifier from fakeproject", queue="success") ] + # The provider is not actually removed entirely from the DB, since it's + # registered to other projects that haven't removed it. + assert request.db.delete.calls == [] + + assert delete_provider_form_cls.calls == [pretend.call(request.POST)] + assert delete_provider_form_obj.validate.calls == [pretend.call()] + + assert views.send_oidc_provider_removed_email.calls == [ + pretend.call( + request, fakeuser, project_name="fakeproject", provider=provider + ) + for fakeuser in fakeusers + ] + + def test_delete_oidc_provider_entirely(self, monkeypatch): + provider = pretend.stub( + provider_name="fakeprovider", + id="fakeid", + # NOTE: This is technically out of sync with the state below; + # it should be projects=[project], but we make it empty + # to trigger the DB deletion case. + projects=[], + ) + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr(provider.__class__, "__str__", lambda s: "fakespecifier") + + fakeusers = [pretend.stub(), pretend.stub(), pretend.stub()] + project = pretend.stub( + oidc_providers=[provider], + name="fakeproject", + record_event=pretend.call_recorder(lambda *a, **kw: None), + users=fakeusers, + ) + metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) + request = pretend.stub( + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + find_service=lambda *a, **kw: metrics, + flags=pretend.stub(enabled=pretend.call_recorder(lambda f: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + POST=pretend.stub(), + db=pretend.stub( + query=lambda *a: pretend.stub(get=lambda id: provider), + delete=pretend.call_recorder(lambda o: None), + ), + remote_addr="0.0.0.0", + path="request-path", + ) + + delete_provider_form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + provider_id=pretend.stub(data="fakeid"), + ) + delete_provider_form_cls = pretend.call_recorder( + lambda *a, **kw: delete_provider_form_obj + ) + monkeypatch.setattr(views, "DeleteProviderForm", delete_provider_form_cls) + monkeypatch.setattr( + views, + "send_oidc_provider_removed_email", + pretend.call_recorder(lambda *a, **kw: None), + ) + + view = views.ManageOIDCProviderViews(project, request) + default_response = {"_": pretend.stub()} + monkeypatch.setattr( + views.ManageOIDCProviderViews, "default_response", default_response + ) + + assert isinstance(view.delete_oidc_provider(), HTTPSeeOther) + assert provider not in project.oidc_providers + + assert view.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.delete_provider.attempt", + ), + pretend.call( + "warehouse.oidc.delete_provider.ok", tags=["provider:fakeprovider"] + ), + ] + + assert project.record_event.calls == [ + pretend.call( + tag="project:oidc:provider-removed", + ip_address=request.remote_addr, + additional={ + "provider": "fakeprovider", + "id": "fakeid", + "specifier": "fakespecifier", + }, + ) + ] + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISALLOW_OIDC) + ] + assert request.session.flash.calls == [ + 
pretend.call("Removed fakespecifier from fakeproject", queue="success") + ] + assert request.db.delete.calls == [pretend.call(provider)] assert delete_provider_form_cls.calls == [pretend.call(request.POST)] assert delete_provider_form_obj.validate.calls == [pretend.call()] diff --git a/tests/unit/oidc/test_models.py b/tests/unit/oidc/test_models.py --- a/tests/unit/oidc/test_models.py +++ b/tests/unit/oidc/test_models.py @@ -74,6 +74,10 @@ def test_github_provider_computed_properties(self): assert getattr(provider, claim_name) is not None assert str(provider) == "fakeworkflow.yml @ fakeowner/fakerepo" + assert ( + provider.provider_url == "https://github.com/fakeowner/fakerepo/blob/HEAD/" + f".github/workflows/{provider.workflow_filename}" + ) def test_github_provider_unaccounted_claims(self, monkeypatch): provider = models.GitHubProvider( diff --git a/tests/unit/oidc/test_services.py b/tests/unit/oidc/test_services.py --- a/tests/unit/oidc/test_services.py +++ b/tests/unit/oidc/test_services.py @@ -30,6 +30,7 @@ def test_oidc_provider_service_factory(): metrics = pretend.stub() request = pretend.stub( + db=pretend.stub(), registry=pretend.stub( settings={"oidc.jwk_cache_url": "rediss://another.example.com"} ), @@ -38,6 +39,7 @@ def test_oidc_provider_service_factory(): service = factory(pretend.stub(), request) assert isinstance(service, factory.service_class) + assert service.db == request.db assert service.provider == factory.provider assert service.issuer_url == factory.issuer_url assert service.cache_url == "rediss://another.example.com" @@ -50,8 +52,14 @@ def test_oidc_provider_service_factory(): class TestOIDCProviderService: - def test_verify_signature_only(self, monkeypatch): + def test_interface_matches(self): + assert verifyClass( + interfaces.IOIDCProviderService, services.OIDCProviderService + ) + + def test_verify_jwt_signature(self, monkeypatch): service = services.OIDCProviderService( + session=pretend.stub(), provider=pretend.stub(), issuer_url=pretend.stub(), cache_url=pretend.stub(), @@ -66,19 +74,21 @@ def test_verify_signature_only(self, monkeypatch): ) monkeypatch.setattr(services, "jwt", jwt) - assert service.verify_signature_only(token) == decoded + assert service.verify_jwt_signature(token) == decoded assert jwt.decode.calls == [ pretend.call( token, key="fake-key", algorithms=["RS256"], - verify_signature=True, - require=["iss", "iat", "nbf", "exp", "aud"], - verify_iss=True, - verify_iat=True, - verify_nbf=True, - verify_exp=True, - verify_aud=True, + options=dict( + verify_signature=True, + require=["iss", "iat", "nbf", "exp", "aud"], + verify_iss=True, + verify_iat=True, + verify_nbf=True, + verify_exp=True, + verify_aud=True, + ), issuer=service.issuer_url, audience="pypi", leeway=30, @@ -86,12 +96,15 @@ def test_verify_signature_only(self, monkeypatch): ] @pytest.mark.parametrize("exc", [PyJWTError, ValueError]) - def test_verify_signature_only_fails(self, monkeypatch, exc): + def test_verify_jwt_signature_fails(self, monkeypatch, exc): service = services.OIDCProviderService( - provider=pretend.stub(), + session=pretend.stub(), + provider="fakeprovider", issuer_url=pretend.stub(), cache_url=pretend.stub(), - metrics=pretend.stub(), + metrics=pretend.stub( + increment=pretend.call_recorder(lambda *a, **kw: None) + ), ) token = pretend.stub() @@ -101,10 +114,17 @@ def test_verify_signature_only_fails(self, monkeypatch, exc): ) monkeypatch.setattr(services, "jwt", jwt) - assert service.verify_signature_only(token) is None + assert 
service.verify_jwt_signature(token) is None + assert service.metrics.increment.calls == [ + pretend.call( + "warehouse.oidc.verify_jwt_signature.invalid_signature", + tags=["provider:fakeprovider"], + ) + ] - def test_verify_for_project(self, monkeypatch): + def test_find_provider(self, monkeypatch): service = services.OIDCProviderService( + session=pretend.stub(), provider="fakeprovider", issuer_url=pretend.stub(), cache_url=pretend.stub(), @@ -114,30 +134,28 @@ def test_verify_for_project(self, monkeypatch): ) token = pretend.stub() - claims = pretend.stub() - monkeypatch.setattr( - service, "verify_signature_only", pretend.call_recorder(lambda t: claims) - ) provider = pretend.stub(verify_claims=pretend.call_recorder(lambda c: True)) - project = pretend.stub(name="fakeproject", oidc_providers=[provider]) + find_provider_by_issuer = pretend.call_recorder(lambda *a: provider) + monkeypatch.setattr( + services, "find_provider_by_issuer", find_provider_by_issuer + ) - assert service.verify_for_project(token, project) + assert service.find_provider(token) == provider assert service.metrics.increment.calls == [ pretend.call( - "warehouse.oidc.verify_for_project.attempt", - tags=["project:fakeproject", "provider:fakeprovider"], + "warehouse.oidc.find_provider.attempt", + tags=["provider:fakeprovider"], ), pretend.call( - "warehouse.oidc.verify_for_project.ok", - tags=["project:fakeproject", "provider:fakeprovider"], + "warehouse.oidc.find_provider.ok", + tags=["provider:fakeprovider"], ), ] - assert service.verify_signature_only.calls == [pretend.call(token)] - assert provider.verify_claims.calls == [pretend.call(claims)] - def test_verify_for_project_invalid_signature(self, monkeypatch): + def test_find_provider_issuer_lookup_fails(self, monkeypatch): service = services.OIDCProviderService( + session=pretend.stub(), provider="fakeprovider", issuer_url=pretend.stub(), cache_url=pretend.stub(), @@ -146,25 +164,27 @@ def test_verify_for_project_invalid_signature(self, monkeypatch): ), ) - token = pretend.stub() - monkeypatch.setattr(service, "verify_signature_only", lambda t: None) - - project = pretend.stub(name="fakeproject") + find_provider_by_issuer = pretend.call_recorder(lambda *a: None) + monkeypatch.setattr( + services, "find_provider_by_issuer", find_provider_by_issuer + ) - assert not service.verify_for_project(token, project) + claims = pretend.stub() + assert service.find_provider(claims) is None assert service.metrics.increment.calls == [ pretend.call( - "warehouse.oidc.verify_for_project.attempt", - tags=["project:fakeproject", "provider:fakeprovider"], + "warehouse.oidc.find_provider.attempt", + tags=["provider:fakeprovider"], ), pretend.call( - "warehouse.oidc.verify_for_project.invalid_signature", - tags=["project:fakeproject", "provider:fakeprovider"], + "warehouse.oidc.find_provider.provider_not_found", + tags=["provider:fakeprovider"], ), ] - def test_verify_for_project_invalid_claims(self, monkeypatch): + def test_find_provider_verify_claims_fails(self, monkeypatch): service = services.OIDCProviderService( + session=pretend.stub(), provider="fakeprovider", issuer_url=pretend.stub(), cache_url=pretend.stub(), @@ -173,31 +193,29 @@ def test_verify_for_project_invalid_claims(self, monkeypatch): ), ) - token = pretend.stub() - claims = pretend.stub() + provider = pretend.stub(verify_claims=pretend.call_recorder(lambda c: False)) + find_provider_by_issuer = pretend.call_recorder(lambda *a: provider) monkeypatch.setattr( - service, "verify_signature_only", 
pretend.call_recorder(lambda t: claims) + services, "find_provider_by_issuer", find_provider_by_issuer ) - provider = pretend.stub(verify_claims=pretend.call_recorder(lambda c: False)) - project = pretend.stub(name="fakeproject", oidc_providers=[provider]) - - assert not service.verify_for_project(token, project) + claims = pretend.stub() + assert service.find_provider(claims) is None assert service.metrics.increment.calls == [ pretend.call( - "warehouse.oidc.verify_for_project.attempt", - tags=["project:fakeproject", "provider:fakeprovider"], + "warehouse.oidc.find_provider.attempt", + tags=["provider:fakeprovider"], ), pretend.call( - "warehouse.oidc.verify_for_project.invalid_claims", - tags=["project:fakeproject", "provider:fakeprovider"], + "warehouse.oidc.find_provider.invalid_claims", + tags=["provider:fakeprovider"], ), ] - assert service.verify_signature_only.calls == [pretend.call(token)] assert provider.verify_claims.calls == [pretend.call(claims)] def test_get_keyset_not_cached(self, monkeypatch, mockredis): service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url=pretend.stub(), cache_url="rediss://fake.example.com", @@ -213,6 +231,7 @@ def test_get_keyset_not_cached(self, monkeypatch, mockredis): def test_get_keyset_cached(self, monkeypatch, mockredis): service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url=pretend.stub(), cache_url="rediss://fake.example.com", @@ -231,6 +250,7 @@ def test_get_keyset_cached(self, monkeypatch, mockredis): def test_refresh_keyset_timeout(self, monkeypatch, mockredis): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -253,6 +273,7 @@ def test_refresh_keyset_timeout(self, monkeypatch, mockredis): def test_refresh_keyset_oidc_config_fails(self, monkeypatch, mockredis): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -287,6 +308,7 @@ def test_refresh_keyset_oidc_config_fails(self, monkeypatch, mockredis): def test_refresh_keyset_oidc_config_no_jwks_uri(self, monkeypatch, mockredis): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -323,6 +345,7 @@ def test_refresh_keyset_oidc_config_no_jwks_uri(self, monkeypatch, mockredis): def test_refresh_keyset_oidc_config_no_jwks_json(self, monkeypatch, mockredis): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -370,6 +393,7 @@ def get(url): def test_refresh_keyset_oidc_config_no_jwks_keys(self, monkeypatch, mockredis): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -414,6 +438,7 @@ def get(url): def test_refresh_keyset_successful(self, monkeypatch, mockredis): metrics = 
pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -463,6 +488,7 @@ def get(url): def test_get_key_cached(self, monkeypatch): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -483,7 +509,7 @@ def test_get_key_cached(self, monkeypatch): } monkeypatch.setattr(service, "_get_keyset", lambda: (keyset, True)) - key = service.get_key("fake-key-id") + key = service._get_key("fake-key-id") assert isinstance(key, PyJWK) assert key.key_id == "fake-key-id" @@ -492,6 +518,7 @@ def test_get_key_cached(self, monkeypatch): def test_get_key_uncached(self, monkeypatch): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -513,7 +540,7 @@ def test_get_key_uncached(self, monkeypatch): monkeypatch.setattr(service, "_get_keyset", lambda: ({}, False)) monkeypatch.setattr(service, "_refresh_keyset", lambda: keyset) - key = service.get_key("fake-key-id") + key = service._get_key("fake-key-id") assert isinstance(key, PyJWK) assert key.key_id == "fake-key-id" @@ -522,6 +549,7 @@ def test_get_key_uncached(self, monkeypatch): def test_get_key_refresh_fails(self, monkeypatch): metrics = pretend.stub(increment=pretend.call_recorder(lambda *a, **kw: None)) service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", @@ -531,7 +559,7 @@ def test_get_key_refresh_fails(self, monkeypatch): monkeypatch.setattr(service, "_get_keyset", lambda: ({}, False)) monkeypatch.setattr(service, "_refresh_keyset", lambda: {}) - key = service.get_key("fake-key-id") + key = service._get_key("fake-key-id") assert key is None assert metrics.increment.calls == [ @@ -546,12 +574,13 @@ def test_get_key_for_token(self, monkeypatch): key = pretend.stub() service = services.OIDCProviderService( + session=pretend.stub(), provider="example", issuer_url="https://example.com", cache_url="rediss://fake.example.com", metrics=pretend.stub(), ) - monkeypatch.setattr(service, "get_key", pretend.call_recorder(lambda kid: key)) + monkeypatch.setattr(service, "_get_key", pretend.call_recorder(lambda kid: key)) monkeypatch.setattr( services.jwt, @@ -560,5 +589,126 @@ def test_get_key_for_token(self, monkeypatch): ) assert service._get_key_for_token(token) == key - assert service.get_key.calls == [pretend.call("fake-key-id")] + assert service._get_key.calls == [pretend.call("fake-key-id")] assert services.jwt.get_unverified_header.calls == [pretend.call(token)] + + +class TestNullOIDCProviderService: + def test_interface_matches(self): + assert verifyClass( + interfaces.IOIDCProviderService, services.NullOIDCProviderService + ) + + def test_warns_on_init(self, monkeypatch): + warnings = pretend.stub(warn=pretend.call_recorder(lambda m, c: None)) + monkeypatch.setattr(services, "warnings", warnings) + + service = services.NullOIDCProviderService( + session=pretend.stub(), + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + assert 
service is not None + assert warnings.warn.calls == [ + pretend.call( + "NullOIDCProviderService is intended only for use in development, " + "you should not use it in production due to the lack of actual " + "JWT verification.", + services.InsecureOIDCProviderWarning, + ) + ] + + def test_verify_jwt_signature_malformed_jwt(self): + service = services.NullOIDCProviderService( + session=pretend.stub(), + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + assert service.verify_jwt_signature("malformed-jwt") is None + + def test_verify_jwt_signature_missing_aud(self): + # { + # "iss": "foo", + # "iat": 1516239022, + # "nbf": 1516239022, + # "exp": 9999999999 + # } + jwt = ( + "eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJmb28iLCJpYXQiOjE1MTYyMzkwMjIsIm5iZ" + "iI6MTUxNjIzOTAyMiwiZXhwIjo5OTk5OTk5OTk5fQ.CAR9tx9_A6kxIDYWzXotuLfQ" + "0wmvHDDO98rLO4F46y7QDWOalIok9yX3OzkWz-30TIBl1dleGVYbtZQzFNEJY13OLB" + "gzFvxEpsAWvKJGyOLz-YDeGd2ApEZaggLvJiPZCngxFTH5fAyEcUUxQs5sCO9lGbkc" + "E6lg_Di3VQhPohSuj_V7-DkcXefL3lV7m_JNOBoDWx_nDOFx4w2f8Z2NmswMrsu1vU" + "NUZH7POiQBeyEsbY1at3u6gGerjyeYl8SIbeeRUWL0rtWxTgktoiKKgyPI-8F8Fpug" + "jwtKZU_WFhIF4nA0les81hxnm8HFnoun2kx5cSF4Db3N8h6m8wRTUw" + ) + + service = services.NullOIDCProviderService( + session=pretend.stub(), + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + assert service.verify_jwt_signature(jwt) is None + + def test_verify_jwt_signature_wrong_aud(self): + # { + # "iss": "foo", + # "iat": 1516239022, + # "nbf": 1516239022, + # "exp": 9999999999, + # "aud": "notpypi" + # } + jwt = ( + "eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJmb28iLCJpYXQiOjE1MTYyMzkwMjIsIm5iZ" + "iI6MTUxNjIzOTAyMiwiZXhwIjo5OTk5OTk5OTk5LCJhdWQiOiJub3RweXBpIn0.rFf" + "rBXfGyRjU-tIo9dpJRkbnB2BLKK6uwjrE6g4pqwN-5BDn_UNR1Cw4t6Pw8kYOCRmVD" + "aacu01L-GwHaXJmXyKsqIGie-bcp40zn1FX7dP000PQkAdhuQ-lILGhzscWNJK0J_g" + "IewoFV9jNUVHJmK9UXx0hHl4eaH_3Ob22kzzIqNKuao2625qfLAdNfV44efArEubXT" + "vBR-Y8HFzj7-7Zz7rHApImFYmC4E1aMDn_XEYJsXaJcwhhXJx8WB8SAhD7JZ-zotrd" + "hlqkRMD9rXpv4DAMU15SEnw19tztVRf9OA4PO5Hd4uTKxPA1euBJgXa2g9QgIc1aFA" + "FYKICTVgQ" + ) + + service = services.NullOIDCProviderService( + session=pretend.stub(), + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + assert service.verify_jwt_signature(jwt) is None + + def test_find_provider(self, monkeypatch): + claims = { + "iss": "foo", + "iat": 1516239022, + "nbf": 1516239022, + "exp": 9999999999, + "aud": "pypi", + } + + service = services.NullOIDCProviderService( + session=pretend.stub(), + provider="example", + issuer_url="https://example.com", + cache_url="rediss://fake.example.com", + metrics=pretend.stub(), + ) + + provider = pretend.stub(verify_claims=pretend.call_recorder(lambda c: True)) + find_provider_by_issuer = pretend.call_recorder(lambda *a: provider) + monkeypatch.setattr( + services, "find_provider_by_issuer", find_provider_by_issuer + ) + + assert service.find_provider(claims) == provider diff --git a/tests/unit/oidc/test_utils.py b/tests/unit/oidc/test_utils.py new file mode 100644 --- /dev/null +++ b/tests/unit/oidc/test_utils.py @@ -0,0 +1,72 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from sqlalchemy.sql.expression import func, literal + +from warehouse.oidc import utils +from warehouse.oidc.models import GitHubProvider + + +def test_find_provider_by_issuer_bad_issuer_url(): + assert ( + utils.find_provider_by_issuer( + pretend.stub(), "https://fake-issuer.url", pretend.stub() + ) + is None + ) + + +def test_find_provider_by_issuer_github(): + provider = pretend.stub() + one_or_none = pretend.call_recorder(lambda: provider) + filter_ = pretend.call_recorder(lambda *a: pretend.stub(one_or_none=one_or_none)) + filter_by = pretend.call_recorder(lambda **kw: pretend.stub(filter=filter_)) + session = pretend.stub( + query=pretend.call_recorder(lambda cls: pretend.stub(filter_by=filter_by)) + ) + signed_claims = { + "repository": "foo/bar", + "job_workflow_ref": "foo/bar/.github/workflows/ci.yml@refs/heads/main", + "repository_owner_id": "1234", + } + + assert ( + utils.find_provider_by_issuer( + session, "https://token.actions.githubusercontent.com", signed_claims + ) + == provider + ) + + assert session.query.calls == [pretend.call(GitHubProvider)] + assert filter_by.calls == [ + pretend.call( + repository_name="bar", repository_owner="foo", repository_owner_id="1234" + ) + ] + + # SQLAlchemy BinaryExpression objects don't support comparison with __eq__, + # so we need to dig into the callset and compare the argument manually. + assert len(filter_.calls) == 1 + assert len(filter_.calls[0].args) == 1 + assert ( + filter_.calls[0] + .args[0] + .compare( + literal("ci.yml@refs/heads/main").like( + func.concat(GitHubProvider.workflow_filename, "%") + ) + ) + ) + + assert one_or_none.calls == [pretend.call()] diff --git a/tests/unit/oidc/test_views.py b/tests/unit/oidc/test_views.py new file mode 100644 --- /dev/null +++ b/tests/unit/oidc/test_views.py @@ -0,0 +1,287 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pretend +import pytest + +from warehouse.events.tags import EventTag +from warehouse.macaroons import caveats +from warehouse.macaroons.interfaces import IMacaroonService +from warehouse.oidc import views +from warehouse.oidc.interfaces import IOIDCProviderService + + [email protected]( + ("registry", "admin"), [(False, False), (False, True), (True, True)] +) +def test_mint_token_from_oidc_not_enabled(registry, admin): + request = pretend.stub( + response=pretend.stub(status=None), + registry=pretend.stub(settings={"warehouse.oidc.enabled": registry}), + flags=pretend.stub(enabled=lambda *a: admin), + ) + + response = views.mint_token_from_oidc(request) + assert request.response.status == 422 + assert response == { + "message": "Token request failed", + "errors": [ + {"code": "not-enabled", "description": "OIDC functionality not enabled"} + ], + } + + +def test_mint_token_from_oidc_invalid_json(): + class Request: + def __init__(self): + self.response = pretend.stub(status=None) + self.registry = pretend.stub(settings={"warehouse.oidc.enabled": True}) + self.flags = pretend.stub(enabled=lambda *a: False) + + @property + def json_body(self): + raise ValueError + + req = Request() + resp = views.mint_token_from_oidc(req) + assert req.response.status == 422 + assert resp == { + "message": "Token request failed", + "errors": [{"code": "invalid-json", "description": "missing JSON body"}], + } + + [email protected]( + "body", + [ + "", + [], + "this is a valid JSON string", + 12345, + 3.14, + None, + ], +) +def test_mint_token_from_oidc_invalid_payload(body): + class Request: + def __init__(self): + self.response = pretend.stub(status=None) + self.registry = pretend.stub(settings={"warehouse.oidc.enabled": True}) + self.flags = pretend.stub(enabled=lambda *a: False) + + @property + def json_body(self): + return body + + req = Request() + resp = views.mint_token_from_oidc(req) + assert req.response.status == 422 + assert resp == { + "message": "Token request failed", + "errors": [ + { + "code": "invalid-payload", + "description": "payload is not a JSON dictionary", + } + ], + } + + [email protected]( + "body", + [ + {}, + {"token": None}, + {"wrongkey": ""}, + ], +) +def test_mint_token_from_oidc_missing_token(body): + request = pretend.stub( + response=pretend.stub(status=None), + json_body=body, + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + flags=pretend.stub(enabled=lambda *a: False), + ) + resp = views.mint_token_from_oidc(request) + assert request.response.status == 422 + assert resp == { + "message": "Token request failed", + "errors": [{"code": "invalid-token", "description": "token is missing"}], + } + + [email protected]( + "body", + [ + {"token": 3.14}, + {"token": 0}, + {"token": [""]}, + {"token": []}, + {"token": {}}, + ], +) +def test_mint_token_from_oidc_nonstring_token(body): + request = pretend.stub( + response=pretend.stub(status=None), + json_body=body, + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + flags=pretend.stub(enabled=lambda *a: False), + ) + resp = views.mint_token_from_oidc(request) + assert request.response.status == 422 + assert resp == { + "message": "Token request failed", + "errors": [{"code": "invalid-token", "description": "token is not a string"}], + } + + +def test_mint_token_from_oidc_provider_verify_jwt_signature_fails(): + oidc_service = pretend.stub( + verify_jwt_signature=pretend.call_recorder(lambda token: None), + ) + request = pretend.stub( + response=pretend.stub(status=None), + 
json_body={"token": "faketoken"}, + find_service=pretend.call_recorder(lambda cls, **kw: oidc_service), + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + flags=pretend.stub(enabled=lambda *a: False), + ) + + response = views.mint_token_from_oidc(request) + assert request.response.status == 422 + assert response == { + "message": "Token request failed", + "errors": [ + { + "code": "invalid-token", + "description": "malformed or invalid token", + } + ], + } + + assert request.find_service.calls == [ + pretend.call(IOIDCProviderService, name="github") + ] + assert oidc_service.verify_jwt_signature.calls == [pretend.call("faketoken")] + + +def test_mint_token_from_oidc_provider_lookup_fails(): + claims = pretend.stub() + oidc_service = pretend.stub( + verify_jwt_signature=pretend.call_recorder(lambda token: claims), + find_provider=pretend.call_recorder(lambda claims: None), + ) + request = pretend.stub( + response=pretend.stub(status=None), + json_body={"token": "faketoken"}, + find_service=pretend.call_recorder(lambda cls, **kw: oidc_service), + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + flags=pretend.stub(enabled=lambda *a: False), + ) + + response = views.mint_token_from_oidc(request) + assert request.response.status == 422 + assert response == { + "message": "Token request failed", + "errors": [ + { + "code": "invalid-provider", + "description": "valid token, but no corresponding provider", + } + ], + } + + assert request.find_service.calls == [ + pretend.call(IOIDCProviderService, name="github") + ] + assert oidc_service.verify_jwt_signature.calls == [pretend.call("faketoken")] + assert oidc_service.find_provider.calls == [pretend.call(claims)] + + +def test_mint_token_from_oidc_ok(monkeypatch): + time = pretend.stub(time=pretend.call_recorder(lambda: 0)) + monkeypatch.setattr(views, "time", time) + + project = pretend.stub( + id="fakeprojectid", + record_event=pretend.call_recorder(lambda **kw: None), + ) + provider = pretend.stub( + id="fakeproviderid", + projects=[project], + provider_name="fakeprovidername", + provider_url="https://fake/url", + ) + # NOTE: Can't set __str__ using pretend.stub() + monkeypatch.setattr(provider.__class__, "__str__", lambda s: "fakespecifier") + + claims = pretend.stub() + oidc_service = pretend.stub( + verify_jwt_signature=pretend.call_recorder(lambda token: claims), + find_provider=pretend.call_recorder(lambda claims: provider), + ) + + db_macaroon = pretend.stub(description="fakemacaroon") + macaroon_service = pretend.stub( + create_macaroon=pretend.call_recorder( + lambda *a, **kw: ("raw-macaroon", db_macaroon) + ) + ) + + def find_service(iface, **kw): + if iface == IOIDCProviderService: + return oidc_service + elif iface == IMacaroonService: + return macaroon_service + assert False, iface + + request = pretend.stub( + response=pretend.stub(status=None), + json_body={"token": "faketoken"}, + find_service=find_service, + domain="fakedomain", + remote_addr="0.0.0.0", + registry=pretend.stub(settings={"warehouse.oidc.enabled": True}), + flags=pretend.stub(enabled=lambda *a: False), + ) + + response = views.mint_token_from_oidc(request) + assert response == { + "success": True, + "token": "raw-macaroon", + } + + assert oidc_service.verify_jwt_signature.calls == [pretend.call("faketoken")] + assert oidc_service.find_provider.calls == [pretend.call(claims)] + assert macaroon_service.create_macaroon.calls == [ + pretend.call( + "fakedomain", + "OpenID token: https://fake/url (0)", + [ + 
caveats.OIDCProvider(oidc_provider_id="fakeproviderid"), + caveats.ProjectID(project_ids=["fakeprojectid"]), + caveats.Expiration(expires_at=900, not_before=0), + ], + oidc_provider_id="fakeproviderid", + ) + ] + assert project.record_event.calls == [ + pretend.call( + tag=EventTag.Project.ShortLivedAPITokenAdded, + ip_address="0.0.0.0", + additional={ + "expires": 900, + "provider_name": "fakeprovidername", + "provider_url": "https://fake/url", + }, + ) + ] diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -21,6 +21,7 @@ from warehouse.organizations.models import TeamProjectRoleType from warehouse.packaging.models import File, ProjectFactory, ReleaseURL +from ...common.db.oidc import GitHubProviderFactory from ...common.db.organizations import ( OrganizationFactory as DBOrganizationFactory, OrganizationProjectFactory as DBOrganizationProjectFactory, @@ -141,6 +142,8 @@ def test_acl(self, db_session): team=team, project=project, role_name=TeamProjectRoleType.Owner ) + provider = GitHubProviderFactory.create(projects=[project]) + acls = [] for location in lineage(project): try: @@ -157,6 +160,8 @@ def test_acl(self, db_session): (Allow, "group:admins", "admin"), (Allow, "group:moderators", "moderator"), ] + sorted( + [(Allow, f"oidc:{provider.id}", ["upload"])], key=lambda x: x[1] + ) + sorted( [ (Allow, f"user:{owner1.user.id}", ["manage:project", "upload"]), (Allow, f"user:{owner2.user.id}", ["manage:project", "upload"]), diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -263,6 +263,7 @@ def __init__(self): "warehouse.two_factor_mandate.available": False, "warehouse.two_factor_mandate.enabled": False, "warehouse.oidc.enabled": False, + "oidc.backend": "warehouse.oidc.services.OIDCProviderService", "warehouse.two_factor_mandate.cohort_size": 0, } if environment == config.Environment.development: diff --git a/tests/unit/test_filters.py b/tests/unit/test_filters.py --- a/tests/unit/test_filters.py +++ b/tests/unit/test_filters.py @@ -210,6 +210,19 @@ def test_localize_datetime(inp, expected): assert filters.localize_datetime(inp).strftime(datetime_format) == expected [email protected]( + ("inp", "expected"), + [ + ( + 1667404296, + datetime.datetime(2022, 11, 2, 15, 51, 36), + ) + ], +) +def test_ctime(inp, expected): + assert filters.ctime(inp) == expected + + @pytest.mark.parametrize( "delta, expected", [ diff --git a/tests/unit/utils/test_security_policy.py b/tests/unit/utils/test_security_policy.py --- a/tests/unit/utils/test_security_policy.py +++ b/tests/unit/utils/test_security_policy.py @@ -19,6 +19,7 @@ from warehouse.utils import security_policy from ...common.db.accounts import UserFactory +from ...common.db.oidc import GitHubProviderFactory @pytest.mark.parametrize( @@ -211,6 +212,25 @@ def test_permits_user(self, db_request, monkeypatch): ) ] + def test_permits_oidc_provider(self, db_request): + subpolicies = pretend.stub() + status = pretend.stub() + authz = pretend.stub(permits=pretend.call_recorder(lambda *a: status)) + policy = security_policy.MultiSecurityPolicy(subpolicies, authz) + + provider = GitHubProviderFactory.create() + request = pretend.stub(identity=provider) + context = pretend.stub() + permission = pretend.stub() + assert policy.permits(request, context, permission) is status + assert authz.permits.calls == [ + pretend.call( + context, + [Authenticated, 
f"oidc:{provider.id}"], + permission, + ) + ] + def test_permits_nonuser_denied(self): subpolicies = pretend.stub() authz = pretend.stub(permits=pretend.call_recorder(lambda *a: pretend.stub()))
Routes and endpoints for JWT consumption Once we allow projects to configure OIDC providers (termed "publishers" in user-facing views), we need to allow OIDC-minted JWTs to be exchanged for API tokens. As discussed with @di, we should ensure that these can be configured to run on a separate subdomain during deployment, to simplify caching logic.
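For concreteness, a minimal client-side sketch of the exchange this issue asks for, as exercised by the `mint_token_from_oidc` tests in the patch above. The endpoint path and the use of `requests` here are assumptions for illustration; the request body (`{"token": <jwt>}`), the success shape (`{"success": true, "token": <macaroon>}`), and the 422 error shape are taken directly from those tests.

```python
import requests


def exchange_oidc_jwt(jwt: str, base_url: str) -> str:
    """Trade an OIDC-signed JWT for a short-lived PyPI API token."""
    # Hypothetical route path; the real deployment path may differ.
    resp = requests.post(f"{base_url}/_/oidc/github/mint-token", json={"token": jwt})
    body = resp.json()
    if resp.status_code == 422:
        # Failure shape: {"message": ..., "errors": [{"code": ..., "description": ...}]}
        raise RuntimeError(body["errors"])
    assert body["success"]
    return body["token"]  # a raw macaroon, scoped to the provider's project(s)
```

Per the tests, the minted token is itself short-lived: it carries an `Expiration` caveat of 900 seconds plus `OIDCProvider`/`ProjectID` caveats restricting it to the registered provider and its projects.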
Needs #10792. As a reminder to myself: all of the routing/endpoint logic here should have accompanying metrics, per @ewdurbin's feedback: https://github.com/pypa/warehouse/pull/10753#pullrequestreview-925230468
2022-04-27T22:00:01Z
[]
[]
pypi/warehouse
11,273
pypi__warehouse-11273
[ "11220" ]
47d25b39116c1ed2b203342d9e8b6bbf2ac21f25
diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -462,6 +462,16 @@ def urls(self): name, _, url = urlspec.partition(",") name = name.strip() url = url.strip() + + # avoid duplicating homepage/download links in case the same + # url is specified in the pkginfo twice (in the Home-page + # or Download-URL field and again in the Project-URL fields) + comp_name = name.casefold().replace("-", "").replace("_", "") + if comp_name == "homepage" and url == _urls.get("Homepage"): + continue + if comp_name == "downloadurl" and url == _urls.get("Download"): + continue + if name and url: _urls[name] = url
diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -252,6 +252,35 @@ def test_has_meta_false(self, db_session): ] ), ), + # similar spellings of homepage/download label doesn't duplicate urls + ( + "https://example.com/home/", + "https://example.com/download/", + [ + "homepage, https://example.com/home/", + "download-URL ,https://example.com/download/", + ], + OrderedDict( + [ + ("Homepage", "https://example.com/home/"), + ("Download", "https://example.com/download/"), + ] + ), + ), + # the duplicate removal only happens if the urls are equal too! + ( + "https://example.com/home1/", + None, + [ + "homepage, https://example.com/home2/", + ], + OrderedDict( + [ + ("Homepage", "https://example.com/home1/"), + ("homepage", "https://example.com/home2/"), + ] + ), + ), # ignore invalid links ( None,
The metadata "homepage" link rendered twice on PyPI With the most current tools, and following the recommendations of [_PEP 621 – Storing project metadata in `pyproject.toml`_](https://peps.python.org/pep-0621/), package publishers end up with duplicate homepage links rendered on the UI: ![Screen Shot 2022-04-20 at 10 52 45 PM](https://user-images.githubusercontent.com/6615374/164368739-95906085-1bf9-432d-8d5b-8d566ad3165f.png) https://test.pypi.org/project/issue11220/ This is probably actually the fault of setuptools and/or the spec, but perhaps warehouse can workaround by ignoring duplicates? What do you think? Otherwise we might start seeing it everywhere as people adopt PEP 621 recommendations.. :-\ _Note:_ The package was produced from running `python -m build` on this source, a stripped down version of the [example](https://peps.python.org/pep-0621/#example) in the PEP: ``` [build-system] requires = ["setuptools >= 40.6.0", "wheel"] build-backend = "setuptools.build_meta" [project] name = "issue11220" version = "0.1" [project.urls] homepage = "https://github.com/pypa/warehouse/issues/11220" ```
The issue is that we're getting both of these metadata fields from the distributions:

```
Home-page: https://github.com/pypa/warehouse/issues/11220
Project-URL: homepage, https://github.com/pypa/warehouse/issues/11220
```

PEP 621 doesn't seem to explain what tools should do with the old `Home-page` field when `homepage` is present, but it looks like setuptools has decided to duplicate the value into it. I'm not sure that's really necessary, as I think the long-term plan should be to deprecate this field and stop using it entirely.

It's also not clear to me how PyPI should be expected to de-duplicate this. A `pyproject.toml` like this:

```
[project.urls]
homepage = "https://example.com/homepage"
Homepage = "https://example.com/Homepage"
Home-page = "https://example.com/Home-page"
Home_page = "https://example.com/Home_page"
Home__page = "https://example.com/Home__page"
```

Produces this metadata:

```
Metadata-Version: 2.1
Name: issue11220
Version: 0.1
Summary: foo
Home-page: https://example.com/Home__page
License: UNKNOWN
Project-URL: homepage, https://example.com/homepage
Project-URL: Homepage, https://example.com/Homepage
Project-URL: Home-page, https://example.com/Home-page
Project-URL: Home_page, https://example.com/Home_page
Project-URL: Home__page, https://example.com/Home__page
Platform: UNKNOWN

UNKNOWN
```

So it looks like it's just choosing the last one, which seems confusing.

FWIW, `flit` doesn't have this problem, and produces the following metadata:

```
Metadata-Version: 2.1
Name: issue11220
Version: 0.1
Summary: foo
Project-URL: Home-page, https://example.com/Home-page
Project-URL: Home__page, https://example.com/Home__page
Project-URL: Home_page, https://example.com/Home_page
Project-URL: Homepage, https://example.com/Homepage
Project-URL: homepage, https://example.com/homepage
```

Which IMO is the correct behavior here, and probably what setuptools should be doing, instead of PyPI introducing workarounds for its behavior. CC @abravalheri for your thoughts.

There is a discussion regarding this in:
- https://discuss.python.org/t/help-testing-experimental-features-in-setuptools/13821/13
- https://discuss.python.org/t/help-testing-experimental-features-in-setuptools/13821/16

The path I chose was maximum backward compatibility, so I purposefully decided to backfill the `Home-page` field using the values in `Project-URL`. This is motivated by the fact that the `Home-page` field is not currently considered deprecated.

I agree with Dustin that in the case of multiple candidates, choosing the last option might be confusing. Would it be better if instead we stick with the first?

Regarding the deduplication, if you write:

```toml
[project.urls]
Homepage = "https://github.com/pypa/warehouse/issues/11220"
```

PyPI will display only one URL.

---

@di in the case of

```
Project-URL: Homepage, https://example.com/Homepage
Project-URL: homepage, https://example.com/homepage
```

If the approach chosen by PyPI is to not do any de-duplication, does it also make sense to not automatically change the case of the word?

> I agree with Dustin that in the case of multiple candidates, choosing the last option might be confusing. Would it be better if instead we stick with the first?

I think the best thing would be for setuptools to just stop duplicating the field. I'm not sure what would be depending on maintaining this backwards compatibility, but it's not PyPI, and nothing else comes to mind.
> If the approach chosen by PyPI is to not do any de-duplication, does it also make sense to not automatically change the case of the word?

I'm not sure I understand your question! I don't think PyPI should do any modification to what the user has put in this field. I don't think tools should either.

I agree that PyPI should not be doing modifications to what has been provided to us. I think that as the core metadata for ``Project-URL`` is specified, we also probably shouldn't change the casing either, under the same rationale: as the metadata is currently defined, ``Homepage`` and ``homepage`` are distinct entries. I think there's a good argument to be made that ``Project-URL`` keys should be case insensitive, and if we made that change then I think changing case could be fine, but unless that happens I think not changing makes sense. I also think there is a good argument that the existing URL fields that aren't ``Project-URL`` should just be deprecated.

As far as what is valid for setuptools to do here, tools are generally free to generate the metadata using their inputs as they see fit. PEP 621 defines a standard for how to generate *some* of those fields from a specific shared input (in this case, a 1:1 mapping with no transformation), but anything not covered by PEP 621.. isn't covered by PEP 621. Like if we remove PEP 621 from the equation, and someone wrote:

```python
from setuptools import setup

setup(
    ...,
    project_urls={
        "Homepage": "...",
    },
)
```

Would it be valid for setuptools to backfill the ``Homepage`` metadata using that? Which I think is unequivocally yes.

Does it make *sense* for setuptools to do this? Eh, personally I don't think that field matters, and having it duplicated probably confuses some people somewhere, so I wouldn't bother if I were setuptools. I don't feel strongly about it personally though. I think it probably does make sense to deprecate the two old URL fields, and in that case PyPI could just stop showing that field and setuptools could easily justify no longer emitting it.

> I think the best thing would be for setuptools to just stop duplicating the field.

I understand and sympathise with the point of view you guys are exposing here. Probably in the future we will remove the duplication, but there are a few other changes that might need to happen first, before we reach that point.

For example, `distutils` expects the field to be defined. It will complain loudly with a warning, which will make `setuptools` users very confused, and may break CI workflows configured with a more strict `PYTHONWARNINGS`[^1]. If `Home-page` gets deprecated as core metadata, then I would have some more leverage to push this kind of change.

[^1]: To work around that I would have to propose a PR to drop the warning in `pypa/distutils`. But the maintainers there may (rightfully) indicate that the field is not deprecated and therefore there is no obvious problem with the behaviour implemented in `distutils`. There is some precedent with other fields that makes me believe that would be the case.

> I'm not sure I understand your question! I don't think PyPI should do any modification to what the user has put in this field. I don't think tools should either.

What is the current approach taken by PyPI for deduplication?
What happens if the user has 2 `Project-URL` entries with (exactly) the same label?
Based on [previous experiments](https://github.com/moshez/pyproject-only-example/pull/1), it seems that PyPI will deduplicate `Home-page: ...` and `Project-URL: Homepage, ...` but not `Project-URL: homepage, ...`.

Regardless, this is not a real issue with this project, and `warehouse` should be free to decide how to display `Project-URL` however they see fit. In terms of the setuptools implementation, the answer would be: it's complicated. Right now, since core metadata is not technically incorrect when duplicating `Home-page` and `Project-url: Homepage, ...`, there is a high chance this issue would be treated as a `wontfix`, or at least linger in the issue tracker for a while before we are in a position to tackle it properly.

> What is the current approach taken by PyPI for deduplication?
> What happens if the user has 2 `Project-URL` entries with (exactly) the same label?

From what I can tell, in the database it will store the duplicated URLs, since it stores them as a row per entry, with a single string column in the ``"Key, Value"`` form. When we load that data, our data model makes it available in two forms:

1. A list of strings in the ``"Key, Value"`` form.
2. A Mapping of Key -> Value, and we "prefill" the mapping with a "Homepage" key taken from ``Home-page`` and ``Download`` from ``Download-URL``.
   * This means that if someone has a "Homepage" project URL or a "Download" project URL (explicitly that, case sensitive) AND the older ``Home-page`` or ``Download-URL`` data, the project URL will overwrite it and take precedence. It also means that PyPI is already treating those fields as somewhat deprecated.
   * It also means that if someone has the same Project URL key twice, the latter will overwrite and "win". The core metadata spec is silent in this case, but I think we should probably change this case to be an error on upload, and just require unique keys for every ``Project-URL`` entry.

For actually sending that data to the end user: for the HTML pages and the JSON API we use the mapping in (2). In the XMLRPC API we return the list from (1).

> 1. To work around that I would have to propose a PR to drop the warning in `pypa/distutils`. But the maintainers there may (rightfully) indicate that the field is not deprecated and therefore there is no obvious problem with the behaviour implemented in `distutils`. There is some precedent with other fields that makes me believe that would be the case.

It being deprecated or not isn't really important. It's not a required field; they're of course free to be stricter in what they require than the metadata spec requires, but per the spec itself it's perfectly valid to emit metadata without that field, regardless of this issue. The *only* required fields are Name, Version, and Metadata-Version.

Thank you very much for the explanation @dstufft, now the behaviour I was observing with `Project-url: Homepage, ...` makes sense.

> they're of course free to be stricter in what they require than the metadata spec requires, but per the spec itself it's perfectly valid to emit metadata without that field, regardless of this issue.

This is more or less what is happening right now. Metadata will be emitted, but `distutils` will be stricter and use warnings to encourage users to fill in some metadata it considers important. I was mostly pointing it out because I'm pretty sure those warnings date back to when at least Download-URL was a mandatory value; I don't recall if Home-page was mandatory or not.
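A rough, simplified illustration of the two forms described above — this is not warehouse's actual code; the field names follow the description, and the overwrite semantics match the `urls` property shown in the patch earlier in this record:

```python
def rendered_urls(home_page, download_url, project_urls):
    """Build the Key -> Value mapping used for rendering, per the description above."""
    urls = {}
    # Prefill from the legacy fields...
    if home_page:
        urls["Homepage"] = home_page
    if download_url:
        urls["Download"] = download_url
    # ...then layer Project-URL entries on top; a literal "Homepage" or
    # "Download" key (case sensitive) overwrites the prefilled value, and
    # a repeated key means the last occurrence wins.
    for spec in project_urls:  # stored as '"Key, Value"' strings, one row per entry
        name, _, url = spec.partition(",")
        if name.strip() and url.strip():
            urls[name.strip()] = url.strip()
    return urls


print(rendered_urls(
    "https://example.com/Home__page",
    None,
    ["homepage, https://example.com/homepage"],
))
# {'Homepage': 'https://example.com/Home__page', 'homepage': 'https://example.com/homepage'}
# i.e. exactly the duplicated rendering this issue reports.
```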
It being made optional was a (relatively speaking) recent change (8? 10? years ago).

Here is what I think setuptools should do:
- Stop auto-populating a **Project-URL** into the **Home-page**.

Here is what I think distutils should do:
- Nothing. Getting changes into distutils is useless, it's already on death row.

Here is what I think warehouse should do:
- If there is any **Project-URL** with the key "homepage" or "home-page" (case insensitive), then use that link and render it as the Homepage link. Ignore whatever is in **Home-page**.

This is just pragmatic, and makes sure PyPI does the right thing for people using the latest build tools without changing anything for people that are using old build tools.

_Maybe_ setuptools could patch their vendored distutils to not warn on missing **Home-page**, if it's easy enough, but that's not really particularly important (distutils sdist builds already spam ignorable warnings about a bunch of other stuff).

[Here](https://github.com/wimglenn/setuptools-ext/blob/0.2/setuptools_ext.py#L59-L63) is what I do in my build backend: if **Home-page** is duplicated in a **Project-URL**, then the **Home-page** gets kicked out.

Note: In the Web UI, PyPI does not have the concept of a "Homepage" link or a "Download" link. It has a Project URL mapping that gets rendered. PyPI *will* inject the Home-page and Download-URL metadata into that Project URL mapping at render time (but if the Project URLs contain "Homepage" or "Download", the injected values get overwritten). The JSON/XMLRPC APIs differentiate between project URLs and Home-page/Download-URL and should *not* munge data between them. Those APIs exist to give access to the underlying data, and should faithfully reproduce the data as given.

Yes, I understand. So my suggestion is just to extend the "injected values get overwritten" part to also overwrite with a project URL of homepage or home-page or hoMepAGE (any one of them, first, last, doesn't matter as long as it's deterministic). It should not also render the "homepage" project URL separately if that one got used to overwrite. Happy to prepare a PR if we have some consensus on that 3rd bullet point.

> Would it be valid for setuptools to backfill the Homepage metadata using that? Which I think is unequivocally yes.

Sorry to be contrary, but I think no, actually. Because the core metadata spec field is **Home-page**, not **Homepage**. I think it should not backfill at all, but I could be convinced for it to backfill a Project-URL also called the Home-page into the metadata field Home-page.

@abravalheri where does this backfill happen in setuptools? I was spelunking through the code but could not find it..
My personal plan for the moment, in terms of contributions to setuptools, is to focus on other changes that I judge more urgent/important (unfortunately we all have to prioritize with the volunteer time we have available). So I invite anyone that is interested in pushing this issue forward on the setuptools/distutils side to propose the changes via code contributions.

> where does this backfill happen in setuptools? I was spelunking through the code but could not find it..

This is it: https://github.com/pypa/setuptools/blob/372652d6dadf8de6c29c83d0f2107fdd7b6e6571/setuptools/config/_apply_pyprojecttoml.py#L173

@abravalheri Thanks - I was not aware of pypa/distutils, and just assumed that setuptools was vendoring the stdlib code. Good to know! I'll prepare a PR when I find the time; totally agree that it's not an urgent/important issue.
2022-04-28T02:21:24Z
[]
[]
pypi/warehouse
11,311
pypi__warehouse-11311
[ "11307" ]
0b0ed299df1d502cae2f2bcb0c03ec45a09c62b9
diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -1022,9 +1022,7 @@ def create_organization(self): tag="organization:create", additional={"created_by_user_id": str(self.request.user.id)}, ) - self.organization_service.add_catalog_entry( - organization.name, organization.id - ) + self.organization_service.add_catalog_entry(organization.id) self.organization_service.record_event( organization.id, tag="organization:catalog_entry:add", diff --git a/warehouse/organizations/interfaces.py b/warehouse/organizations/interfaces.py --- a/warehouse/organizations/interfaces.py +++ b/warehouse/organizations/interfaces.py @@ -38,7 +38,7 @@ def add_organization(name, display_name, orgtype, link_url, description): attributes. """ - def add_catalog_entry(name, organization_id): + def add_catalog_entry(organization_id): """ Adds the organization name to the organization name catalog """ diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py --- a/warehouse/organizations/services.py +++ b/warehouse/organizations/services.py @@ -12,6 +12,7 @@ import datetime +from sqlalchemy import func from sqlalchemy.orm.exc import NoResultFound from zope.interface import implementer @@ -51,10 +52,11 @@ def find_organizationid(self, name): Find the unique organization identifier for the given normalized name or None if there is no organization with the given name. """ + normalized_name = func.normalize_pep426_name(name) try: organization = ( self.db.query(Organization.id) - .filter(Organization.normalized_name == name) + .filter(Organization.normalized_name == normalized_name) .one() ) except NoResultFound: @@ -79,13 +81,14 @@ def add_organization(self, name, display_name, orgtype, link_url, description): return organization - def add_catalog_entry(self, name, organization_id): + def add_catalog_entry(self, organization_id): """ Adds the organization name to the organization name catalog """ organization = self.get_organization(organization_id) catalog_entry = OrganizationNameCatalog( - normalized_name=name, organization_id=organization.id + normalized_name=organization.normalized_name, + organization_id=organization.id, ) self.db.add(catalog_entry)
diff --git a/tests/common/db/organizations.py b/tests/common/db/organizations.py --- a/tests/common/db/organizations.py +++ b/tests/common/db/organizations.py @@ -35,8 +35,7 @@ class Meta: model = Organization id = factory.Faker("uuid4", cast_to=None) - name = factory.Faker("word") - normalized_name = factory.Faker("word") + name = factory.Faker("pystr", max_chars=12) display_name = factory.Faker("word") orgtype = "Community" link_url = factory.Faker("uri") diff --git a/tests/unit/admin/views/test_organizations.py b/tests/unit/admin/views/test_organizations.py --- a/tests/unit/admin/views/test_organizations.py +++ b/tests/unit/admin/views/test_organizations.py @@ -30,9 +30,7 @@ def test_no_query(self, enable_organizations, db_request): ) result = views.organization_list(db_request) - assert result["organizations"].items == organizations[:25] - assert result["query"] == "" - assert result["terms"] == [] + assert result == {"organizations": organizations[:25], "query": "", "terms": []} def test_with_page(self, enable_organizations, db_request): organizations = sorted( @@ -42,9 +40,7 @@ def test_with_page(self, enable_organizations, db_request): db_request.GET["page"] = "2" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[25:50] - assert result["query"] == "" - assert result["terms"] == [] + assert result == {"organizations": organizations[25:], "query": "", "terms": []} def test_with_invalid_page(self, enable_organizations): request = pretend.stub( @@ -63,9 +59,11 @@ def test_basic_query(self, enable_organizations, db_request): db_request.GET["q"] = organizations[0].name result = views.organization_list(db_request) - assert result["organizations"].items == [organizations[0]] - assert result["query"] == organizations[0].name - assert result["terms"] == [organizations[0].name] + assert result == { + "organizations": [organizations[0]], + "query": organizations[0].name, + "terms": [organizations[0].name], + } def test_name_query(self, enable_organizations, db_request): organizations = sorted( @@ -75,9 +73,11 @@ def test_name_query(self, enable_organizations, db_request): db_request.GET["q"] = f"name:{organizations[0].name}" result = views.organization_list(db_request) - assert result["organizations"].items == [organizations[0]] - assert result["query"] == f"name:{organizations[0].name}" - assert result["terms"] == [f"name:{organizations[0].name}"] + assert result == { + "organizations": [organizations[0]], + "query": f"name:{organizations[0].name}", + "terms": [f"name:{organizations[0].name}"], + } def test_organization_query(self, enable_organizations, db_request): organizations = sorted( @@ -87,9 +87,11 @@ def test_organization_query(self, enable_organizations, db_request): db_request.GET["q"] = f"organization:{organizations[0].display_name}" result = views.organization_list(db_request) - assert result["organizations"].items == [organizations[0]] - assert result["query"] == f"organization:{organizations[0].display_name}" - assert result["terms"] == [f"organization:{organizations[0].display_name}"] + assert result == { + "organizations": [organizations[0]], + "query": f"organization:{organizations[0].display_name}", + "terms": [f"organization:{organizations[0].display_name}"], + } def test_url_query(self, enable_organizations, db_request): organizations = sorted( @@ -99,9 +101,11 @@ def test_url_query(self, enable_organizations, db_request): db_request.GET["q"] = f"url:{organizations[0].link_url}" result = views.organization_list(db_request) 
- assert result["organizations"].items == [organizations[0]] - assert result["query"] == f"url:{organizations[0].link_url}" - assert result["terms"] == [f"url:{organizations[0].link_url}"] + assert result == { + "organizations": [organizations[0]], + "query": f"url:{organizations[0].link_url}", + "terms": [f"url:{organizations[0].link_url}"], + } def test_description_query(self, enable_organizations, db_request): organizations = sorted( @@ -111,9 +115,11 @@ def test_description_query(self, enable_organizations, db_request): db_request.GET["q"] = f"description:'{organizations[0].description}'" result = views.organization_list(db_request) - assert result["organizations"].items == [organizations[0]] - assert result["query"] == f"description:'{organizations[0].description}'" - assert result["terms"] == [f"description:{organizations[0].description}"] + assert result == { + "organizations": [organizations[0]], + "query": f"description:'{organizations[0].description}'", + "terms": [f"description:{organizations[0].description}"], + } def test_is_approved_query(self, enable_organizations, db_request): organizations = sorted( @@ -128,9 +134,11 @@ def test_is_approved_query(self, enable_organizations, db_request): db_request.GET["q"] = "is:approved" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[:2] - assert result["query"] == "is:approved" - assert result["terms"] == ["is:approved"] + assert result == { + "organizations": organizations[:2], + "query": "is:approved", + "terms": ["is:approved"], + } def test_is_declined_query(self, enable_organizations, db_request): organizations = sorted( @@ -145,9 +153,11 @@ def test_is_declined_query(self, enable_organizations, db_request): db_request.GET["q"] = "is:declined" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[2:3] - assert result["query"] == "is:declined" - assert result["terms"] == ["is:declined"] + assert result == { + "organizations": organizations[2:3], + "query": "is:declined", + "terms": ["is:declined"], + } def test_is_submitted_query(self, enable_organizations, db_request): organizations = sorted( @@ -162,9 +172,11 @@ def test_is_submitted_query(self, enable_organizations, db_request): db_request.GET["q"] = "is:submitted" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[3:] - assert result["query"] == "is:submitted" - assert result["terms"] == ["is:submitted"] + assert result == { + "organizations": organizations[3:], + "query": "is:submitted", + "terms": ["is:submitted"], + } def test_is_active_query(self, enable_organizations, db_request): organizations = sorted( @@ -179,9 +191,11 @@ def test_is_active_query(self, enable_organizations, db_request): db_request.GET["q"] = "is:active" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[:2] - assert result["query"] == "is:active" - assert result["terms"] == ["is:active"] + assert result == { + "organizations": organizations[:2], + "query": "is:active", + "terms": ["is:active"], + } def test_is_inactive_query(self, enable_organizations, db_request): organizations = sorted( @@ -196,9 +210,11 @@ def test_is_inactive_query(self, enable_organizations, db_request): db_request.GET["q"] = "is:inactive" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[2:] - assert result["query"] == "is:inactive" - assert result["terms"] == ["is:inactive"] + assert 
result == { + "organizations": organizations[2:], + "query": "is:inactive", + "terms": ["is:inactive"], + } def test_is_invalid_query(self, enable_organizations, db_request): organizations = sorted( @@ -208,9 +224,11 @@ def test_is_invalid_query(self, enable_organizations, db_request): db_request.GET["q"] = "is:not-actually-a-valid-query" result = views.organization_list(db_request) - assert result["organizations"].items == organizations[:25] - assert result["query"] == "is:not-actually-a-valid-query" - assert result["terms"] == ["is:not-actually-a-valid-query"] + assert result == { + "organizations": organizations[:25], + "query": "is:not-actually-a-valid-query", + "terms": ["is:not-actually-a-valid-query"], + } def test_disable_organizations(self, db_request): with pytest.raises(HTTPNotFound): diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2445,10 +2445,7 @@ def test_create_organization(self, monkeypatch): ) ] assert organization_service.add_catalog_entry.calls == [ - pretend.call( - organization.name, - organization.id, - ) + pretend.call(organization.id) ] assert organization_service.add_organization_role.calls == [ pretend.call( diff --git a/tests/unit/organizations/test_services.py b/tests/unit/organizations/test_services.py --- a/tests/unit/organizations/test_services.py +++ b/tests/unit/organizations/test_services.py @@ -86,9 +86,7 @@ def test_add_organization(self, organization_service): def test_add_catalog_entry(self, organization_service): organization = OrganizationFactory.create() - catalog_entry = organization_service.add_catalog_entry( - organization.normalized_name, organization.id - ) + catalog_entry = organization_service.add_catalog_entry(organization.id) assert catalog_entry.normalized_name == organization.normalized_name assert catalog_entry.organization_id == organization.id
Flaky organization-related test I noticed there is an organization-related test that is currently occasionally failing on `main`: ``` =================================== FAILURES =================================== ______________________ TestOrganizationList.test_no_query ______________________ self = <tests.unit.admin.views.test_organizations.TestOrganizationList object at 0x7fd3ab8ef4f0> enable_organizations = None db_request = <pyramid.testing.DummyRequest object at 0x7fd3a257edc0> def test_no_query(self, enable_organizations, db_request): organizations = sorted( [OrganizationFactory.create() for _ in range(30)], key=lambda o: o.normalized_name, ) result = views.organization_list(db_request) > assert result["organizations"].items == organizations[:25] E AssertionError: assert [Organization...ulture'), ...] == [Organization...ulture'), ...] E At index 22 diff: Organization(name='product') != Organization(name='product') E Full diff: E [ E Organization(name='and'), E Organization(name='anything'), E Organization(name='artist'), E Organization(name='base'), E Organization(name='business'), E Organization(name='culture'), E Organization(name='effect'), E Organization(name='effect'), E Organization(name='end'), E Organization(name='fast'), E Organization(name='financial'), E Organization(name='five'), E Organization(name='garden'), E Organization(name='grow'), E Organization(name='himself'), E Organization(name='information'), E Organization(name='itself'), E Organization(name='many'), E Organization(name='mean'), E Organization(name='moment'), E Organization(name='politics'), E Organization(name='prepare'), E Organization(name='product'), E Organization(name='product'), E Organization(name='real'), E ] tests/unit/admin/views/test_organizations.py:33: AssertionError ``` @divbzero @sterbo Can you look into this?
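For context on why this assertion only fails intermittently: Python's `sorted` is stable, but the view's database query makes no guarantee about the relative order of rows whose sort key collides, so whenever the factory happens to generate duplicate names (two `product` organizations in the log above) the element-wise list comparison becomes order-dependent. Below is a minimal, self-contained sketch of the effect and one way to make such comparisons deterministic; the `Organization` dataclass and the `(name, id)` tiebreaker are illustrative assumptions for this sketch, not the actual warehouse code or the fix that eventually landed.

```python
import random
from dataclasses import dataclass, field
from itertools import count

_ids = count()

@dataclass
class Organization:
    # Illustrative stand-in for the SQLAlchemy model / factory-boy factory;
    # only the non-unique name matters for reproducing the flake.
    name: str
    id: int = field(default_factory=lambda: next(_ids))

# Faker occasionally produces duplicate names ("product" twice in the log above).
orgs = [Organization("and"), Organization("product"), Organization("product")]

# The test's expected ordering: Python's sort is stable.
expected = sorted(orgs, key=lambda o: o.name)

# The database may hand back rows with equal sort keys in arbitrary order.
random.shuffle(orgs)
from_db = sorted(orgs, key=lambda o: o.name)  # like ORDER BY normalized_name alone

# The two "product" objects can appear in either relative order, so this
# element-wise comparison passes or fails depending on the shuffle:
print(expected == from_db)

# A deterministic alternative: break ties with a unique column on both sides.
assert sorted(orgs, key=lambda o: (o.name, o.id)) == sorted(
    expected, key=lambda o: (o.name, o.id)
)
```

Equivalent remedies include generating guaranteed-unique names in the test factory or asserting on sets rather than ordered lists, since the nondeterminism only matters when the comparison is order-sensitive.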
You can see the failure here: https://github.com/pypa/warehouse/runs/6260354769?check_suite_focus=true @di Yes, let me look into this.
2022-05-02T19:30:08Z
[]
[]
pypi/warehouse
11,342
pypi__warehouse-11342
[ "11083" ]
64fc7f1e4922017b476643ba7eda0859d69ae137
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -56,12 +56,18 @@ from warehouse.cache.origin import origin_cache from warehouse.email import ( send_added_as_collaborator_email, + send_added_as_organization_member_email, send_collaborator_added_email, + send_declined_as_invited_organization_member_email, send_email_verification_email, + send_organization_member_added_email, + send_organization_member_invite_declined_email, send_password_change_email, send_password_reset_email, send_recovery_code_reminder_email, ) +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.models import OrganizationRole, OrganizationRoleType from warehouse.packaging.models import ( JournalEntry, Project, @@ -816,6 +822,174 @@ def _get_two_factor_data(request, _redirect_to="/"): return two_factor_data +@view_config( + route_name="accounts.verify-organization-role", + renderer="accounts/organization-invite-confirmation.html", + require_methods=False, + uses_session=True, + permission="manage:user", + has_translations=True, +) +def verify_organization_role(request): + token_service = request.find_service(ITokenService, name="email") + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + + def _error(message): + request.session.flash(message, queue="error") + return HTTPSeeOther(request.route_path("manage.organizations")) + + try: + token = request.params.get("token") + data = token_service.loads(token) + except TokenExpired: + return _error(request._("Expired token: request a new organization invitation")) + except TokenInvalid: + return _error(request._("Invalid token: request a new organization invitation")) + except TokenMissing: + return _error(request._("Invalid token: no token supplied")) + + # Check whether this token is being used correctly + if data.get("action") != "email-organization-role-verify": + return _error(request._("Invalid token: not an organization invitation token")) + + user = user_service.get_user(data.get("user_id")) + if user != request.user: + return _error(request._("Organization invitation is not valid.")) + + organization = organization_service.get_organization(data.get("organization_id")) + desired_role = data.get("desired_role") + + organization_invite = organization_service.get_organization_invite_by_user( + organization.id, user.id + ) + if not organization_invite: + return _error(request._("Organization invitation no longer exists.")) + + # Use the renderer to bring up a confirmation page + # before adding as contributor + if request.method == "GET": + return { + "organization_name": organization.name, + "desired_role": desired_role, + } + elif request.method == "POST" and "decline" in request.POST: + organization_service.delete_organization_invite(organization_invite.id) + submitter_user = user_service.get_user(data.get("submitter_id")) + organization.record_event( + tag="organization:organization_role:declined", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(submitter_user.id), + "role_name": desired_role, + "target_user_id": str(user.id), + }, + ) + user.record_event( + tag="account:organization_role:declined", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(submitter_user.id), + "organization_name": organization.name, + "role_name": desired_role, + }, + ) + owner_roles = ( + 
request.db.query(OrganizationRole) + .filter(OrganizationRole.organization == organization) + .filter(OrganizationRole.role_name == OrganizationRoleType.Owner) + .all() + ) + owner_users = {owner.user for owner in owner_roles} + send_organization_member_invite_declined_email( + request, + owner_users, + user=user, + organization_name=organization.name, + ) + send_declined_as_invited_organization_member_email( + request, + user, + organization_name=organization.name, + ) + request.session.flash( + request._( + "Invitation for '${organization_name}' is declined.", + mapping={"organization_name": organization.name}, + ), + queue="success", + ) + return HTTPSeeOther(request.route_path("manage.organizations")) + + organization_service.add_organization_role( + organization_id=organization.id, + user_id=user.id, + role_name=desired_role, + ) + organization_service.delete_organization_invite(organization_invite.id) + submitter_user = user_service.get_user(data.get("submitter_id")) + organization.record_event( + tag="organization:organization_role:accepted", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(submitter_user.id), + "role_name": desired_role, + "target_user_id": str(user.id), + }, + ) + user.record_event( + tag="account:organization_role:accepted", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(submitter_user.id), + "organization_name": organization.name, + "role_name": desired_role, + }, + ) + + owner_roles = ( + request.db.query(OrganizationRole) + .filter(OrganizationRole.organization == organization) + .filter(OrganizationRole.role_name == OrganizationRoleType.Owner) + .all() + ) + owner_users = {owner.user for owner in owner_roles} + + # Don't send email to new user if they are now an owner + owner_users.discard(user) + + send_organization_member_added_email( + request, + owner_users, + user=user, + submitter=submitter_user, + organization_name=organization.name, + role=desired_role, + ) + + send_added_as_organization_member_email( + request, + user, + submitter=submitter_user, + organization_name=organization.name, + role=desired_role, + ) + + request.session.flash( + request._( + "You are now ${role} of the '${organization_name}' organization.", + mapping={"organization_name": organization.name, "role": desired_role}, + ), + queue="success", + ) + + return HTTPSeeOther( + request.route_path( + "manage.organization.roles", organization_name=organization.name + ) + ) + + @view_config( route_name="accounts.verify-project-role", renderer="accounts/invite-confirmation.html", @@ -836,9 +1010,9 @@ def _error(message): token = request.params.get("token") data = token_service.loads(token) except TokenExpired: - return _error(request._("Expired token: request a new project role invite")) + return _error(request._("Expired token: request a new project role invitation")) except TokenInvalid: - return _error(request._("Invalid token: request a new project role invite")) + return _error(request._("Invalid token: request a new project role invitation")) except TokenMissing: return _error(request._("Invalid token: no token supplied")) diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -421,6 +421,16 @@ def configure(settings=None): # And some enums to reuse in the templates jglobals.setdefault("AdminFlagValue", "warehouse.admin.flags:AdminFlagValue") + jglobals.setdefault( + "OrganizationInvitationStatus", + "warehouse.organizations.models:OrganizationInvitationStatus", + ) + 
jglobals.setdefault( + "OrganizationRoleType", "warehouse.organizations.models:OrganizationRoleType" + ) + jglobals.setdefault( + "OrganizationType", "warehouse.organizations.models:OrganizationType" + ) jglobals.setdefault( "RoleInvitationStatus", "warehouse.packaging.models:RoleInvitationStatus" ) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -331,13 +331,199 @@ def send_new_organization_declined_email( } -@_email("collaborator-added") -def send_collaborator_added_email( - request, email_recipients, *, user, submitter, project_name, role +@_email("organization-member-invited") +def send_organization_member_invited_email( + request, + email_recipients, + *, + user, + desired_role, + initiator_username, + organization_name, + email_token, + token_age, +): + return { + "username": user.username, + "desired_role": desired_role, + "initiator_username": initiator_username, + "n_hours": token_age // 60 // 60, + "organization_name": organization_name, + "token": email_token, + } + + +@_email("verify-organization-role", allow_unverified=True) +def send_organization_role_verification_email( + request, + user, + *, + desired_role, + initiator_username, + organization_name, + email_token, + token_age, +): + return { + "username": user.username, + "desired_role": desired_role, + "initiator_username": initiator_username, + "n_hours": token_age // 60 // 60, + "organization_name": organization_name, + "token": email_token, + } + + +@_email("organization-member-invite-canceled") +def send_organization_member_invite_canceled_email( + request, + email_recipients, + *, + user, + organization_name, +): + return { + "username": user.username, + "organization_name": organization_name, + } + + +@_email("canceled-as-invited-organization-member") +def send_canceled_as_invited_organization_member_email( + request, + user, + *, + organization_name, +): + return { + "username": user.username, + "organization_name": organization_name, + } + + +@_email("organization-member-invite-declined") +def send_organization_member_invite_declined_email( + request, + email_recipients, + *, + user, + organization_name, +): + return { + "username": user.username, + "organization_name": organization_name, + } + + +@_email("declined-as-invited-organization-member") +def send_declined_as_invited_organization_member_email( + request, + user, + *, + organization_name, +): + return { + "username": user.username, + "organization_name": organization_name, + } + + +@_email("organization-member-added") +def send_organization_member_added_email( + request, + email_recipients, + *, + user, + submitter, + organization_name, + role, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + "role": role, + } + + +@_email("added-as-organization-member") +def send_added_as_organization_member_email( + request, + user, + *, + submitter, + organization_name, + role, ): return { "username": user.username, - "project": project_name, + "submitter": submitter.username, + "organization_name": organization_name, + "role": role, + } + + +@_email("organization-member-removed") +def send_organization_member_removed_email( + request, + email_recipients, + *, + user, + submitter, + organization_name, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + } + + +@_email("removed-as-organization-member") +def 
send_removed_as_organization_member_email( + request, + user, + *, + submitter, + organization_name, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + } + + +@_email("organization-member-role-changed") +def send_organization_member_role_changed_email( + request, + email_recipients, + *, + user, + submitter, + organization_name, + role, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + "role": role, + } + + +@_email("role-changed-as-organization-member") +def send_role_changed_as_organization_member_email( + request, + user, + *, + submitter, + organization_name, + role, +): + return { + "username": user.username, + "organization_name": organization_name, "submitter": submitter.username, "role": role, } @@ -363,6 +549,18 @@ def send_project_role_verification_email( } +@_email("collaborator-added") +def send_collaborator_added_email( + request, email_recipients, *, user, submitter, project_name, role +): + return { + "username": user.username, + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + @_email("added-as-collaborator") def send_added_as_collaborator_email(request, user, *, submitter, project_name, role): return { diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -26,6 +26,7 @@ WebAuthnCredentialMixin, ) from warehouse.i18n import localize as _ +from warehouse.organizations.models import OrganizationType # /manage/account/ forms @@ -310,7 +311,22 @@ class Toggle2FARequirementForm(forms.Form): # /manage/organizations/ forms -class NewOrganizationNameMixin: +class OrganizationRoleNameMixin: + + role_name = wtforms.SelectField( + "Select role", + choices=[ + ("", "Select role"), + ("Member", "Member"), + ("Manager", "Manager"), + ("Owner", "Owner"), + ("Billing Manager", "Billing Manager"), + ], + validators=[wtforms.validators.DataRequired(message="Select role")], + ) + + +class OrganizationNameMixin: name = wtforms.StringField( validators=[ @@ -350,7 +366,33 @@ def validate_name(self, field): ) -class CreateOrganizationForm(forms.Form, NewOrganizationNameMixin): +class CreateOrganizationRoleForm(OrganizationRoleNameMixin, UsernameMixin, forms.Form): + def __init__(self, *args, orgtype, organization_service, user_service, **kwargs): + super().__init__(*args, **kwargs) + if orgtype != OrganizationType.Company: + # Remove "Billing Manager" choice if organization is not a "Company" + self.role_name.choices = [ + choice + for choice in self.role_name.choices + if "Billing Manager" not in choice + ] + self.organization_service = organization_service + self.user_service = user_service + + +class ChangeOrganizationRoleForm(OrganizationRoleNameMixin, forms.Form): + def __init__(self, *args, orgtype, **kwargs): + super().__init__(*args, **kwargs) + if orgtype != OrganizationType.Company: + # Remove "Billing Manager" choice if organization is not a "Company" + self.role_name.choices = [ + choice + for choice in self.role_name.choices + if "Billing Manager" not in choice + ] + + +class CreateOrganizationForm(forms.Form, OrganizationNameMixin): __params__ = ["name", "display_name", "link_url", "description", "orgtype"] diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -44,21 +44,29 @@ from warehouse.email import ( send_account_deletion_email, 
send_admin_new_organization_requested_email, + send_canceled_as_invited_organization_member_email, send_collaborator_removed_email, send_collaborator_role_changed_email, send_email_verification_email, send_new_organization_requested_email, send_oidc_provider_added_email, send_oidc_provider_removed_email, + send_organization_member_invite_canceled_email, + send_organization_member_invited_email, + send_organization_member_removed_email, + send_organization_member_role_changed_email, + send_organization_role_verification_email, send_password_change_email, send_primary_email_change_email, send_project_role_verification_email, send_recovery_codes_generated_email, send_removed_as_collaborator_email, + send_removed_as_organization_member_email, send_removed_project_email, send_removed_project_release_email, send_removed_project_release_file_email, send_role_changed_as_collaborator_email, + send_role_changed_as_organization_member_email, send_two_factor_added_email, send_two_factor_removed_email, send_unyanked_project_release_email, @@ -68,11 +76,13 @@ from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage.forms import ( AddEmailForm, + ChangeOrganizationRoleForm, ChangePasswordForm, ChangeRoleForm, ConfirmPasswordForm, CreateMacaroonForm, CreateOrganizationForm, + CreateOrganizationRoleForm, CreateRoleForm, DeleteMacaroonForm, DeleteTOTPForm, @@ -89,6 +99,7 @@ from warehouse.organizations.interfaces import IOrganizationService from warehouse.organizations.models import ( Organization, + OrganizationInvitationStatus, OrganizationRole, OrganizationRoleType, ) @@ -1028,6 +1039,20 @@ def user_organizations(request): } +def organization_owners(request, organization): + """Return all users who are owners of the organization.""" + owner_roles = ( + request.db.query(User.id) + .join(OrganizationRole.user) + .filter( + OrganizationRole.role_name == OrganizationRoleType.Owner, + OrganizationRole.organization == organization, + ) + .subquery() + ) + return request.db.query(User).join(owner_roles, User.id == owner_roles.c.id).all() + + @view_defaults( route_name="manage.organizations", renderer="manage/organizations.html", @@ -1048,11 +1073,22 @@ def __init__(self, request): @property def default_response(self): all_user_organizations = user_organizations(self.request) + + organization_invites = ( + self.organization_service.get_organization_invites_by_user( + self.request.user.id + ) + ) + organization_invites = [ + (organization_invite.organization, organization_invite.token) + for organization_invite in organization_invites + ] + return { + "organization_invites": organization_invites, "organizations": self.organization_service.get_organizations_by_user( self.request.user.id ), - **all_user_organizations, "organizations_managed": list( organization.name for organization in all_user_organizations["organizations_managed"] @@ -1102,7 +1138,9 @@ def create_organization(self): additional={"submitted_by_user_id": str(self.request.user.id)}, ) self.organization_service.add_organization_role( - "Owner", self.request.user.id, organization.id + organization.id, + self.request.user.id, + OrganizationRoleType.Owner, ) self.organization_service.record_event( organization.id, @@ -1156,15 +1194,381 @@ def create_organization(self): renderer="manage/organization/roles.html", uses_session=True, require_methods=False, - # permission="manage:organization", + permission="view:organization", has_translations=True, require_reauth=True, ) -def manage_organization_roles(organization, request): +def 
manage_organization_roles( + organization, request, _form_class=CreateOrganizationRoleForm +): if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): raise HTTPNotFound - return {"organization": organization} + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + form = _form_class( + request.POST, + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ) + + if request.method == "POST" and form.validate(): + username = form.username.data + role_name = form.role_name.data + userid = user_service.find_userid(username) + user = user_service.get_user(userid) + token_service = request.find_service(ITokenService, name="email") + + existing_role = organization_service.get_organization_role_by_user( + organization.id, user.id + ) + organization_invite = organization_service.get_organization_invite_by_user( + organization.id, user.id + ) + # Cover edge case where invite is invalid but task + # has not updated invite status + try: + invite_token = token_service.loads(organization_invite.token) + except (TokenExpired, AttributeError): + invite_token = None + + if existing_role: + request.session.flash( + request._( + "User '${username}' already has ${role_name} role for organization", + mapping={ + "username": username, + "role_name": existing_role.role_name.value, + }, + ), + queue="error", + ) + elif user.primary_email is None or not user.primary_email.verified: + request.session.flash( + request._( + "User '${username}' does not have a verified primary email " + "address and cannot be added as a ${role_name} for organization", + mapping={"username": username, "role_name": role_name}, + ), + queue="error", + ) + elif ( + organization_invite + and organization_invite.invite_status + == OrganizationInvitationStatus.Pending + and invite_token + ): + request.session.flash( + request._( + "User '${username}' already has an active invite. 
" + "Please try again later.", + mapping={"username": username}, + ), + queue="error", + ) + else: + invite_token = token_service.dumps( + { + "action": "email-organization-role-verify", + "desired_role": role_name, + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": request.user.id, + } + ) + if organization_invite: + organization_invite.invite_status = OrganizationInvitationStatus.Pending + organization_invite.token = invite_token + else: + organization_service.add_organization_invite( + organization_id=organization.id, + user_id=user.id, + invite_token=invite_token, + ) + organization.record_event( + tag="organization:organization_role:invite", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": role_name, + "target_user_id": str(userid), + }, + ) + request.db.flush() # in order to get id + owner_users = set(organization_owners(request, organization)) + send_organization_member_invited_email( + request, + owner_users, + user=user, + desired_role=role_name, + initiator_username=request.user.username, + organization_name=organization.name, + email_token=invite_token, + token_age=token_service.max_age, + ) + send_organization_role_verification_email( + request, + user, + desired_role=role_name, + initiator_username=request.user.username, + organization_name=organization.name, + email_token=invite_token, + token_age=token_service.max_age, + ) + request.session.flash( + request._( + "Invitation sent to '${username}'", + mapping={"username": username}, + ), + queue="success", + ) + + form = _form_class( + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ) + + roles = set(organization_service.get_organization_roles(organization.id)) + invitations = set(organization_service.get_organization_invites(organization.id)) + + return { + "organization": organization, + "roles": roles, + "invitations": invitations, + "form": form, + } + + +@view_config( + route_name="manage.organization.revoke_invite", + context=Organization, + uses_session=True, + require_methods=["POST"], + permission="manage:organization", + has_translations=True, +) +def revoke_organization_invitation(organization, request): + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + token_service = request.find_service(ITokenService, name="email") + user = user_service.get_user(request.POST["user_id"]) + + organization_invite = organization_service.get_organization_invite_by_user( + organization.id, user.id + ) + if organization_invite is None: + request.session.flash( + request._("Could not find organization invitation."), queue="error" + ) + return HTTPSeeOther( + request.route_path( + "manage.organization.roles", organization_name=organization.name + ) + ) + + organization_service.delete_organization_invite(organization_invite.id) + + try: + token_data = token_service.loads(organization_invite.token) + except TokenExpired: + request.session.flash(request._("Invitation already expired."), queue="success") + return HTTPSeeOther( + request.route_path( + "manage.organization.roles", organization_name=organization.name + ) + ) + role_name = token_data.get("desired_role") + + organization.record_event( + tag="organization:organization_role:revoke_invite", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": role_name, + "target_user_id": 
str(user.id), + }, + ) + + owner_users = set(organization_owners(request, organization)) + send_organization_member_invite_canceled_email( + request, + owner_users, + user=user, + organization_name=organization.name, + ) + send_canceled_as_invited_organization_member_email( + request, + user, + organization_name=organization.name, + ) + + request.session.flash( + request._( + "Invitation revoked from '${username}'.", + mapping={"username": user.username}, + ), + queue="success", + ) + + return HTTPSeeOther( + request.route_path( + "manage.organization.roles", organization_name=organization.name + ) + ) + + +@view_config( + route_name="manage.organization.change_role", + context=Organization, + uses_session=True, + require_methods=["POST"], + permission="manage:organization", + has_translations=True, + require_reauth=True, +) +def change_organization_role( + organization, request, _form_class=ChangeOrganizationRoleForm +): + form = _form_class(request.POST, orgtype=organization.orgtype) + + if form.validate(): + organization_service = request.find_service(IOrganizationService, context=None) + role_id = request.POST["role_id"] + role = organization_service.get_organization_role(role_id) + if not role or role.organization_id != organization.id: + request.session.flash("Could not find member", queue="error") + elif role.role_name == OrganizationRoleType.Owner and role.user == request.user: + request.session.flash("Cannot remove yourself as Owner", queue="error") + else: + role.role_name = form.role_name.data + + owner_users = set(organization_owners(request, organization)) + # Don't send owner notification email to new user + # if they are now an owner + owner_users.discard(role.user) + + send_organization_member_role_changed_email( + request, + owner_users, + user=role.user, + submitter=request.user, + organization_name=organization.name, + role=role.role_name, + ) + + send_role_changed_as_organization_member_email( + request, + role.user, + submitter=request.user, + organization_name=organization.name, + role=role.role_name, + ) + + organization.record_event( + tag="organization:organization_role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": form.role_name.data, + "target_user_id": str(role.user.id), + }, + ) + role.user.record_event( + tag="account:organization_role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "organization_name": organization.name, + "role_name": form.role_name.data, + }, + ) + + request.session.flash("Changed role", queue="success") + + return HTTPSeeOther( + request.route_path( + "manage.organization.roles", organization_name=organization.name + ) + ) + + +@view_config( + route_name="manage.organization.delete_role", + context=Organization, + uses_session=True, + require_methods=["POST"], + permission="view:organization", + has_translations=True, + require_reauth=True, +) +def delete_organization_role(organization, request): + organization_service = request.find_service(IOrganizationService, context=None) + role_id = request.POST["role_id"] + role = organization_service.get_organization_role(role_id) + if not role or role.organization_id != organization.id: + request.session.flash("Could not find member", queue="error") + elif ( + not request.has_permission("manage:organization") and role.user != request.user + ): + request.session.flash( + "Cannot remove other people from the organization", queue="error" + ) + elif role.role_name == 
OrganizationRoleType.Owner and role.user == request.user: + request.session.flash("Cannot remove yourself as Owner", queue="error") + else: + organization_service.delete_organization_role(role.id) + organization.record_event( + tag="organization:organization_role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": role.role_name.value, + "target_user_id": str(role.user.id), + }, + ) + role.user.record_event( + tag="account:organization_role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "organization_name": organization.name, + "role_name": role.role_name.value, + }, + ) + + owner_users = set(organization_owners(request, organization)) + # Don't send owner notification email to new user + # if they are now an owner + owner_users.discard(role.user) + + send_organization_member_removed_email( + request, + owner_users, + user=role.user, + submitter=request.user, + organization_name=organization.name, + ) + + send_removed_as_organization_member_email( + request, + role.user, + submitter=request.user, + organization_name=organization.name, + ) + + request.session.flash("Removed from organization", queue="success") + + if role and role.user == request.user: + # User removed self from organization. + return HTTPSeeOther(request.route_path("manage.organizations")) + else: + return HTTPSeeOther( + request.route_path( + "manage.organization.roles", organization_name=organization.name + ) + ) @view_config( diff --git a/warehouse/organizations/__init__.py b/warehouse/organizations/__init__.py --- a/warehouse/organizations/__init__.py +++ b/warehouse/organizations/__init__.py @@ -10,10 +10,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +from celery.schedules import crontab + from warehouse.organizations.interfaces import IOrganizationService from warehouse.organizations.services import database_organization_factory +from warehouse.organizations.tasks import update_organization_invitation_status def includeme(config): # Register our organization service config.register_service_factory(database_organization_factory, IOrganizationService) + + config.add_periodic_task( + crontab(minute="*/5"), update_organization_invitation_status + ) diff --git a/warehouse/organizations/interfaces.py b/warehouse/organizations/interfaces.py --- a/warehouse/organizations/interfaces.py +++ b/warehouse/organizations/interfaces.py @@ -59,9 +59,61 @@ def add_catalog_entry(organization_id): Adds the organization name to the organization name catalog """ - def add_organization_role(role_name, user_id, organization_id): + def get_organization_role(organization_role_id): """ - Adds the organization role to the specified user and org + Return the org role object that represents the given org role id, + or None if there is no organization role for that ID. 
+ """ + + def get_organization_role_by_user(organization_id, user_id): + """ + Gets an organization role for a specified org and user + """ + + def get_organization_roles(organization_id): + """ + Gets a list of organization roles for a specified org + """ + + def add_organization_role(organization_id, user_id, role_name): + """ + Adds an organization role for the specified org and user + """ + + def delete_organization_role(organization_role_id): + """ + Delete an organization role for a specified organization role id + """ + + def get_organization_invite(organization_invite_id): + """ + Return the org invite object that represents the given org invite id, + or None if there is no organization invite for that ID. + """ + + def get_organization_invite_by_user(organization_id, user_id): + """ + Gets an organization invite for a specified org and user + """ + + def get_organization_invites(organization_id): + """ + Gets a list of organization invites for a specified org + """ + + def get_organization_invites_by_user(user_id): + """ + Gets a list of organization invites for a specified user + """ + + def add_organization_invite(organization_id, user_id, invite_token): + """ + Adds an organization invitation for the specified user and org + """ + + def delete_organization_invite(organization_invite_id): + """ + Delete an organization invite for the specified org invite id """ def approve_organization(organization_id): diff --git a/warehouse/organizations/models.py b/warehouse/organizations/models.py --- a/warehouse/organizations/models.py +++ b/warehouse/organizations/models.py @@ -12,6 +12,7 @@ import enum +from pyramid.authorization import Allow from sqlalchemy import ( Boolean, CheckConstraint, @@ -35,12 +36,12 @@ from warehouse.utils.attrs import make_repr -class OrganizationRoleType(enum.Enum): +class OrganizationRoleType(str, enum.Enum): + Owner = "Owner" BillingManager = "Billing Manager" Manager = "Manager" Member = "Member" - Owner = "Owner" class OrganizationRole(db.Model): @@ -163,7 +164,6 @@ class Organization(HasEvents, db.Model): onupdate=func.now(), ) - # TODO: Determine if cascade applies to any of these relationships users = orm.relationship( User, secondary=OrganizationRole.__table__, backref="organizations" # type: ignore # noqa ) @@ -171,8 +171,54 @@ class Organization(HasEvents, db.Model): "Project", secondary=OrganizationProject.__table__, backref="organizations" # type: ignore # noqa ) - # TODO: - # def __acl__(self): + def __acl__(self): + session = orm.object_session(self) + + acls = [ + (Allow, "group:admins", "admin"), + (Allow, "group:moderators", "moderator"), + ] + + # Get all of the users for this organization. + query = session.query(OrganizationRole).filter( + OrganizationRole.organization == self + ) + query = query.options(orm.lazyload("organization")) + query = query.join(User).order_by(User.id.asc()) + for role in sorted( + query.all(), + key=lambda x: [e.value for e in OrganizationRoleType].index(x.role_name), + ): + # Allow all people in organization read access. + # Allow write access depending on role. 
+ if role.role_name == OrganizationRoleType.Owner: + acls.append( + ( + Allow, + f"user:{role.user.id}", + ["view:organization", "manage:organization"], + ) + ) + elif role.role_name == OrganizationRoleType.BillingManager: + acls.append( + ( + Allow, + f"user:{role.user.id}", + ["view:organization", "manage:billing"], + ) + ) + elif role.role_name == OrganizationRoleType.Manager: + acls.append( + ( + Allow, + f"user:{role.user.id}", + ["view:organization", "manage:team"], + ) + ) + else: + # No member-specific write access needed for now. + acls.append((Allow, f"user:{role.user.id}", ["view:organization"])) + return acls class OrganizationNameCatalog(db.Model): diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py --- a/warehouse/organizations/services.py +++ b/warehouse/organizations/services.py @@ -16,9 +16,12 @@ from sqlalchemy.orm.exc import NoResultFound from zope.interface import implementer +from warehouse.accounts.models import User from warehouse.organizations.interfaces import IOrganizationService from warehouse.organizations.models import ( Organization, + OrganizationInvitation, + OrganizationInvitationStatus, OrganizationNameCatalog, OrganizationRole, ) @@ -126,13 +129,50 @@ def add_catalog_entry(self, organization_id): return catalog_entry - def add_organization_role(self, role_name, user_id, organization_id): + def get_organization_role(self, organization_role_id): """ - Adds the organization role to the specified user and org + Return the org role object that represents the given org role id, + or None if there is no organization role for that ID. + """ + return self.db.query(OrganizationRole).get(organization_role_id) + + def get_organization_role_by_user(self, organization_id, user_id): + """ + Gets an organization role for a specified org and user + """ + try: + organization_role = ( + self.db.query(OrganizationRole) + .filter( + OrganizationRole.organization_id == organization_id, + OrganizationRole.user_id == user_id, + ) + .one() + ) + except NoResultFound: + return + + return organization_role + + def get_organization_roles(self, organization_id): + """ + Gets a list of organization roles for a specified org + """ + return ( + self.db.query(OrganizationRole) + .join(User) + .filter(OrganizationRole.organization_id == organization_id) + .all() + ) + + def add_organization_role(self, organization_id, user_id, role_name): + """ + Adds an organization role for the specified org and user """ - organization = self.get_organization(organization_id) role = OrganizationRole( - role_name=role_name, user_id=user_id, organization_id=organization.id + organization_id=organization_id, + user_id=user_id, + role_name=role_name, ) self.db.add(role) @@ -140,6 +180,91 @@ def add_organization_role(self, role_name, user_id, organization_id): return role + def delete_organization_role(self, organization_role_id): + """ + Delete an organization role for a specified organization role id + """ + role = self.get_organization_role(organization_role_id) + + self.db.delete(role) + self.db.flush() + + def get_organization_invite(self, organization_invite_id): + """ + Return the org invite object that represents the given org invite id, + or None if there is no organization invite for that ID. 
+ """ + return self.db.query(OrganizationInvitation).get(organization_invite_id) + + def get_organization_invite_by_user(self, organization_id, user_id): + """ + Gets an organization invite for a specified org and user + """ + try: + organization_invite = ( + self.db.query(OrganizationInvitation) + .filter( + OrganizationInvitation.organization_id == organization_id, + OrganizationInvitation.user_id == user_id, + ) + .one() + ) + except NoResultFound: + return + + return organization_invite + + def get_organization_invites(self, organization_id): + """ + Gets a list of organization invites for a specified org + """ + return ( + self.db.query(OrganizationInvitation) + .join(User) + .filter(OrganizationInvitation.organization_id == organization_id) + .all() + ) + + def get_organization_invites_by_user(self, user_id): + """ + Gets a list of organization invites for a specified user + """ + return ( + self.db.query(OrganizationInvitation) + .filter( + OrganizationInvitation.invite_status + == OrganizationInvitationStatus.Pending, + OrganizationInvitation.user_id == user_id, + ) + .all() + ) + + def add_organization_invite(self, organization_id, user_id, invite_token): + """ + Adds an organization invitation for the specified user and org + """ + # organization = self.get_organization(organization_id) + organization_invite = OrganizationInvitation( + organization_id=organization_id, + user_id=user_id, + token=invite_token, + invite_status=OrganizationInvitationStatus.Pending, + ) + + self.db.add(organization_invite) + self.db.flush() + + return organization_invite + + def delete_organization_invite(self, organization_invite_id): + """ + Delete an organization invite for the specified org invite id + """ + organization_invite = self.get_organization_invite(organization_invite_id) + + self.db.delete(organization_invite) + self.db.flush() + def approve_organization(self, organization_id): """ Performs operations necessary to approve an Organization diff --git a/warehouse/organizations/tasks.py b/warehouse/organizations/tasks.py new file mode 100644 --- /dev/null +++ b/warehouse/organizations/tasks.py @@ -0,0 +1,36 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from warehouse import tasks +from warehouse.accounts.interfaces import ITokenService, TokenExpired +from warehouse.organizations.models import ( + OrganizationInvitation, + OrganizationInvitationStatus, +) + + [email protected](ignore_result=True, acks_late=True) +def update_organization_invitation_status(request): + invites = ( + request.db.query(OrganizationInvitation) + .filter( + OrganizationInvitation.invite_status == OrganizationInvitationStatus.Pending + ) + .all() + ) + token_service = request.find_service(ITokenService, name="email") + + for invite in invites: + try: + token_service.loads(invite.token) + except TokenExpired: + invite.invite_status = OrganizationInvitationStatus.Expired diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -165,6 +165,11 @@ def includeme(config): config.add_route( "accounts.verify-email", "/account/verify-email/", domain=warehouse ) + config.add_route( + "accounts.verify-organization-role", + "/account/verify-organization-role/", + domain=warehouse, + ) config.add_route( "accounts.verify-project-role", "/account/verify-project-role/", @@ -229,6 +234,27 @@ def includeme(config): traverse="/{organization_name}", domain=warehouse, ) + config.add_route( + "manage.organization.revoke_invite", + "/manage/organization/{organization_name}/people/revoke_invite/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ) + config.add_route( + "manage.organization.change_role", + "/manage/organization/{organization_name}/people/change/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ) + config.add_route( + "manage.organization.delete_role", + "/manage/organization/{organization_name}/people/delete/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ) config.add_route("manage.projects", "/manage/projects/", domain=warehouse) config.add_route( "manage.project.settings",
diff --git a/tests/common/db/organizations.py b/tests/common/db/organizations.py --- a/tests/common/db/organizations.py +++ b/tests/common/db/organizations.py @@ -21,6 +21,7 @@ OrganizationNameCatalog, OrganizationProject, OrganizationRole, + OrganizationRoleType, ) from .accounts import UserFactory @@ -61,7 +62,7 @@ class OrganizationNameCatalogFactory(WarehouseFactory): class Meta: model = OrganizationNameCatalog - name = factory.Faker("orgname") + name = factory.Faker("pystr", max_chars=12) organization_id = factory.Faker("uuid4", cast_to=None) @@ -69,7 +70,7 @@ class OrganizationRoleFactory(WarehouseFactory): class Meta: model = OrganizationRole - role_name = "Owner" + role_name = OrganizationRoleType.Owner user = factory.SubFactory(UserFactory) organization = factory.SubFactory(OrganizationFactory) diff --git a/tests/conftest.py b/tests/conftest.py --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,17 +38,19 @@ import warehouse -from warehouse import admin, config, static +from warehouse import admin, config, email, static from warehouse.accounts import services as account_services -from warehouse.accounts.interfaces import ITokenService +from warehouse.accounts.interfaces import ITokenService, IUserService from warehouse.admin.flags import AdminFlag, AdminFlagValue from warehouse.email import services as email_services from warehouse.email.interfaces import IEmailSender from warehouse.macaroons import services as macaroon_services from warehouse.metrics import IMetricsService from warehouse.organizations import services as organization_services +from warehouse.organizations.interfaces import IOrganizationService from .common.db import Session +from .common.db.accounts import EmailFactory, UserFactory def pytest_collection_modifyitems(items): @@ -120,14 +122,18 @@ def find_service(self, iface=None, context=None, name=""): @pytest.fixture -def pyramid_services(metrics, email_service, token_service): +def pyramid_services( + email_service, metrics, organization_service, token_service, user_service +): services = _Services() # Register our global services. 
- services.register_service(metrics, IMetricsService, None, name="") services.register_service(email_service, IEmailSender, None, name="") + services.register_service(metrics, IMetricsService, None, name="") + services.register_service(organization_service, IOrganizationService, None, name="") services.register_service(token_service, ITokenService, None, name="password") services.register_service(token_service, ITokenService, None, name="email") + services.register_service(user_service, IUserService, None, name="") return services @@ -159,6 +165,14 @@ def pyramid_config(pyramid_request): yield config [email protected] +def pyramid_user(pyramid_request): + user = UserFactory.create() + EmailFactory.create(user=user, verified=True) + pyramid_request.user = user + return user + + @pytest.fixture def cli(): runner = click.testing.CliRunner() @@ -351,7 +365,7 @@ def db_request(pyramid_request, db_session): return pyramid_request [email protected]() [email protected] def enable_organizations(db_request): flag = db_request.db.query(AdminFlag).get( AdminFlagValue.DISABLE_ORGANIZATIONS.value @@ -361,6 +375,40 @@ def enable_organizations(db_request): flag.enabled = True [email protected] +def send_email(pyramid_request, monkeypatch): + send_email_stub = pretend.stub( + delay=pretend.call_recorder(lambda *args, **kwargs: None) + ) + pyramid_request.task = pretend.call_recorder( + lambda *args, **kwargs: send_email_stub + ) + pyramid_request.registry.settings = {"mail.sender": "[email protected]"} + monkeypatch.setattr(email, "send_email", send_email_stub) + return send_email_stub + + [email protected] +def make_email_renderers(pyramid_config): + def _make_email_renderers( + name, + subject="Email Subject", + body="Email Body", + html="Email HTML Body", + ): + subject_renderer = pyramid_config.testing_add_renderer( + f"email/{name}/subject.txt" + ) + subject_renderer.string_response = subject + body_renderer = pyramid_config.testing_add_renderer(f"email/{name}/body.txt") + body_renderer.string_response = body + html_renderer = pyramid_config.testing_add_renderer(f"email/{name}/body.html") + html_renderer.string_response = html + return subject_renderer, body_renderer, html_renderer + + return _make_email_renderers + + class _TestApp(_webtest.TestApp): def xmlrpc(self, path, method, *args): body = xmlrpc.client.dumps(args, methodname=method) diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -41,10 +41,20 @@ from warehouse.accounts.models import User from warehouse.accounts.views import two_factor_and_totp_validate from warehouse.admin.flags import AdminFlag, AdminFlagValue +from warehouse.organizations.models import ( + OrganizationInvitation, + OrganizationRole, + OrganizationRoleType, +) from warehouse.packaging.models import Role, RoleInvitation from warehouse.rate_limiting.interfaces import IRateLimiter from ...common.db.accounts import EmailFactory, UserFactory +from ...common.db.organizations import ( + OrganizationFactory, + OrganizationInvitationFactory, + OrganizationRoleFactory, +) from ...common.db.packaging import ProjectFactory, RoleFactory, RoleInvitationFactory @@ -2142,6 +2152,340 @@ def test_verify_email_already_verified(self, db_request): ] +class TestVerifyOrganizationRole: + @pytest.mark.parametrize( + "desired_role", ["Member", "Manager", "Owner", "Billing Manager"] + ) + def test_verify_organization_role( + self, db_request, token_service, monkeypatch, desired_role + 
): + organization = OrganizationFactory.create() + user = UserFactory.create() + OrganizationInvitationFactory.create( + organization=organization, + user=user, + ) + owner_user = UserFactory.create() + OrganizationRoleFactory( + organization=organization, + user=owner_user, + role_name=OrganizationRoleType.Owner, + ) + + db_request.user = user + db_request.method = "POST" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/") + db_request.remote_addr = "192.168.1.1" + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-organization-role-verify", + "desired_role": desired_role, + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": owner_user.id, + } + ) + + organization_member_added_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, + "send_organization_member_added_email", + organization_member_added_email, + ) + added_as_organization_member_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, + "send_added_as_organization_member_email", + added_as_organization_member_email, + ) + + result = views.verify_organization_role(db_request) + + db_request.db.flush() + + assert not ( + db_request.db.query(OrganizationInvitation) + .filter(OrganizationInvitation.user == user) + .filter(OrganizationInvitation.organization == organization) + .one_or_none() + ) + assert ( + db_request.db.query(OrganizationRole) + .filter( + OrganizationRole.organization == organization, + OrganizationRole.user == user, + ) + .one() + ) + assert organization_member_added_email.calls == [ + pretend.call( + db_request, + {owner_user}, + user=user, + submitter=owner_user, + organization_name=organization.name, + role=desired_role, + ) + ] + assert added_as_organization_member_email.calls == [ + pretend.call( + db_request, + user, + submitter=owner_user, + organization_name=organization.name, + role=desired_role, + ) + ] + assert db_request.session.flash.calls == [ + pretend.call( + ( + f"You are now {desired_role} of the " + f"'{organization.name}' organization." 
+ ), + queue="success", + ) + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/" + assert db_request.route_path.calls == [ + pretend.call( + "manage.organization.roles", organization_name=organization.name + ) + ] + + @pytest.mark.parametrize( + ("exception", "message"), + [ + (TokenInvalid, "Invalid token: request a new organization invitation"), + (TokenExpired, "Expired token: request a new organization invitation"), + (TokenMissing, "Invalid token: no token supplied"), + ], + ) + def test_verify_organization_role_loads_failure( + self, db_request, token_service, exception, message + ): + def loads(token): + raise exception + + db_request.params = {"token": "RANDOM_KEY"} + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = loads + + views.verify_organization_role(db_request) + + assert db_request.route_path.calls == [pretend.call("manage.organizations")] + assert db_request.session.flash.calls == [pretend.call(message, queue="error")] + + def test_verify_email_invalid_action(self, db_request, token_service): + data = {"action": "invalid-action"} + db_request.params = {"token": "RANDOM_KEY"} + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = lambda a: data + + views.verify_organization_role(db_request) + + assert db_request.route_path.calls == [pretend.call("manage.organizations")] + assert db_request.session.flash.calls == [ + pretend.call( + "Invalid token: not an organization invitation token", queue="error" + ) + ] + + def test_verify_organization_role_revoked(self, db_request, token_service): + desired_role = "Manager" + organization = OrganizationFactory.create() + user = UserFactory.create() + owner_user = UserFactory.create() + OrganizationRoleFactory( + organization=organization, + user=owner_user, + role_name=OrganizationRoleType.Owner, + ) + + db_request.user = user + db_request.method = "POST" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-organization-role-verify", + "desired_role": desired_role, + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": owner_user.id, + } + ) + + views.verify_organization_role(db_request) + + assert db_request.session.flash.calls == [ + pretend.call( + "Organization invitation no longer exists.", + queue="error", + ) + ] + assert db_request.route_path.calls == [pretend.call("manage.organizations")] + + def test_verify_organization_role_declined( + self, db_request, token_service, monkeypatch + ): + desired_role = "Manager" + organization = OrganizationFactory.create() + user = UserFactory.create() + OrganizationInvitationFactory.create( + organization=organization, + user=user, + ) + owner_user = UserFactory.create() + OrganizationRoleFactory( + organization=organization, + user=owner_user, + role_name=OrganizationRoleType.Owner, + ) + + db_request.user = user + db_request.method = "POST" + db_request.POST.update({"token": "RANDOM_KEY", "decline": "Decline"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + db_request.session.flash = 
pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-organization-role-verify", + "desired_role": desired_role, + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": owner_user.id, + } + ) + + organization_member_invite_declined_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, + "send_organization_member_invite_declined_email", + organization_member_invite_declined_email, + ) + declined_as_invited_organization_member_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, + "send_declined_as_invited_organization_member_email", + declined_as_invited_organization_member_email, + ) + + result = views.verify_organization_role(db_request) + + assert not ( + db_request.db.query(OrganizationInvitation) + .filter(OrganizationInvitation.user == user) + .filter(OrganizationInvitation.organization == organization) + .one_or_none() + ) + assert organization_member_invite_declined_email.calls == [ + pretend.call( + db_request, + {owner_user}, + user=user, + organization_name=organization.name, + ) + ] + assert declined_as_invited_organization_member_email.calls == [ + pretend.call( + db_request, + user, + organization_name=organization.name, + ) + ] + assert isinstance(result, HTTPSeeOther) + assert db_request.route_path.calls == [pretend.call("manage.organizations")] + + def test_verify_fails_with_different_user(self, db_request, token_service): + desired_role = "Manager" + organization = OrganizationFactory.create() + user = UserFactory.create() + user_2 = UserFactory.create() + owner_user = UserFactory.create() + OrganizationRoleFactory( + organization=organization, + user=owner_user, + role_name=OrganizationRoleType.Owner, + ) + + db_request.user = user_2 + db_request.method = "POST" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-organization-role-verify", + "desired_role": desired_role, + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": owner_user.id, + } + ) + + views.verify_organization_role(db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Organization invitation is not valid.", queue="error") + ] + assert db_request.route_path.calls == [pretend.call("manage.organizations")] + + def test_verify_role_get_confirmation(self, db_request, token_service): + desired_role = "Manager" + organization = OrganizationFactory.create() + user = UserFactory.create() + OrganizationInvitationFactory.create( + organization=organization, + user=user, + ) + owner_user = UserFactory.create() + OrganizationRoleFactory( + organization=organization, + user=owner_user, + role_name=OrganizationRoleType.Owner, + ) + + db_request.user = user + db_request.method = "GET" + db_request.GET.update({"token": "RANDOM_KEY"}) + db_request.route_path = pretend.call_recorder(lambda name: "/") + db_request.remote_addr = "192.168.1.1" + db_request.session.flash = pretend.call_recorder(lambda *a, **kw: None) + token_service.loads = pretend.call_recorder( + lambda token: { + "action": "email-organization-role-verify", + "desired_role": desired_role, + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": 
owner_user.id, + } + ) + + roles = views.verify_organization_role(db_request) + + assert roles == { + "organization_name": organization.name, + "desired_role": desired_role, + } + + class TestVerifyProjectRole: @pytest.mark.parametrize("desired_role", ["Maintainer", "Owner"]) def test_verify_project_role( @@ -2252,8 +2596,8 @@ def test_verify_project_role( @pytest.mark.parametrize( ("exception", "message"), [ - (TokenInvalid, "Invalid token: request a new project role invite"), - (TokenExpired, "Expired token: request a new project role invite"), + (TokenInvalid, "Invalid token: request a new project role invitation"), + (TokenExpired, "Expired token: request a new project role invitation"), (TokenMissing, "Invalid token: no token supplied"), ], ) diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -1808,7 +1808,6 @@ def test_send_new_organization_declined_email( organization_name=organization_name, message=message, ) - assert pyramid_request.task.calls == [pretend.call(send_email)] assert send_email.delay.calls == [ pretend.call( f"{initiator_user.username} <{initiator_user.email}>", @@ -1834,6 +1833,672 @@ def test_send_new_organization_declined_email( ] +class TestOrganizationMemberEmails: + @pytest.fixture + def organization_invite(self, pyramid_user): + self.initiator_user = pyramid_user + self.user = UserFactory.create() + EmailFactory.create(user=self.user, verified=True) + self.desired_role = "Manager" + self.organization_name = "example" + self.email_token = "token" + self.token_age = 72 * 60 * 60 + + def test_send_organization_member_invited_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-member-invited" + ) + + result = email.send_organization_member_invited_email( + db_request, + self.initiator_user, + user=self.user, + desired_role=self.desired_role, + initiator_username=self.initiator_user.username, + organization_name=self.organization_name, + email_token=self.email_token, + token_age=self.token_age, + ) + + assert result == { + "username": self.user.username, + "desired_role": self.desired_role, + "initiator_username": self.initiator_user.username, + "n_hours": self.token_age // 60 // 60, + "organization_name": self.organization_name, + "token": self.email_token, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.initiator_user.name} <{self.initiator_user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.initiator_user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.initiator_user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + def test_send_organization_role_verification_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "verify-organization-role" + ) + + result = email.send_organization_role_verification_email( + 
db_request, + self.user, + desired_role=self.desired_role, + initiator_username=self.initiator_user.username, + organization_name=self.organization_name, + email_token=self.email_token, + token_age=self.token_age, + ) + + assert result == { + "username": self.user.username, + "desired_role": self.desired_role, + "initiator_username": self.initiator_user.username, + "n_hours": self.token_age // 60 // 60, + "organization_name": self.organization_name, + "token": self.email_token, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_member_invite_canceled_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-member-invite-canceled" + ) + + result = email.send_organization_member_invite_canceled_email( + db_request, + self.initiator_user, + user=self.user, + organization_name=self.organization_name, + ) + + assert result == { + "username": self.user.username, + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.initiator_user.name} <{self.initiator_user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.initiator_user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.initiator_user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + def test_send_canceled_as_invited_organization_member_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "canceled-as-invited-organization-member" + ) + + result = email.send_canceled_as_invited_organization_member_email( + db_request, + self.user, + organization_name=self.organization_name, + ) + + assert result == { + "username": self.user.username, + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" 
+ f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_member_invite_declined_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-member-invite-declined" + ) + + result = email.send_organization_member_invite_declined_email( + db_request, + self.initiator_user, + user=self.user, + organization_name=self.organization_name, + ) + + assert result == { + "username": self.user.username, + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.initiator_user.name} <{self.initiator_user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.initiator_user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.initiator_user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + def test_send_declined_as_invited_organization_member_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "declined-as-invited-organization-member" + ) + + result = email.send_declined_as_invited_organization_member_email( + db_request, + self.user, + organization_name=self.organization_name, + ) + + assert result == { + "username": self.user.username, + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_member_added_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-member-added" + ) + + result = email.send_organization_member_added_email( + db_request, + self.initiator_user, + user=self.user, + submitter=self.initiator_user, + organization_name=self.organization_name, + role=self.desired_role, + ) + + assert result == { + "username": self.user.username, + "submitter": self.initiator_user.username, + "organization_name": self.organization_name, + "role": self.desired_role, + } + subject_renderer.assert_(**result) + 
body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.initiator_user.name} <{self.initiator_user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.initiator_user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.initiator_user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + def test_send_added_as_organization_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "added-as-organization-member" + ) + + result = email.send_added_as_organization_member_email( + db_request, + self.user, + submitter=self.initiator_user, + organization_name=self.organization_name, + role=self.desired_role, + ) + + assert result == { + "username": self.user.username, + "submitter": self.initiator_user.username, + "organization_name": self.organization_name, + "role": self.desired_role, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_member_removed_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-member-removed" + ) + + result = email.send_organization_member_removed_email( + db_request, + self.initiator_user, + user=self.user, + submitter=self.initiator_user, + organization_name=self.organization_name, + ) + + assert result == { + "username": self.user.username, + "submitter": self.initiator_user.username, + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.initiator_user.name} <{self.initiator_user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.initiator_user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.initiator_user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + def test_send_removed_as_organization_email( + self, + 
db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "removed-as-organization-member" + ) + + result = email.send_removed_as_organization_member_email( + db_request, + self.user, + submitter=self.initiator_user, + organization_name=self.organization_name, + ) + + assert result == { + "username": self.user.username, + "submitter": self.initiator_user.username, + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_member_role_changed_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-member-role-changed" + ) + + result = email.send_organization_member_role_changed_email( + db_request, + self.initiator_user, + user=self.user, + submitter=self.initiator_user, + organization_name=self.organization_name, + role=self.desired_role, + ) + + assert result == { + "username": self.user.username, + "submitter": self.initiator_user.username, + "organization_name": self.organization_name, + "role": self.desired_role, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.initiator_user.name} <{self.initiator_user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.initiator_user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.initiator_user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + def test_send_role_changed_as_organization_email( + self, + db_request, + organization_invite, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "role-changed-as-organization-member" + ) + + result = email.send_role_changed_as_organization_member_email( + db_request, + self.user, + submitter=self.initiator_user, + organization_name=self.organization_name, + role=self.desired_role, + ) + + assert result == { + "username": self.user.username, + "submitter": self.initiator_user.username, + "organization_name": self.organization_name, + "role": self.desired_role, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + 
pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + class TestCollaboratorAddedEmail: def test_collaborator_added_email( self, pyramid_request, pyramid_config, monkeypatch diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -45,6 +45,13 @@ from warehouse.metrics.interfaces import IMetricsService from warehouse.oidc.interfaces import TooManyOIDCRegistrations from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.models import ( + OrganizationInvitation, + OrganizationInvitationStatus, + OrganizationRole, + OrganizationRoleType, + OrganizationType, +) from warehouse.packaging.models import ( File, JournalEntry, @@ -58,7 +65,11 @@ from warehouse.utils.project import remove_documentation from ...common.db.accounts import EmailFactory -from ...common.db.organizations import OrganizationFactory +from ...common.db.organizations import ( + OrganizationFactory, + OrganizationInvitationFactory, + OrganizationRoleFactory, +) from ...common.db.packaging import ( FileFactory, JournalEntryFactory, @@ -2326,7 +2337,8 @@ def test_default_response(self, monkeypatch): monkeypatch.setattr(views, "user_organizations", user_organizations) organization_service = pretend.stub( - get_organizations_by_user=lambda *a, **kw: [organization] + get_organizations_by_user=lambda *a, **kw: [organization], + get_organization_invites_by_user=lambda *a, **kw: [], ) user_service = pretend.stub() request = pretend.stub( @@ -2340,11 +2352,12 @@ def test_default_response(self, monkeypatch): view = views.ManageOrganizationsViews(request) assert view.default_response == { - "create_organization_form": create_organization_obj, + "organization_invites": [], "organizations": [organization], "organizations_managed": [], "organizations_owned": [organization.name], "organizations_billing": [], + "create_organization_form": create_organization_obj, } def test_manage_organizations(self, monkeypatch): @@ -2469,9 +2482,9 @@ def test_create_organization(self, monkeypatch): ] assert organization_service.add_organization_role.calls == [ pretend.call( - "Owner", - request.user.id, organization.id, + request.user.id, + OrganizationRoleType.Owner, ) ] assert organization_service.record_event.calls == [ @@ -2614,24 +2627,941 @@ def test_create_organizations_disable_organizations(self, monkeypatch): class TestManageOrganizationRoles: - def test_get_manage_organization_roles(self, db_request): + def test_get_manage_organization_roles(self, db_request, enable_organizations): organization = OrganizationFactory.create(name="foobar") - request = pretend.stub( - flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), - ) + form_obj = pretend.stub() - result = views.manage_organization_roles(organization, request) + def form_class(*a, **kw): + return form_obj - assert result == {"organization": organization} + result = views.manage_organization_roles( + organization, db_request, _form_class=form_class + ) + assert 
result == { + "organization": organization, + "roles": set(), + "invitations": set(), + "form": form_obj, + } def test_get_manage_organization_roles_disable_organizations(self, db_request): organization = OrganizationFactory.create(name="foobar") - request = pretend.stub( - flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), - ) with pytest.raises(HTTPNotFound): - views.manage_organization_roles(organization, request) + views.manage_organization_roles(organization, db_request) + + @pytest.mark.parametrize("orgtype", list(OrganizationType)) + def test_post_new_organization_role( + self, + db_request, + orgtype, + organization_service, + user_service, + token_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create(name="foobar", orgtype=orgtype) + new_user = UserFactory.create(username="new_user") + EmailFactory.create(user=new_user, verified=True, primary=True) + owner_1 = UserFactory.create(username="owner_1") + owner_2 = UserFactory.create(username="owner_2") + owner_1_role = OrganizationRoleFactory.create( + organization=organization, + user=owner_1, + role_name=OrganizationRoleType.Owner, + ) + owner_2_role = OrganizationRoleFactory.create( + organization=organization, + user=owner_2, + role_name=OrganizationRoleType.Owner, + ) + + db_request.method = "POST" + db_request.POST = MultiDict( + {"username": new_user.username, "role_name": "Owner"} + ) + db_request.user = owner_1 + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + + send_organization_member_invited_email = pretend.call_recorder( + lambda r, u, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_member_invited_email", + send_organization_member_invited_email, + ) + send_organization_role_verification_email = pretend.call_recorder( + lambda r, u, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_role_verification_email", + send_organization_role_verification_email, + ) + + result = views.manage_organization_roles(organization, db_request) + form_obj = result["form"] + + assert db_request.session.flash.calls == [ + pretend.call(f"Invitation sent to '{new_user.username}'", queue="success") + ] + + # Only one role invitation is created + organization_invitation = ( + db_request.db.query(OrganizationInvitation) + .filter(OrganizationInvitation.user == new_user) + .filter(OrganizationInvitation.organization == organization) + .one() + ) + + assert result == { + "organization": organization, + "roles": {owner_1_role, owner_2_role}, + "invitations": {organization_invitation}, + "form": form_obj, + } + assert send_organization_member_invited_email.calls == [ + pretend.call( + db_request, + {owner_1, owner_2}, + user=new_user, + desired_role=db_request.POST["role_name"], + initiator_username=db_request.user.username, + organization_name=organization.name, + email_token=token_service.dumps( + { + "action": "email-organization-role-verify", + "desired_role": db_request.POST["role_name"], + "user_id": new_user.id, + "organization_id": organization.id, + "submitter_id": db_request.user.id, + } + ), + token_age=token_service.max_age, + ) + ] + assert send_organization_role_verification_email.calls == [ + pretend.call( + db_request, + new_user, + desired_role=db_request.POST["role_name"], + initiator_username=db_request.user.username, + organization_name=organization.name, + email_token=token_service.dumps( + { + "action": "email-organization-role-verify", + "desired_role": db_request.POST["role_name"], + 
"user_id": new_user.id, + "organization_id": organization.id, + "submitter_id": db_request.user.id, + } + ), + token_age=token_service.max_age, + ) + ] + + def test_post_duplicate_organization_role( + self, db_request, organization_service, user_service, enable_organizations + ): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + organization=organization, + user=user, + role_name=OrganizationRoleType.Owner, + ) + + db_request.method = "POST" + db_request.POST = pretend.stub() + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + username=pretend.stub(data=user.username), + role_name=pretend.stub(data=role.role_name), + ) + form_class = pretend.call_recorder(lambda *a, **kw: form_obj) + + result = views.manage_organization_roles( + organization, db_request, _form_class=form_class + ) + + assert form_obj.validate.calls == [pretend.call()] + assert form_class.calls == [ + pretend.call( + db_request.POST, + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + pretend.call( + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + ] + assert db_request.session.flash.calls == [ + pretend.call( + "User 'testuser' already has Owner role for organization", queue="error" + ) + ] + + # No additional roles are created + assert role == db_request.db.query(OrganizationRole).one() + + assert result == { + "organization": organization, + "roles": {role}, + "invitations": set(), + "form": form_obj, + } + + @pytest.mark.parametrize("with_email", [True, False]) + def test_post_unverified_email( + self, + db_request, + organization_service, + user_service, + enable_organizations, + with_email, + ): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + if with_email: + EmailFactory.create(user=user, verified=False, primary=True) + + db_request.method = "POST" + db_request.POST = pretend.stub() + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + username=pretend.stub(data=user.username), + role_name=pretend.stub(data="Owner"), + ) + form_class = pretend.call_recorder(lambda *a, **kw: form_obj) + + result = views.manage_organization_roles( + organization, db_request, _form_class=form_class + ) + + assert form_obj.validate.calls == [pretend.call()] + assert form_class.calls == [ + pretend.call( + db_request.POST, + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + pretend.call( + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + ] + assert db_request.session.flash.calls == [ + pretend.call( + "User 'testuser' does not have a verified primary email address " + "and cannot be added as a Owner for organization", + queue="error", + ) + ] + + # No additional roles are created + assert db_request.db.query(OrganizationRole).all() == [] + + assert result == { + "organization": organization, + "roles": set(), + "invitations": set(), + "form": form_obj, + } + + def test_cannot_reinvite_organization_role( + self, db_request, organization_service, user_service, enable_organizations + ): + organization = 
OrganizationFactory.create(name="foobar") + new_user = UserFactory.create(username="new_user") + EmailFactory.create(user=new_user, verified=True, primary=True) + owner_1 = UserFactory.create(username="owner_1") + owner_2 = UserFactory.create(username="owner_2") + owner_1_role = OrganizationRoleFactory.create( + organization=organization, + user=owner_1, + role_name=OrganizationRoleType.Owner, + ) + owner_2_role = OrganizationRoleFactory.create( + organization=organization, + user=owner_2, + role_name=OrganizationRoleType.Owner, + ) + token_service = db_request.find_service(ITokenService, name="email") + new_organization_invitation = OrganizationInvitationFactory.create( + organization=organization, + user=new_user, + invite_status=OrganizationInvitationStatus.Pending, + token=token_service.dumps({"action": "email-organization-role-verify"}), + ) + + db_request.method = "POST" + db_request.POST = pretend.stub() + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_1 + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + username=pretend.stub(data=new_user.username), + role_name=pretend.stub(data="Owner"), + ) + form_class = pretend.call_recorder(lambda *a, **kw: form_obj) + + result = views.manage_organization_roles( + organization, db_request, _form_class=form_class + ) + + assert form_obj.validate.calls == [pretend.call()] + assert form_class.calls == [ + pretend.call( + db_request.POST, + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + pretend.call( + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + ] + assert db_request.session.flash.calls == [ + pretend.call( + "User 'new_user' already has an active invite. 
Please try again later.", + queue="error", + ) + ] + + assert result == { + "organization": organization, + "roles": {owner_1_role, owner_2_role}, + "invitations": {new_organization_invitation}, + "form": form_obj, + } + + def test_reinvite_organization_role_after_expiration( + self, + db_request, + organization_service, + user_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create(name="foobar") + new_user = UserFactory.create(username="new_user") + EmailFactory.create(user=new_user, verified=True, primary=True) + owner_1 = UserFactory.create(username="owner_1") + owner_2 = UserFactory.create(username="owner_2") + owner_1_role = OrganizationRoleFactory.create( + organization=organization, + user=owner_1, + role_name=OrganizationRoleType.Owner, + ) + owner_2_role = OrganizationRoleFactory.create( + user=owner_2, + organization=organization, + role_name=OrganizationRoleType.Owner, + ) + token_service = db_request.find_service(ITokenService, name="email") + new_organization_invitation = OrganizationInvitationFactory.create( + user=new_user, + organization=organization, + invite_status=OrganizationInvitationStatus.Expired, + token=token_service.dumps({}), + ) + + db_request.method = "POST" + db_request.POST = pretend.stub() + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_1 + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + form_obj = pretend.stub( + validate=pretend.call_recorder(lambda: True), + username=pretend.stub(data=new_user.username), + role_name=pretend.stub(data="Owner"), + ) + form_class = pretend.call_recorder(lambda *a, **kw: form_obj) + + send_organization_member_invited_email = pretend.call_recorder( + lambda r, u, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_member_invited_email", + send_organization_member_invited_email, + ) + send_organization_role_verification_email = pretend.call_recorder( + lambda r, u, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_role_verification_email", + send_organization_role_verification_email, + ) + + result = views.manage_organization_roles( + organization, db_request, _form_class=form_class + ) + + assert form_obj.validate.calls == [pretend.call()] + assert form_class.calls == [ + pretend.call( + db_request.POST, + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + pretend.call( + orgtype=organization.orgtype, + organization_service=organization_service, + user_service=user_service, + ), + ] + assert db_request.session.flash.calls == [ + pretend.call(f"Invitation sent to '{new_user.username}'", queue="success") + ] + + # Only one role invitation is created + organization_invitation = ( + db_request.db.query(OrganizationInvitation) + .filter(OrganizationInvitation.user == new_user) + .filter(OrganizationInvitation.organization == organization) + .one() + ) + + assert result["invitations"] == {new_organization_invitation} + assert result == { + "organization": organization, + "roles": {owner_1_role, owner_2_role}, + "invitations": {organization_invitation}, + "form": form_obj, + } + assert send_organization_member_invited_email.calls == [ + pretend.call( + db_request, + {owner_1, owner_2}, + user=new_user, + desired_role=form_obj.role_name.data, + initiator_username=db_request.user.username, + organization_name=organization.name, + email_token=token_service.dumps( + { + "action": "email-organization-role-verify", + "desired_role": 
form_obj.role_name.data, + "user_id": new_user.id, + "organization_id": organization.id, + "submitter_id": db_request.user.id, + } + ), + token_age=token_service.max_age, + ) + ] + assert send_organization_role_verification_email.calls == [ + pretend.call( + db_request, + new_user, + desired_role=form_obj.role_name.data, + initiator_username=db_request.user.username, + organization_name=organization.name, + email_token=token_service.dumps( + { + "action": "email-organization-role-verify", + "desired_role": form_obj.role_name.data, + "user_id": new_user.id, + "organization_id": organization.id, + "submitter_id": db_request.user.id, + } + ), + token_age=token_service.max_age, + ) + ] + + +class TestRevokeOrganizationInvitation: + def test_revoke_invitation( + self, db_request, token_service, enable_organizations, monkeypatch + ): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + OrganizationInvitationFactory.create( + organization=organization, + user=user, + ) + owner_user = UserFactory.create() + OrganizationRoleFactory( + user=owner_user, + organization=organization, + role_name=OrganizationRoleType.Owner, + ) + + db_request.method = "POST" + db_request.POST = MultiDict({"user_id": user.id, "token": "TOKEN"}) + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_user + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/organizations" + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + token_service.loads = pretend.call_recorder( + lambda data: { + "action": "email-organization-role-verify", + "desired_role": "Manager", + "user_id": user.id, + "organization_id": organization.id, + "submitter_id": owner_user.id, + } + ) + + organization_member_invite_canceled_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, + "send_organization_member_invite_canceled_email", + organization_member_invite_canceled_email, + ) + canceled_as_invited_organization_member_email = pretend.call_recorder( + lambda *args, **kwargs: None + ) + monkeypatch.setattr( + views, + "send_canceled_as_invited_organization_member_email", + canceled_as_invited_organization_member_email, + ) + + result = views.revoke_organization_invitation(organization, db_request) + db_request.db.flush() + + assert not ( + db_request.db.query(OrganizationInvitation) + .filter(OrganizationInvitation.user == user) + .filter(OrganizationInvitation.organization == organization) + .one_or_none() + ) + assert organization_member_invite_canceled_email.calls == [ + pretend.call( + db_request, + {owner_user}, + user=user, + organization_name=organization.name, + ) + ] + assert canceled_as_invited_organization_member_email.calls == [ + pretend.call( + db_request, + user, + organization_name=organization.name, + ) + ] + assert db_request.session.flash.calls == [ + pretend.call(f"Invitation revoked from '{user.username}'.", queue="success") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/organizations" + + def test_invitation_does_not_exist( + self, db_request, token_service, enable_organizations + ): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + owner_user = UserFactory.create() + OrganizationRoleFactory( + user=owner_user, + organization=organization, + role_name=OrganizationRoleType.Owner, + ) + + db_request.method = "POST" + db_request.POST = 
MultiDict({"user_id": user.id, "token": "TOKEN"}) + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_user + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/organizations" + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + token_service.loads = pretend.call_recorder(lambda data: None) + + result = views.revoke_organization_invitation(organization, db_request) + db_request.db.flush() + + assert db_request.session.flash.calls == [ + pretend.call("Could not find organization invitation.", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/organizations" + + def test_token_expired(self, db_request, token_service, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + OrganizationInvitationFactory.create( + organization=organization, + user=user, + ) + owner_user = UserFactory.create() + OrganizationRoleFactory( + user=owner_user, + organization=organization, + role_name=OrganizationRoleType.Owner, + ) + + db_request.method = "POST" + db_request.POST = MultiDict({"user_id": user.id, "token": "TOKEN"}) + db_request.remote_addr = "10.10.10.10" + db_request.user = owner_user + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/organizations/roles" + ) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + token_service.loads = pretend.call_recorder(pretend.raiser(TokenExpired)) + + result = views.revoke_organization_invitation(organization, db_request) + db_request.db.flush() + + assert not ( + db_request.db.query(OrganizationInvitation) + .filter(OrganizationInvitation.user == user) + .filter(OrganizationInvitation.organization == organization) + .one_or_none() + ) + assert db_request.session.flash.calls == [ + pretend.call("Invitation already expired.", queue="success") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/organizations/roles" + + +class TestChangeOrganizationRole: + @pytest.mark.parametrize("orgtype", list(OrganizationType)) + def test_change_role(self, db_request, orgtype, enable_organizations, monkeypatch): + organization = OrganizationFactory.create(name="foobar", orgtype=orgtype) + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + organization=organization, + user=user, + role_name=OrganizationRoleType.Owner, + ) + new_role_name = "Manager" + + user_2 = UserFactory.create() + + db_request.method = "POST" + db_request.POST = MultiDict({"role_id": role.id, "role_name": new_role_name}) + db_request.user = user_2 + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + send_organization_member_role_changed_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_organization_member_role_changed_email", + send_organization_member_role_changed_email, + ) + send_role_changed_as_organization_member_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_role_changed_as_organization_member_email", + send_role_changed_as_organization_member_email, + ) + + result = views.change_organization_role(organization, db_request) + + assert role.role_name == new_role_name + assert db_request.route_path.calls == [ + 
pretend.call( + "manage.organization.roles", organization_name=organization.name + ) + ] + assert send_organization_member_role_changed_email.calls == [ + pretend.call( + db_request, + set(), + user=user, + submitter=user_2, + organization_name="foobar", + role=new_role_name, + ) + ] + assert send_role_changed_as_organization_member_email.calls == [ + pretend.call( + db_request, + user, + submitter=user_2, + organization_name="foobar", + role=new_role_name, + ) + ] + assert db_request.session.flash.calls == [ + pretend.call("Changed role", queue="success") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_change_organization_role_invalid_role_name( + self, db_request, enable_organizations + ): + organization = OrganizationFactory.create(name="foobar") + + db_request.method = "POST" + db_request.POST = MultiDict( + {"role_id": str(uuid.uuid4()), "role_name": "Invalid Role Name"} + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.change_organization_role(organization, db_request) + + assert db_request.route_path.calls == [ + pretend.call( + "manage.organization.roles", organization_name=organization.name + ) + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_change_missing_organization_role(self, db_request, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + missing_role_id = str(uuid.uuid4()) + + db_request.method = "POST" + db_request.POST = MultiDict({"role_id": missing_role_id, "role_name": "Owner"}) + db_request.user = pretend.stub() + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.change_organization_role(organization, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Could not find member", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_change_own_owner_organization_role(self, db_request, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + user=user, organization=organization, role_name="Owner" + ) + + db_request.method = "POST" + db_request.user = user + db_request.POST = MultiDict({"role_id": role.id, "role_name": "Manager"}) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.change_organization_role(organization, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Cannot remove yourself as Owner", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + +class TestDeleteOrganizationRoles: + def test_delete_role(self, db_request, enable_organizations, monkeypatch): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + organization=organization, + user=user, + role_name=OrganizationRoleType.Owner, + ) + user_2 = UserFactory.create() + + db_request.method = "POST" + db_request.POST = MultiDict({"role_id": role.id}) + db_request.user = user_2 + db_request.session = pretend.stub( + 
flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + send_organization_member_removed_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_organization_member_removed_email", + send_organization_member_removed_email, + ) + send_removed_as_organization_member_email = pretend.call_recorder( + lambda *a, **kw: None + ) + monkeypatch.setattr( + views, + "send_removed_as_organization_member_email", + send_removed_as_organization_member_email, + ) + + result = views.delete_organization_role(organization, db_request) + + assert db_request.route_path.calls == [ + pretend.call( + "manage.organization.roles", organization_name=organization.name + ) + ] + assert db_request.db.query(OrganizationRole).all() == [] + assert send_organization_member_removed_email.calls == [ + pretend.call( + db_request, + set(), + user=user, + submitter=user_2, + organization_name="foobar", + ) + ] + assert send_removed_as_organization_member_email.calls == [ + pretend.call( + db_request, + user, + submitter=user_2, + organization_name="foobar", + ) + ] + assert db_request.session.flash.calls == [ + pretend.call("Removed from organization", queue="success") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_delete_missing_role(self, db_request, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + missing_role_id = str(uuid.uuid4()) + + db_request.method = "POST" + db_request.user = pretend.stub() + db_request.POST = MultiDict({"role_id": missing_role_id}) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.delete_organization_role(organization, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Could not find member", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_delete_other_role_as_nonowner(self, db_request, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + organization=organization, + user=user, + role_name=OrganizationRoleType.Owner, + ) + user_2 = UserFactory.create() + + db_request.method = "POST" + db_request.user = user_2 + db_request.POST = MultiDict({"role_id": role.id}) + db_request.has_permission = pretend.call_recorder(lambda *a, **kw: False) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.delete_organization_role(organization, db_request) + + assert db_request.has_permission.calls == [pretend.call("manage:organization")] + assert db_request.session.flash.calls == [ + pretend.call( + "Cannot remove other people from the organization", queue="error" + ) + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_delete_own_owner_role(self, db_request, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + organization=organization, + user=user, + role_name=OrganizationRoleType.Owner, + ) + + 
db_request.method = "POST" + db_request.user = user + db_request.POST = MultiDict({"role_id": role.id}) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.delete_organization_role(organization, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Cannot remove yourself as Owner", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_delete_non_owner_role(self, db_request, enable_organizations): + organization = OrganizationFactory.create(name="foobar") + user = UserFactory.create(username="testuser") + role = OrganizationRoleFactory.create( + organization=organization, + user=user, + role_name=OrganizationRoleType.Owner, + ) + + some_other_user = UserFactory.create(username="someotheruser") + some_other_organization = OrganizationFactory.create( + name="someotherorganization" + ) + + db_request.method = "POST" + db_request.user = some_other_user + db_request.POST = MultiDict({"role_id": role.id}) + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + + result = views.delete_organization_role(some_other_organization, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Could not find member", queue="error") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" class TestManageProjects: @@ -4526,7 +5456,7 @@ def test_token_expired(self, db_request, token_service): assert result.headers["Location"] == "/manage/projects/roles" -class TestChangeProjectRoles: +class TestChangeProjectRole: def test_change_role(self, db_request, monkeypatch): project = ProjectFactory.create(name="foobar") user = UserFactory.create(username="testuser") diff --git a/tests/unit/organizations/test_models.py b/tests/unit/organizations/test_models.py --- a/tests/unit/organizations/test_models.py +++ b/tests/unit/organizations/test_models.py @@ -12,9 +12,15 @@ import pytest -from warehouse.organizations.models import OrganizationFactory +from pyramid.authorization import Allow +from pyramid.location import lineage -from ...common.db.organizations import OrganizationFactory as DBOrganizationFactory +from warehouse.organizations.models import OrganizationFactory, OrganizationRoleType + +from ...common.db.organizations import ( + OrganizationFactory as DBOrganizationFactory, + OrganizationRoleFactory as DBOrganizationRoleFactory, +) class TestOrganizationFactory: @@ -31,3 +37,93 @@ def test_traversal_cant_find(self, db_request): with pytest.raises(KeyError): root[organization.name + "invalid"] + + +class TestOrganization: + def test_acl(self, db_session): + organization = DBOrganizationFactory.create() + owner1 = DBOrganizationRoleFactory.create(organization=organization) + owner2 = DBOrganizationRoleFactory.create(organization=organization) + billing_mgr1 = DBOrganizationRoleFactory.create( + organization=organization, role_name=OrganizationRoleType.BillingManager + ) + billing_mgr2 = DBOrganizationRoleFactory.create( + organization=organization, role_name=OrganizationRoleType.BillingManager + ) + account_mgr1 = DBOrganizationRoleFactory.create( + organization=organization, role_name=OrganizationRoleType.Manager + ) + account_mgr2 = DBOrganizationRoleFactory.create( + organization=organization, 
role_name=OrganizationRoleType.Manager + ) + member1 = DBOrganizationRoleFactory.create( + organization=organization, role_name=OrganizationRoleType.Member + ) + member2 = DBOrganizationRoleFactory.create( + organization=organization, role_name=OrganizationRoleType.Member + ) + + acls = [] + for location in lineage(organization): + try: + acl = location.__acl__ + except AttributeError: + continue + + if acl and callable(acl): + acl = acl() + + acls.extend(acl) + + assert acls == [ + (Allow, "group:admins", "admin"), + (Allow, "group:moderators", "moderator"), + ] + sorted( + [ + ( + Allow, + f"user:{owner1.user.id}", + ["view:organization", "manage:organization"], + ), + ( + Allow, + f"user:{owner2.user.id}", + ["view:organization", "manage:organization"], + ), + ], + key=lambda x: x[1], + ) + sorted( + [ + ( + Allow, + f"user:{billing_mgr1.user.id}", + ["view:organization", "manage:billing"], + ), + ( + Allow, + f"user:{billing_mgr2.user.id}", + ["view:organization", "manage:billing"], + ), + ], + key=lambda x: x[1], + ) + sorted( + [ + ( + Allow, + f"user:{account_mgr1.user.id}", + ["view:organization", "manage:team"], + ), + ( + Allow, + f"user:{account_mgr2.user.id}", + ["view:organization", "manage:team"], + ), + ], + key=lambda x: x[1], + ) + sorted( + [ + (Allow, f"user:{member1.user.id}", ["view:organization"]), + (Allow, f"user:{member2.user.id}", ["view:organization"]), + ], + key=lambda x: x[1], + ) diff --git a/tests/unit/organizations/test_services.py b/tests/unit/organizations/test_services.py --- a/tests/unit/organizations/test_services.py +++ b/tests/unit/organizations/test_services.py @@ -18,7 +18,12 @@ from warehouse.organizations.interfaces import IOrganizationService from warehouse.organizations.models import OrganizationRoleType -from ...common.db.organizations import OrganizationFactory, UserFactory +from ...common.db.organizations import ( + OrganizationFactory, + OrganizationInvitationFactory, + OrganizationRoleFactory, + UserFactory, +) def test_database_organizations_factory(): @@ -91,15 +96,17 @@ def test_get_organizations_by_user(self, organization_service, user_service): user_organization = OrganizationFactory.create() user = UserFactory.create() organization_service.add_organization_role( - OrganizationRoleType.Owner.value, user.id, user_organization.id + user_organization.id, + user.id, + OrganizationRoleType.Owner.value, ) another_user_organization = OrganizationFactory.create() another_user = UserFactory.create() organization_service.add_organization_role( - OrganizationRoleType.Owner.value, - another_user.id, another_user_organization.id, + another_user.id, + OrganizationRoleType.Owner.value, ) user_orgs = organization_service.get_organizations_by_user(user.id) @@ -138,17 +145,156 @@ def test_add_catalog_entry(self, organization_service): assert catalog_entry.normalized_name == organization.normalized_name assert catalog_entry.organization_id == organization.id + def test_get_organization_role(self, organization_service, user_service): + organization_role = OrganizationRoleFactory.create() + + assert ( + organization_service.get_organization_role(organization_role.id) + == organization_role + ) + + def test_get_organization_role_by_user(self, organization_service, user_service): + organization_role = OrganizationRoleFactory.create() + + assert ( + organization_service.get_organization_role_by_user( + organization_role.organization_id, + organization_role.user_id, + ) + == organization_role + ) + + def 
test_get_organization_role_by_user_nonexistent_role(self, organization_service): + user = UserFactory.create() + organization = OrganizationFactory.create() + + assert ( + organization_service.get_organization_role_by_user(organization.id, user.id) + is None + ) + + def test_get_organization_roles(self, organization_service, user_service): + organization = OrganizationFactory.create() + user = UserFactory.create() + another_user = UserFactory.create() + + added_owner = organization_service.add_organization_role( + organization.id, + user.id, + OrganizationRoleType.Owner.value, + ) + added_member = organization_service.add_organization_role( + organization.id, + another_user.id, + OrganizationRoleType.Member.value, + ) + + org_roles = organization_service.get_organization_roles(organization.id) + + assert added_owner in org_roles + assert added_member in org_roles + def test_add_organization_role(self, organization_service, user_service): user = UserFactory.create() organization = OrganizationFactory.create() added_role = organization_service.add_organization_role( - OrganizationRoleType.Owner.value, user.id, organization.id + organization.id, + user.id, + OrganizationRoleType.Owner.value, ) assert added_role.role_name == OrganizationRoleType.Owner.value assert added_role.user_id == user.id assert added_role.organization_id == organization.id + def test_delete_organization_role(self, organization_service, user_service): + organization_role = OrganizationRoleFactory.create() + + organization_service.delete_organization_role(organization_role.id) + + assert ( + organization_service.get_organization_role_by_user( + organization_role.organization_id, + organization_role.user_id, + ) + is None + ) + + def test_get_organization_invite(self, organization_service): + organization_invite = OrganizationInvitationFactory.create() + + assert ( + organization_service.get_organization_invite(organization_invite.id) + is not None + ) + + def test_get_organization_invite_by_user(self, organization_service): + organization_invite = OrganizationInvitationFactory.create() + + assert ( + organization_service.get_organization_invite_by_user( + organization_invite.organization_id, organization_invite.user_id + ) + is not None + ) + + def test_get_organization_invite_by_user_nonexistent_invite( + self, organization_service + ): + user = UserFactory.create() + organization = OrganizationFactory.create() + + assert ( + organization_service.get_organization_invite_by_user( + organization.id, user.id + ) + is None + ) + + def test_get_organization_invites(self, organization_service, user_service): + user = UserFactory.create() + organization = OrganizationFactory.create() + another_organization = OrganizationFactory.create() + + invite = organization_service.add_organization_invite( + organization.id, + user.id, + "some_token", + ) + another_invite = organization_service.add_organization_invite( + another_organization.id, + user.id, + "some_token", + ) + + invites = organization_service.get_organization_invites_by_user(user.id) + + assert invite in invites + assert another_invite in invites + + def test_add_organization_invite(self, organization_service, user_service): + user = UserFactory.create() + organization = OrganizationFactory.create() + + added_invite = organization_service.add_organization_invite( + organization.id, + user.id, + "some_token", + ) + + assert added_invite.user_id == user.id + assert added_invite.organization_id == organization.id + assert added_invite.token == "some_token" + + def 
test_delete_organization_invite(self, organization_service): + organization_invite = OrganizationInvitationFactory.create() + + organization_service.delete_organization_invite(organization_invite.id) + + assert ( + organization_service.get_organization_invite(organization_invite.id) is None + ) + def test_approve_organization(self, organization_service): organization = OrganizationFactory.create() organization_service.approve_organization(organization.id) @@ -163,6 +309,3 @@ def test_decline_organization(self, organization_service): assert organization.is_approved is False assert organization.date_approved is not None - - # def test_record_event(self, organization_id, *, tag, additional=None): - # raise NotImplementedError diff --git a/tests/unit/organizations/test_tasks.py b/tests/unit/organizations/test_tasks.py new file mode 100644 --- /dev/null +++ b/tests/unit/organizations/test_tasks.py @@ -0,0 +1,55 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse.accounts.interfaces import ITokenService, TokenExpired +from warehouse.organizations.models import OrganizationInvitationStatus +from warehouse.organizations.tasks import update_organization_invitation_status + +from ...common.db.organizations import ( + OrganizationFactory, + OrganizationInvitationFactory, + UserFactory, +) + + +class TestUpdateInvitationStatus: + def test_update_invitation_status(self, db_request): + organization = OrganizationFactory.create() + user = UserFactory.create() + invite = OrganizationInvitationFactory(user=user, organization=organization) + + token_service = pretend.stub(loads=pretend.raiser(TokenExpired)) + db_request.find_service = pretend.call_recorder(lambda *a, **kw: token_service) + + update_organization_invitation_status(db_request) + + assert db_request.find_service.calls == [ + pretend.call(ITokenService, name="email") + ] + assert invite.invite_status == OrganizationInvitationStatus.Expired + + def test_no_updates(self, db_request): + organization = OrganizationFactory.create() + user = UserFactory.create() + invite = OrganizationInvitationFactory(user=user, organization=organization) + + token_service = pretend.stub(loads=lambda token: {}) + db_request.find_service = pretend.call_recorder(lambda *a, **kw: token_service) + + update_organization_invitation_status(db_request) + + assert db_request.find_service.calls == [ + pretend.call(ITokenService, name="email") + ] + assert invite.invite_status == OrganizationInvitationStatus.Pending diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -178,6 +178,11 @@ def add_policy(name, filename): pretend.call( "accounts.verify-email", "/account/verify-email/", domain=warehouse ), + pretend.call( + "accounts.verify-organization-role", + "/account/verify-organization-role/", + domain=warehouse, + ), pretend.call( "accounts.verify-project-role", "/account/verify-project-role/", @@ -247,6 +252,27 @@ def add_policy(name, filename): 
traverse="/{organization_name}", domain=warehouse, ), + pretend.call( + "manage.organization.revoke_invite", + "/manage/organization/{organization_name}/people/revoke_invite/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ), + pretend.call( + "manage.organization.change_role", + "/manage/organization/{organization_name}/people/change/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ), + pretend.call( + "manage.organization.delete_role", + "/manage/organization/{organization_name}/people/delete/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ), pretend.call("manage.projects", "/manage/projects/", domain=warehouse), pretend.call( "manage.project.settings",
Accept/reject Organization invite

Feature request for organization account project in PyPI.

Description | When a User has been invited to join an Organization, the User can accept or reject the invite
-- | --
User value | When an Owner adds a User to the organization, they are not added automatically to the Organization
Acceptance criteria | Email notification to account Owners and User
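Below is a minimal, self-contained sketch of the invite lifecycle this request describes: an invitation stays pending until the invited user accepts or declines it, and lapses after an assumed lifetime, mirroring the Pending/Expired statuses exercised in the tests above. The class names, the `notify` helper, and the seven-day `MAX_AGE` are illustrative assumptions only; the actual patch validates a signed token via the email token service (`ITokenService`/`TokenExpired`) rather than comparing timestamps directly.

```python
import enum
from dataclasses import dataclass, field
from datetime import datetime, timedelta, timezone


class InvitationStatus(enum.Enum):
    PENDING = "pending"
    EXPIRED = "expired"


@dataclass
class Invitation:
    organization: str
    user: str
    role_name: str
    created: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    status: InvitationStatus = InvitationStatus.PENDING


@dataclass
class Organization:
    name: str
    # username -> role name, e.g. "Owner", "Manager", "Member"
    roles: dict = field(default_factory=dict)


MAX_AGE = timedelta(days=7)  # assumed invite lifetime, for illustration only


def refresh(invite: Invitation, now: datetime) -> None:
    """Mark a pending invite as expired, as the periodic task does via TokenExpired."""
    if invite.status is InvitationStatus.PENDING and now - invite.created > MAX_AGE:
        invite.status = InvitationStatus.EXPIRED


def accept(org: Organization, invite: Invitation, now: datetime) -> bool:
    """Accepting a still-valid invite grants the role; the invite itself is then discarded."""
    refresh(invite, now)
    if invite.status is not InvitationStatus.PENDING:
        return False
    org.roles[invite.user] = invite.role_name
    notify(org, invite, accepted=True)
    return True


def decline(org: Organization, invite: Invitation, now: datetime) -> bool:
    """Declining discards the invite without granting any role."""
    refresh(invite, now)
    if invite.status is not InvitationStatus.PENDING:
        return False
    notify(org, invite, accepted=False)
    return True


def notify(org: Organization, invite: Invitation, *, accepted: bool) -> None:
    # Stand-in for the email notifications to the account Owners and the User.
    verb = "accepted" if accepted else "declined"
    print(f"{invite.user} {verb} the invite to {org.name}")


if __name__ == "__main__":
    org = Organization("example")
    invite = Invitation("example", "alice", "Member")
    assert accept(org, invite, datetime.now(timezone.utc))
    assert org.roles["alice"] == "Member"
```

Per the acceptance criteria, both outcomes notify the account Owners and the invited User; only acceptance results in an `OrganizationRole` being created.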
2022-05-05T21:53:33Z
[]
[]
pypi/warehouse
11,379
pypi__warehouse-11379
[ "11072" ]
51ec8528b60e662584ed33e85038bda85ea2c5d1
diff --git a/warehouse/accounts/views.py b/warehouse/accounts/views.py --- a/warehouse/accounts/views.py +++ b/warehouse/accounts/views.py @@ -985,7 +985,7 @@ def _error(message): return HTTPSeeOther( request.route_path( - "manage.organization.roles", organization_name=organization.name + "manage.organization.roles", organization_name=organization.normalized_name ) ) diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -222,6 +222,23 @@ def send_admin_new_organization_declined_email( } +@_email("admin-organization-renamed") +def send_admin_organization_renamed_email( + request, user, *, organization_name, previous_organization_name +): + return { + "organization_name": organization_name, + "previous_organization_name": previous_organization_name, + } + + +@_email("admin-organization-deleted") +def send_admin_organization_deleted_email(request, user, *, organization_name): + return { + "organization_name": organization_name, + } + + # Email templates for users. @@ -529,6 +546,23 @@ def send_role_changed_as_organization_member_email( } +@_email("organization-renamed") +def send_organization_renamed_email( + request, user, *, organization_name, previous_organization_name +): + return { + "organization_name": organization_name, + "previous_organization_name": previous_organization_name, + } + + +@_email("organization-deleted") +def send_organization_deleted_email(request, user, *, organization_name): + return { + "organization_name": organization_name, + } + + @_email("verify-project-role", allow_unverified=True) def send_project_role_verification_email( request, diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -26,7 +26,7 @@ WebAuthnCredentialMixin, ) from warehouse.i18n import localize as _ -from warehouse.organizations.models import OrganizationType +from warehouse.organizations.models import OrganizationRoleType, OrganizationType # /manage/account/ forms @@ -322,6 +322,7 @@ class OrganizationRoleNameMixin: ("Owner", "Owner"), ("Billing Manager", "Billing Manager"), ], + coerce=lambda string: OrganizationRoleType(string) if string else None, validators=[wtforms.validators.DataRequired(message="Select role")], ) @@ -359,9 +360,8 @@ def validate_name(self, field): if self.organization_service.find_organizationid(field.data) is not None: raise wtforms.validators.ValidationError( _( - "This organization account name is already being " - "used by another account. Choose a different " - "organization account name." + "This organization account name has already been used. " + "Choose a different organization account name." 
) ) @@ -392,14 +392,19 @@ def __init__(self, *args, orgtype, **kwargs): ] -class CreateOrganizationForm(forms.Form, OrganizationNameMixin): +class SaveOrganizationNameForm(OrganizationNameMixin, forms.Form): - __params__ = ["name", "display_name", "link_url", "description", "orgtype"] + __params__ = ["name"] def __init__(self, *args, organization_service, **kwargs): super().__init__(*args, **kwargs) self.organization_service = organization_service + +class SaveOrganizationForm(forms.Form): + + __params__ = ["display_name", "link_url", "description", "orgtype"] + display_name = wtforms.StringField( validators=[ wtforms.validators.DataRequired(message="Specify your organization name"), @@ -439,8 +444,15 @@ def __init__(self, *args, organization_service, **kwargs): ] ) orgtype = wtforms.SelectField( + # TODO: Map additional choices to "Company" and "Community". choices=[("Company", "Company"), ("Community", "Community")], + coerce=OrganizationType, validators=[ wtforms.validators.DataRequired(message="Select organization type"), ], ) + + +class CreateOrganizationForm(SaveOrganizationNameForm, SaveOrganizationForm): + + __params__ = SaveOrganizationNameForm.__params__ + SaveOrganizationForm.__params__ diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -44,6 +44,8 @@ from warehouse.email import ( send_account_deletion_email, send_admin_new_organization_requested_email, + send_admin_organization_deleted_email, + send_admin_organization_renamed_email, send_canceled_as_invited_organization_member_email, send_collaborator_removed_email, send_collaborator_role_changed_email, @@ -51,10 +53,12 @@ send_new_organization_requested_email, send_oidc_provider_added_email, send_oidc_provider_removed_email, + send_organization_deleted_email, send_organization_member_invite_canceled_email, send_organization_member_invited_email, send_organization_member_removed_email, send_organization_member_role_changed_email, + send_organization_renamed_email, send_organization_role_verification_email, send_password_change_email, send_primary_email_change_email, @@ -90,6 +94,8 @@ ProvisionTOTPForm, ProvisionWebAuthnForm, SaveAccountForm, + SaveOrganizationForm, + SaveOrganizationNameForm, Toggle2FARequirementForm, ) from warehouse.metrics.interfaces import IMetricsService @@ -114,6 +120,7 @@ ) from warehouse.rate_limiting import IRateLimiter from warehouse.utils.http import is_safe_url +from warehouse.utils.organization import confirm_organization from warehouse.utils.paginate import paginate_url_factory from warehouse.utils.project import confirm_project, destroy_docs, remove_project @@ -1188,6 +1195,178 @@ def create_organization(self): return self.default_response +@view_defaults( + route_name="manage.organization.settings", + context=Organization, + renderer="manage/organization/settings.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="view:organization", + has_translations=True, + require_reauth=True, +) +class ManageOrganizationSettingsViews: + def __init__(self, organization, request): + self.organization = organization + self.request = request + self.user_service = request.find_service(IUserService, context=None) + self.organization_service = request.find_service( + IOrganizationService, context=None + ) + + @property + def active_projects(self): + return self.organization.projects + + @property + def default_response(self): + return { + "organization": self.organization, + 
"save_organization_form": SaveOrganizationForm( + name=self.organization.name, + display_name=self.organization.display_name, + link_url=self.organization.link_url, + description=self.organization.description, + orgtype=self.organization.orgtype, + organization_service=self.organization_service, + ), + "active_projects": self.active_projects, + } + + @view_config(request_method="GET") + def manage_organization(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + return self.default_response + + @view_config(request_method="POST", request_param=SaveOrganizationForm.__params__) + def save_organization(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + form = SaveOrganizationForm( + self.request.POST, + organization_service=self.organization_service, + ) + + if form.validate(): + data = form.data + self.organization_service.update_organization(self.organization.id, **data) + self.request.session.flash("Organization details updated", queue="success") + + return {**self.default_response, "save_organization_form": form} + + @view_config( + request_method="POST", + request_param=["confirm_current_organization_name"] + + SaveOrganizationNameForm.__params__, + ) + def save_organization_name(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + confirm_organization( + self.organization, + self.request, + fail_route="manage.organization.settings", + field_name="confirm_current_organization_name", + error_message="Could not rename organization", + ) + + form = SaveOrganizationNameForm( + self.request.POST, + organization_service=self.organization_service, + ) + + if form.validate(): + previous_organization_name = self.organization.name + self.organization_service.rename_organization( + self.organization.id, + form.name.data, + ) + self.organization.record_event( + tag="organization:rename", + ip_address=self.request.remote_addr, + additional={ + "previous_organization_name": previous_organization_name, + "renamed_by_user_id": str(self.request.user.id), + }, + ) + owner_users = set(organization_owners(self.request, self.organization)) + send_admin_organization_renamed_email( + self.request, + self.user_service.get_admins(), + organization_name=self.organization.name, + previous_organization_name=previous_organization_name, + ) + send_organization_renamed_email( + self.request, + owner_users, + organization_name=self.organization.name, + previous_organization_name=previous_organization_name, + ) + self.request.session.flash( + "Organization account name updated", queue="success" + ) + return HTTPSeeOther( + self.request.route_path( + "manage.organization.settings", + organization_name=self.organization.normalized_name, + ) + ) + else: + for error_list in form.errors.values(): + for error in error_list: + self.request.session.flash(error, queue="error") + + return self.default_response + + @view_config(request_method="POST", request_param=["confirm_organization_name"]) + def delete_organization(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + confirm_organization( + self.organization, self.request, fail_route="manage.organization.settings" + ) + + if self.active_projects: + self.request.session.flash( + "Cannot delete organization with active project ownerships", + queue="error", + ) + return self.default_response + + # Record event before deleting organization. 
+ self.organization.record_event( + tag="organization:delete", + ip_address=self.request.remote_addr, + additional={ + "deleted_by_user_id": str(self.request.user.id), + }, + ) + + # Get owners before deleting organization. + owner_users = set(organization_owners(self.request, self.organization)) + + self.organization_service.delete_organization(self.organization.id) + + send_admin_organization_deleted_email( + self.request, + self.user_service.get_admins(), + organization_name=self.organization.name, + ) + send_organization_deleted_email( + self.request, + owner_users, + organization_name=self.organization.name, + ) + + return HTTPSeeOther(self.request.route_path("manage.organizations")) + + @view_config( route_name="manage.organization.roles", context=Organization, @@ -1249,7 +1428,7 @@ def manage_organization_roles( request._( "User '${username}' does not have a verified primary email " "address and cannot be added as a ${role_name} for organization", - mapping={"username": username, "role_name": role_name}, + mapping={"username": username, "role_name": role_name.value}, ), queue="error", ) @@ -1271,7 +1450,7 @@ def manage_organization_roles( invite_token = token_service.dumps( { "action": "email-organization-role-verify", - "desired_role": role_name, + "desired_role": role_name.value, "user_id": user.id, "organization_id": organization.id, "submitter_id": request.user.id, @@ -1291,7 +1470,7 @@ def manage_organization_roles( ip_address=request.remote_addr, additional={ "submitted_by_user_id": str(request.user.id), - "role_name": role_name, + "role_name": role_name.value, "target_user_id": str(userid), }, ) @@ -1301,7 +1480,7 @@ def manage_organization_roles( request, owner_users, user=user, - desired_role=role_name, + desired_role=role_name.value, initiator_username=request.user.username, organization_name=organization.name, email_token=invite_token, @@ -1310,7 +1489,7 @@ def manage_organization_roles( send_organization_role_verification_email( request, user, - desired_role=role_name, + desired_role=role_name.value, initiator_username=request.user.username, organization_name=organization.name, email_token=invite_token, @@ -1364,7 +1543,8 @@ def revoke_organization_invitation(organization, request): ) return HTTPSeeOther( request.route_path( - "manage.organization.roles", organization_name=organization.name + "manage.organization.roles", + organization_name=organization.normalized_name, ) ) @@ -1376,7 +1556,8 @@ def revoke_organization_invitation(organization, request): request.session.flash(request._("Invitation already expired."), queue="success") return HTTPSeeOther( request.route_path( - "manage.organization.roles", organization_name=organization.name + "manage.organization.roles", + organization_name=organization.normalized_name, ) ) role_name = token_data.get("desired_role") @@ -1414,7 +1595,7 @@ def revoke_organization_invitation(organization, request): return HTTPSeeOther( request.route_path( - "manage.organization.roles", organization_name=organization.name + "manage.organization.roles", organization_name=organization.normalized_name ) ) @@ -1489,7 +1670,7 @@ def change_organization_role( return HTTPSeeOther( request.route_path( - "manage.organization.roles", organization_name=organization.name + "manage.organization.roles", organization_name=organization.normalized_name ) ) @@ -1566,7 +1747,8 @@ def delete_organization_role(organization, request): else: return HTTPSeeOther( request.route_path( - "manage.organization.roles", organization_name=organization.name + 
"manage.organization.roles", + organization_name=organization.normalized_name, ) ) diff --git a/warehouse/migrations/versions/d1c00b634ac8_update_name_catalog_to_allow_null_org.py b/warehouse/migrations/versions/d1c00b634ac8_update_name_catalog_to_allow_null_org.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/d1c00b634ac8_update_name_catalog_to_allow_null_org.py @@ -0,0 +1,89 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +update_name_catalog_to_allow_null_org + +Revision ID: d1c00b634ac8 +Revises: ad71523546f9 +Create Date: 2022-05-11 17:20:56.596019 +""" + +from alembic import op +from sqlalchemy.dialects import postgresql + +revision = "d1c00b634ac8" +down_revision = "ad71523546f9" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. +# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "organization_name_catalog", + "organization_id", + existing_type=postgresql.UUID(), + nullable=True, + ) + op.create_index( + op.f("ix_organization_name_catalog_normalized_name"), + "organization_name_catalog", + ["normalized_name"], + unique=False, + ) + op.create_index( + op.f("ix_organization_name_catalog_organization_id"), + "organization_name_catalog", + ["organization_id"], + unique=False, + ) + op.drop_constraint( + "organization_name_catalog_organization_id_fkey", + "organization_name_catalog", + type_="foreignkey", + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_foreign_key( + "organization_name_catalog_organization_id_fkey", + "organization_name_catalog", + "organizations", + ["organization_id"], + ["id"], + onupdate="CASCADE", + ondelete="CASCADE", + ) + op.drop_index( + op.f("ix_organization_name_catalog_organization_id"), + table_name="organization_name_catalog", + ) + op.drop_index( + op.f("ix_organization_name_catalog_normalized_name"), + table_name="organization_name_catalog", + ) + op.alter_column( + "organization_name_catalog", + "organization_id", + existing_type=postgresql.UUID(), + nullable=False, + ) + # ### end Alembic commands ### diff --git a/warehouse/organizations/__init__.py b/warehouse/organizations/__init__.py --- a/warehouse/organizations/__init__.py +++ b/warehouse/organizations/__init__.py @@ -14,7 +14,10 @@ from warehouse.organizations.interfaces import IOrganizationService from warehouse.organizations.services import database_organization_factory -from warehouse.organizations.tasks import update_organization_invitation_status +from warehouse.organizations.tasks import ( + delete_declined_organizations, + update_organization_invitation_status, +) def includeme(config): @@ -24,3 +27,5 @@ def includeme(config): config.add_periodic_task( crontab(minute="*/5"), update_organization_invitation_status ) + + config.add_periodic_task(crontab(minute=0, hour=0), delete_declined_organizations) diff --git a/warehouse/organizations/interfaces.py b/warehouse/organizations/interfaces.py --- a/warehouse/organizations/interfaces.py +++ b/warehouse/organizations/interfaces.py @@ -126,6 +126,22 @@ def decline_organization(organization_id): Performs operations necessary to reject approval of an organization """ + def delete_organization(organization_id): + """ + Delete an organization for the specified organization id + """ + + def rename_organization(organization_id, name): + """ + Performs operations necessary to rename an Organization + """ + + def update_organization(organization_id, **changes): + """ + Accepts a organization object and attempts to update an organization with those + attributes + """ + def record_event(organization_id, *, tag, additional=None): """ Creates a new Organization.Event for the given organization with the given diff --git a/warehouse/organizations/models.py b/warehouse/organizations/models.py --- a/warehouse/organizations/models.py +++ b/warehouse/organizations/models.py @@ -13,6 +13,7 @@ import enum from pyramid.authorization import Allow +from pyramid.httpexceptions import HTTPPermanentRedirect from sqlalchemy import ( Boolean, CheckConstraint, @@ -27,6 +28,7 @@ orm, sql, ) +from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm.exc import NoResultFound from sqlalchemy_utils.types.url import URLType @@ -104,7 +106,7 @@ class OrganizationProject(db.Model): project = orm.relationship("Project", lazy=False) -class OrganizationType(enum.Enum): +class OrganizationType(str, enum.Enum): Community = "Community" Company = "Company" @@ -115,6 +117,7 @@ def __init__(self, request): self.request = request def __getitem__(self, organization): + # Try returning organization with matching name. try: return ( self.request.db.query(Organization) @@ -124,6 +127,27 @@ def __getitem__(self, organization): ) .one() ) + except NoResultFound: + pass + # Try redirecting to a renamed organization. 
+ try: + organization = ( + self.request.db.query(Organization) + .join( + OrganizationNameCatalog, + OrganizationNameCatalog.organization_id == Organization.id, + ) + .filter( + OrganizationNameCatalog.normalized_name + == func.normalize_pep426_name(organization) + ) + .one() + ) + raise HTTPPermanentRedirect( + self.request.matched_route.generate( + {"organization_name": organization.normalized_name} + ) + ) except NoResultFound: raise KeyError from None @@ -171,6 +195,14 @@ class Organization(HasEvents, db.Model): "Project", secondary=OrganizationProject.__table__, backref="organizations" # type: ignore # noqa ) + def record_event(self, *, tag, ip_address, additional={}): + """Record organization name in events in case organization is ever deleted.""" + super().record_event( + tag=tag, + ip_address=ip_address, + additional={"organization_name": self.name, **additional}, + ) + def __acl__(self): session = orm.object_session(self) @@ -236,11 +268,8 @@ class OrganizationNameCatalog(db.Model): __repr__ = make_repr("normalized_name", "organization_id") - normalized_name = Column(Text, nullable=False) - organization_id = Column( - ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"), - nullable=False, - ) + normalized_name = Column(Text, nullable=False, index=True) + organization_id = Column(UUID(as_uuid=True), nullable=True, index=True) class OrganizationInvitationStatus(enum.Enum): diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py --- a/warehouse/organizations/services.py +++ b/warehouse/organizations/services.py @@ -23,9 +23,12 @@ OrganizationInvitation, OrganizationInvitationStatus, OrganizationNameCatalog, + OrganizationProject, OrganizationRole, ) +NAME_FIELD = "name" + @implementer(IOrganizationService) class DatabaseOrganizationService: @@ -57,15 +60,15 @@ def find_organizationid(self, name): """ normalized_name = func.normalize_pep426_name(name) try: - organization = ( - self.db.query(Organization.id) - .filter(Organization.normalized_name == normalized_name) + (organization_id,) = ( + self.db.query(OrganizationNameCatalog.organization_id) + .filter(OrganizationNameCatalog.normalized_name == normalized_name) .one() ) except NoResultFound: return - return organization.id + return organization_id def get_organizations(self): """ @@ -80,7 +83,7 @@ def get_organizations_needing_approval(self): """ return ( self.db.query(Organization) - .filter(Organization.is_approved == None) # noqa + .filter(Organization.is_approved == None) # noqa: E711 .order_by(Organization.name) .all() ) @@ -289,6 +292,56 @@ def decline_organization(self, organization_id): return organization + def delete_organization(self, organization_id): + """ + Delete an organization for the specified organization id + """ + organization = self.get_organization(organization_id) + + # Delete invitations + self.db.query(OrganizationInvitation).filter_by( + organization=organization + ).delete() + # Null out organization id for all name catalog entries + self.db.query(OrganizationNameCatalog).filter( + OrganizationNameCatalog.organization_id == organization_id + ).update({OrganizationNameCatalog.organization_id: None}) + # Delete projects + self.db.query(OrganizationProject).filter_by(organization=organization).delete() + # Delete roles + self.db.query(OrganizationRole).filter_by(organization=organization).delete() + # TODO: Delete any stored card data from payment processor + # Delete organization + self.db.delete(organization) + self.db.flush() + + def 
rename_organization(self, organization_id, name): + """ + Performs operations necessary to rename an Organization + """ + organization = self.get_organization(organization_id) + + organization.name = name + self.db.flush() + + self.add_catalog_entry(organization_id) + + return organization + + def update_organization(self, organization_id, **changes): + """ + Accepts a organization object and attempts to update an organization with those + attributes + """ + organization = self.get_organization(organization_id) + for attr, value in changes.items(): + if attr == NAME_FIELD: + # Call rename function to ensure name catalag entry is added + self.rename_organization(organization_id, value) + setattr(organization, attr, value) + + return organization + def record_event(self, organization_id, *, tag, additional=None): """ Creates a new Organization.Event for the given organization with the given diff --git a/warehouse/organizations/tasks.py b/warehouse/organizations/tasks.py --- a/warehouse/organizations/tasks.py +++ b/warehouse/organizations/tasks.py @@ -10,13 +10,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime + from warehouse import tasks from warehouse.accounts.interfaces import ITokenService, TokenExpired +from warehouse.organizations.interfaces import IOrganizationService from warehouse.organizations.models import ( + Organization, OrganizationInvitation, OrganizationInvitationStatus, ) +CLEANUP_AFTER = datetime.timedelta(days=30) + @tasks.task(ignore_result=True, acks_late=True) def update_organization_invitation_status(request): @@ -34,3 +40,26 @@ def update_organization_invitation_status(request): token_service.loads(invite.token) except TokenExpired: invite.invite_status = OrganizationInvitationStatus.Expired + + [email protected](ignore_result=True, acks_late=True) +def delete_declined_organizations(request): + organizations = ( + request.db.query(Organization) + .filter( + Organization.is_active == False, # noqa: E712 + Organization.is_approved == False, # noqa: E712 + Organization.date_approved < (datetime.datetime.utcnow() - CLEANUP_AFTER), + ) + .all() + ) + + for organization in organizations: + organization_service = request.find_service(IOrganizationService, context=None) + # TODO: Cannot call this after deletion so how exactly do we handle this? + organization_service.record_event( + organization.id, + tag="organization:delete", + additional={"deleted_by": "CRON"}, + ) + organization_service.delete_organization(organization.id) diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -227,6 +227,13 @@ def includeme(config): ) config.add_route("manage.account.token", "/manage/account/token/", domain=warehouse) config.add_route("manage.organizations", "/manage/organizations/", domain=warehouse) + config.add_route( + "manage.organization.settings", + "/manage/organization/{organization_name}/settings/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ) config.add_route( "manage.organization.roles", "/manage/organization/{organization_name}/people/", diff --git a/warehouse/utils/organization.py b/warehouse/utils/organization.py new file mode 100644 --- /dev/null +++ b/warehouse/utils/organization.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from packaging.utils import canonicalize_name +from pyramid.httpexceptions import HTTPSeeOther + + +def confirm_organization( + organization, + request, + fail_route, + field_name="confirm_organization_name", + error_message="Could not delete organization", +): + confirm = request.POST.get(field_name) + organization_name = organization.normalized_name + if not confirm: + request.session.flash("Confirm the request", queue="error") + raise HTTPSeeOther( + request.route_path(fail_route, organization_name=organization_name) + ) + if canonicalize_name(confirm) != organization.normalized_name: + request.session.flash( + ( + f"{error_message} - " + f"{confirm!r} is not the same as {organization.normalized_name!r}" + ), + queue="error", + ) + raise HTTPSeeOther( + request.route_path(fail_route, organization_name=organization_name) + )
diff --git a/tests/common/db/organizations.py b/tests/common/db/organizations.py --- a/tests/common/db/organizations.py +++ b/tests/common/db/organizations.py @@ -35,6 +35,15 @@ class OrganizationFactory(WarehouseFactory): class Meta: model = Organization + @classmethod + def _create(cls, model_class, *args, **kwargs): + organization = super()._create(model_class, *args, **kwargs) + OrganizationNameCatalogFactory.create( + normalized_name=organization.normalized_name, + organization_id=organization.id, + ) + return organization + id = factory.Faker("uuid4", cast_to=None) name = factory.Faker("pystr", max_chars=12) display_name = factory.Faker("word") @@ -62,7 +71,7 @@ class OrganizationNameCatalogFactory(WarehouseFactory): class Meta: model = OrganizationNameCatalog - name = factory.Faker("pystr", max_chars=12) + normalized_name = factory.Faker("pystr", max_chars=12) organization_id = factory.Faker("uuid4", cast_to=None) diff --git a/tests/unit/accounts/test_views.py b/tests/unit/accounts/test_views.py --- a/tests/unit/accounts/test_views.py +++ b/tests/unit/accounts/test_views.py @@ -2256,7 +2256,8 @@ def test_verify_organization_role( assert result.headers["Location"] == "/" assert db_request.route_path.calls == [ pretend.call( - "manage.organization.roles", organization_name=organization.name + "manage.organization.roles", + organization_name=organization.normalized_name, ) ] diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -2499,6 +2499,227 @@ def test_send_role_changed_as_organization_email( ] +class TestOrganizationRenameEmails: + @pytest.fixture + def organization_rename(self, pyramid_user): + self.user = UserFactory.create() + EmailFactory.create(user=self.user, verified=True) + self.organization_name = "example" + self.previous_organization_name = "examplegroup" + + def test_send_admin_organization_renamed_email( + self, + db_request, + organization_rename, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "admin-organization-renamed" + ) + + result = email.send_admin_organization_renamed_email( + db_request, + self.user, + organization_name=self.organization_name, + previous_organization_name=self.previous_organization_name, + ) + + assert result == { + "organization_name": self.organization_name, + "previous_organization_name": self.previous_organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_renamed_email( + self, + db_request, + organization_rename, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-renamed" + ) + + result = email.send_organization_renamed_email( + db_request, + self.user, + 
organization_name=self.organization_name, + previous_organization_name=self.previous_organization_name, + ) + + assert result == { + "organization_name": self.organization_name, + "previous_organization_name": self.previous_organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + +class TestOrganizationDeleteEmails: + @pytest.fixture + def organization_delete(self, pyramid_user): + self.user = UserFactory.create() + EmailFactory.create(user=self.user, verified=True) + self.organization_name = "example" + + def test_send_admin_organization_deleted_email( + self, + db_request, + organization_delete, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "admin-organization-deleted" + ) + + result = email.send_admin_organization_deleted_email( + db_request, + self.user, + organization_name=self.organization_name, + ) + + assert result == { + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + def test_send_organization_deleted_email( + self, + db_request, + organization_delete, + make_email_renderers, + send_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + "organization-deleted" + ) + + result = email.send_organization_deleted_email( + db_request, + self.user, + organization_name=self.organization_name, + ) + + assert result == { + "organization_name": self.organization_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": 
subject_renderer.string_response, + "redact_ip": True, + }, + }, + ) + ] + + class TestCollaboratorAddedEmail: def test_collaborator_added_email( self, pyramid_request, pyramid_config, monkeypatch diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2626,6 +2626,355 @@ def test_create_organizations_disable_organizations(self, monkeypatch): ] +class TestManageOrganizationSettings: + def test_manage_organization( + self, db_request, organization_service, enable_organizations, monkeypatch + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + save_organization_obj = pretend.stub() + save_organization_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_obj + ) + monkeypatch.setattr(views, "SaveOrganizationForm", save_organization_cls) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.manage_organization() + + assert view.request == db_request + assert view.organization_service == organization_service + assert result == { + "organization": organization, + "save_organization_form": save_organization_obj, + "active_projects": view.active_projects, + } + assert save_organization_cls.calls == [ + pretend.call( + name=organization.name, + display_name=organization.display_name, + link_url=organization.link_url, + description=organization.description, + orgtype=organization.orgtype, + organization_service=organization_service, + ), + ] + + def test_manage_organization_disable_organizations(self, db_request): + organization = OrganizationFactory.create() + + view = views.ManageOrganizationSettingsViews(organization, db_request) + with pytest.raises(HTTPNotFound): + view.manage_organization() + + def test_save_organization( + self, db_request, organization_service, enable_organizations, monkeypatch + ): + organization = OrganizationFactory.create() + db_request.POST = { + "display_name": organization.display_name, + "link_url": organization.link_url, + "description": organization.description, + "orgtype": organization.orgtype, + } + + monkeypatch.setattr( + organization_service, + "update_organization", + pretend.call_recorder(lambda *a, **kw: None), + ) + + save_organization_obj = pretend.stub( + validate=lambda: True, data=db_request.POST + ) + save_organization_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_obj + ) + monkeypatch.setattr(views, "SaveOrganizationForm", save_organization_cls) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.save_organization() + + assert result == { + **view.default_response, + "save_organization_form": save_organization_obj, + } + assert organization_service.update_organization.calls == [ + pretend.call(organization.id, **db_request.POST) + ] + + def test_save_organization_validation_fails( + self, db_request, organization_service, enable_organizations, monkeypatch + ): + organization = OrganizationFactory.create() + db_request.POST = { + "display_name": organization.display_name, + "link_url": organization.link_url, + "description": organization.description, + "orgtype": organization.orgtype, + } + + monkeypatch.setattr( + organization_service, + "update_organization", + pretend.call_recorder(lambda *a, **kw: None), + ) + + save_organization_obj = pretend.stub( + validate=lambda: False, data=db_request.POST + ) + save_organization_cls = pretend.call_recorder( + lambda *a, **kw: 
save_organization_obj + ) + monkeypatch.setattr(views, "SaveOrganizationForm", save_organization_cls) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.save_organization() + + assert result == { + **view.default_response, + "save_organization_form": save_organization_obj, + } + assert organization_service.update_organization.calls == [] + + def test_save_organization_disable_organizations(self, db_request): + organization = OrganizationFactory.create() + + view = views.ManageOrganizationSettingsViews(organization, db_request) + with pytest.raises(HTTPNotFound): + view.save_organization() + + def test_save_organization_name( + self, + db_request, + pyramid_user, + organization_service, + user_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create(name="old-name") + db_request.POST = { + "confirm_current_organization_name": organization.name, + "name": "new-name", + } + db_request.route_path = pretend.call_recorder( + lambda *a, organization_name, **kw: ( + f"/manage/organization/{organization_name}/settings/" + ) + ) + + def rename_organization(organization_id, organization_name): + organization.name = organization_name + + monkeypatch.setattr( + organization_service, + "rename_organization", + pretend.call_recorder(rename_organization), + ) + + admins = [] + monkeypatch.setattr( + user_service, + "get_admins", + pretend.call_recorder(lambda *a, **kw: admins), + ) + + save_organization_obj = pretend.stub() + save_organization_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_obj + ) + monkeypatch.setattr(views, "SaveOrganizationForm", save_organization_cls) + + save_organization_name_obj = pretend.stub( + validate=lambda: True, name=pretend.stub(data=db_request.POST["name"]) + ) + save_organization_name_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_name_obj + ) + monkeypatch.setattr( + views, "SaveOrganizationNameForm", save_organization_name_cls + ) + + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr(views, "send_admin_organization_renamed_email", send_email) + monkeypatch.setattr(views, "send_organization_renamed_email", send_email) + monkeypatch.setattr( + views, "organization_owners", lambda *a, **kw: [pyramid_user] + ) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.save_organization_name() + + assert isinstance(result, HTTPSeeOther) + assert ( + result.headers["Location"] + == f"/manage/organization/{organization.normalized_name}/settings/" + ) + assert organization_service.rename_organization.calls == [ + pretend.call(organization.id, "new-name") + ] + assert send_email.calls == [ + pretend.call( + db_request, + admins, + organization_name="new-name", + previous_organization_name="old-name", + ), + pretend.call( + db_request, + {pyramid_user}, + organization_name="new-name", + previous_organization_name="old-name", + ), + ] + + def test_save_organization_name_validation_fails( + self, db_request, organization_service, enable_organizations, monkeypatch + ): + organization = OrganizationFactory.create(name="old-name") + db_request.POST = { + "confirm_current_organization_name": organization.name, + "name": "new-name", + } + + def rename_organization(organization_id, organization_name): + organization.name = organization_name + + monkeypatch.setattr( + organization_service, + "rename_organization", + pretend.call_recorder(rename_organization), + ) + + save_organization_obj = pretend.stub() + 
save_organization_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_obj + ) + monkeypatch.setattr(views, "SaveOrganizationForm", save_organization_cls) + + save_organization_name_obj = pretend.stub( + validate=lambda: False, errors=pretend.stub(values=lambda: ["Invalid"]) + ) + save_organization_name_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_name_obj + ) + monkeypatch.setattr( + views, "SaveOrganizationNameForm", save_organization_name_cls + ) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.save_organization_name() + + assert result == view.default_response + assert organization_service.rename_organization.calls == [] + + def test_save_organization_name_disable_organizations(self, db_request): + organization = OrganizationFactory.create(name="old-name") + + view = views.ManageOrganizationSettingsViews(organization, db_request) + with pytest.raises(HTTPNotFound): + view.save_organization_name() + + def test_delete_organization( + self, + db_request, + pyramid_user, + organization_service, + user_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + db_request.POST = {"confirm_organization_name": organization.name} + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/organizations/" + ) + + monkeypatch.setattr( + organization_service, + "delete_organization", + pretend.call_recorder(lambda *a, **kw: None), + ) + + admins = [] + monkeypatch.setattr( + user_service, + "get_admins", + pretend.call_recorder(lambda *a, **kw: admins), + ) + + send_email = pretend.call_recorder(lambda *a, **kw: None) + monkeypatch.setattr(views, "send_admin_organization_deleted_email", send_email) + monkeypatch.setattr(views, "send_organization_deleted_email", send_email) + monkeypatch.setattr( + views, "organization_owners", lambda *a, **kw: [pyramid_user] + ) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.delete_organization() + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/manage/organizations/" + assert organization_service.delete_organization.calls == [ + pretend.call(organization.id) + ] + assert send_email.calls == [ + pretend.call( + db_request, + admins, + organization_name=organization.name, + ), + pretend.call( + db_request, + {pyramid_user}, + organization_name=organization.name, + ), + ] + assert db_request.route_path.calls == [pretend.call("manage.organizations")] + + def test_delete_organization_with_active_projects( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + db_request.POST = {"confirm_organization_name": organization.name} + db_request.route_path = pretend.call_recorder( + lambda *a, **kw: "/manage/organizations/" + ) + + save_organization_obj = pretend.stub() + save_organization_cls = pretend.call_recorder( + lambda *a, **kw: save_organization_obj + ) + monkeypatch.setattr(views, "SaveOrganizationForm", save_organization_cls) + + monkeypatch.setattr( + organization_service, + "delete_organization", + pretend.call_recorder(lambda *a, **kw: None), + ) + + view = views.ManageOrganizationSettingsViews(organization, db_request) + result = view.delete_organization() + + assert result == view.default_response + assert organization_service.delete_organization.calls == [] + assert db_request.route_path.calls == [] + 
+ def test_delete_organization_disable_organizations(self, db_request): + organization = OrganizationFactory.create() + + view = views.ManageOrganizationSettingsViews(organization, db_request) + with pytest.raises(HTTPNotFound): + view.delete_organization() + + class TestManageOrganizationRoles: def test_get_manage_organization_roles(self, db_request, enable_organizations): organization = OrganizationFactory.create(name="foobar") @@ -2843,7 +3192,7 @@ def test_post_unverified_email( form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), username=pretend.stub(data=user.username), - role_name=pretend.stub(data="Owner"), + role_name=pretend.stub(data=OrganizationRoleType.Owner), ) form_class = pretend.call_recorder(lambda *a, **kw: form_obj) @@ -2919,7 +3268,7 @@ def test_cannot_reinvite_organization_role( form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), username=pretend.stub(data=new_user.username), - role_name=pretend.stub(data="Owner"), + role_name=pretend.stub(data=OrganizationRoleType.Owner), ) form_class = pretend.call_recorder(lambda *a, **kw: form_obj) @@ -2996,7 +3345,7 @@ def test_reinvite_organization_role_after_expiration( form_obj = pretend.stub( validate=pretend.call_recorder(lambda: True), username=pretend.stub(data=new_user.username), - role_name=pretend.stub(data="Owner"), + role_name=pretend.stub(data=OrganizationRoleType.Owner), ) form_class = pretend.call_recorder(lambda *a, **kw: form_obj) @@ -3059,13 +3408,13 @@ def test_reinvite_organization_role_after_expiration( db_request, {owner_1, owner_2}, user=new_user, - desired_role=form_obj.role_name.data, + desired_role=form_obj.role_name.data.value, initiator_username=db_request.user.username, organization_name=organization.name, email_token=token_service.dumps( { "action": "email-organization-role-verify", - "desired_role": form_obj.role_name.data, + "desired_role": form_obj.role_name.data.value, "user_id": new_user.id, "organization_id": organization.id, "submitter_id": db_request.user.id, @@ -3078,13 +3427,13 @@ def test_reinvite_organization_role_after_expiration( pretend.call( db_request, new_user, - desired_role=form_obj.role_name.data, + desired_role=form_obj.role_name.data.value, initiator_username=db_request.user.username, organization_name=organization.name, email_token=token_service.dumps( { "action": "email-organization-role-verify", - "desired_role": form_obj.role_name.data, + "desired_role": form_obj.role_name.data.value, "user_id": new_user.id, "organization_id": organization.id, "submitter_id": db_request.user.id, diff --git a/tests/unit/organizations/__init__.py b/tests/unit/organizations/__init__.py --- a/tests/unit/organizations/__init__.py +++ b/tests/unit/organizations/__init__.py @@ -9,10 +9,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from warehouse.organizations.interfaces import IOrganizationService -from warehouse.organizations.services import database_organization_factory - - -def includeme(config): - config.register_service_factory(database_organization_factory, IOrganizationService) diff --git a/tests/unit/organizations/test_init.py b/tests/unit/organizations/test_init.py new file mode 100644 --- /dev/null +++ b/tests/unit/organizations/test_init.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from celery.schedules import crontab + +from warehouse import organizations +from warehouse.organizations.interfaces import IOrganizationService +from warehouse.organizations.services import database_organization_factory +from warehouse.organizations.tasks import ( + delete_declined_organizations, + update_organization_invitation_status, +) + + +def test_includeme(): + config = pretend.stub( + register_service_factory=pretend.call_recorder( + lambda factory, iface, name=None: None + ), + add_periodic_task=pretend.call_recorder(lambda *a, **kw: None), + ) + + organizations.includeme(config) + + assert config.register_service_factory.calls == [ + pretend.call(database_organization_factory, IOrganizationService), + ] + + assert config.add_periodic_task.calls == [ + pretend.call(crontab(minute="*/5"), update_organization_invitation_status), + pretend.call(crontab(minute=0, hour=0), delete_declined_organizations), + ] diff --git a/tests/unit/organizations/test_models.py b/tests/unit/organizations/test_models.py --- a/tests/unit/organizations/test_models.py +++ b/tests/unit/organizations/test_models.py @@ -10,15 +10,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pretend import pytest from pyramid.authorization import Allow +from pyramid.httpexceptions import HTTPPermanentRedirect from pyramid.location import lineage from warehouse.organizations.models import OrganizationFactory, OrganizationRoleType from ...common.db.organizations import ( OrganizationFactory as DBOrganizationFactory, + OrganizationNameCatalogFactory as DBOrganizationNameCatalogFactory, OrganizationRoleFactory as DBOrganizationRoleFactory, ) @@ -31,6 +34,18 @@ def test_traversal_finds(self, db_request, name, normalized): assert root[normalized] == organization + def test_traversal_redirects(self, db_request): + db_request.matched_route = pretend.stub(generate=lambda *a, **kw: "route-path") + organization = DBOrganizationFactory.create() + DBOrganizationNameCatalogFactory.create( + normalized_name="oldname", + organization_id=organization.id, + ) + root = OrganizationFactory(db_request) + + with pytest.raises(HTTPPermanentRedirect): + root["oldname"] + def test_traversal_cant_find(self, db_request): organization = DBOrganizationFactory.create() root = OrganizationFactory(db_request) diff --git a/tests/unit/organizations/test_services.py b/tests/unit/organizations/test_services.py --- a/tests/unit/organizations/test_services.py +++ b/tests/unit/organizations/test_services.py @@ -16,7 +16,14 @@ from warehouse.organizations import services from warehouse.organizations.interfaces import IOrganizationService -from warehouse.organizations.models import OrganizationRoleType +from warehouse.organizations.models import ( + OrganizationInvitation, + OrganizationNameCatalog, + OrganizationProject, + OrganizationRole, + OrganizationRoleType, + OrganizationType, +) from ...common.db.organizations import ( OrganizationFactory, @@ -138,13 +145,6 @@ def test_add_organization(self, organization_service): assert org_from_db.description == 
organization.description assert not org_from_db.is_active - def test_add_catalog_entry(self, organization_service): - organization = OrganizationFactory.create() - - catalog_entry = organization_service.add_catalog_entry(organization.id) - assert catalog_entry.normalized_name == organization.normalized_name - assert catalog_entry.organization_id == organization.id - def test_get_organization_role(self, organization_service, user_service): organization_role = OrganizationRoleFactory.create() @@ -309,3 +309,82 @@ def test_decline_organization(self, organization_service): assert organization.is_approved is False assert organization.date_approved is not None + + def test_delete_organization(self, organization_service, db_request): + organization = OrganizationFactory.create() + + organization_service.delete_organization(organization.id) + + assert not ( + ( + db_request.db.query(OrganizationInvitation) + .filter_by(organization=organization) + .count() + ) + ) + assert not ( + ( + db_request.db.query(OrganizationNameCatalog) + .filter(OrganizationNameCatalog.organization_id == organization.id) + .count() + ) + ) + assert not ( + ( + db_request.db.query(OrganizationProject) + .filter_by(organization=organization) + .count() + ) + ) + assert not ( + ( + db_request.db.query(OrganizationRole) + .filter_by(organization=organization) + .count() + ) + ) + assert organization_service.get_organization(organization.id) is None + + def test_rename_organization(self, organization_service, db_request): + organization = OrganizationFactory.create() + + organization_service.rename_organization(organization.id, "some_new_name") + assert organization.name == "some_new_name" + + db_organization = organization_service.get_organization(organization.id) + assert db_organization.name == "some_new_name" + + assert ( + db_request.db.query(OrganizationNameCatalog) + .filter( + OrganizationNameCatalog.normalized_name == organization.normalized_name + ) + .count() + ) + + def test_update_organization(self, organization_service, db_request): + organization = OrganizationFactory.create() + + organization_service.update_organization( + organization.id, + name="some_new_name", + display_name="Some New Name", + orgtype=OrganizationType.Company.value, + ) + assert organization.name == "some_new_name" + assert organization.display_name == "Some New Name" + assert organization.orgtype == OrganizationType.Company + + db_organization = organization_service.get_organization(organization.id) + assert db_organization.name == "some_new_name" + assert db_organization.display_name == "Some New Name" + assert db_organization.orgtype == OrganizationType.Company + + assert ( + db_request.db.query(OrganizationNameCatalog) + .filter( + OrganizationNameCatalog.normalized_name + == db_organization.normalized_name + ) + .count() + ) diff --git a/tests/unit/organizations/test_tasks.py b/tests/unit/organizations/test_tasks.py --- a/tests/unit/organizations/test_tasks.py +++ b/tests/unit/organizations/test_tasks.py @@ -10,11 +10,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime + import pretend from warehouse.accounts.interfaces import ITokenService, TokenExpired -from warehouse.organizations.models import OrganizationInvitationStatus -from warehouse.organizations.tasks import update_organization_invitation_status +from warehouse.organizations.models import Organization, OrganizationInvitationStatus +from warehouse.organizations.tasks import ( + delete_declined_organizations, + update_organization_invitation_status, +) from ...common.db.organizations import ( OrganizationFactory, @@ -53,3 +58,54 @@ def test_no_updates(self, db_request): pretend.call(ITokenService, name="email") ] assert invite.invite_status == OrganizationInvitationStatus.Pending + + +class TestDeleteOrganizations: + def test_delete_declined_organizations(self, db_request): + # Create an organization that's ready for cleanup + organization = OrganizationFactory.create() + organization.is_active = False + organization.is_approved = False + organization.date_approved = datetime.datetime.now() - datetime.timedelta( + days=31 + ) + + # Create an organization that's not ready to be cleaned up yet + organization2 = OrganizationFactory.create() + organization2.is_active = False + organization2.is_approved = False + organization2.date_approved = datetime.datetime.now() + + assert ( + db_request.db.query(Organization.id) + .filter(Organization.id == organization.id) + .count() + == 1 + ) + + assert ( + db_request.db.query(Organization.id) + .filter(Organization.id == organization2.id) + .count() + == 1 + ) + + assert db_request.db.query(Organization).count() == 2 + + delete_declined_organizations(db_request) + + assert not ( + ( + db_request.db.query(Organization.id) + .filter(Organization.id == organization.id) + .count() + ) + ) + + assert ( + db_request.db.query(Organization.id) + .filter(Organization.id == organization2.id) + .count() + ) + + assert db_request.db.query(Organization).count() == 1 diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -245,6 +245,13 @@ def add_policy(name, filename): pretend.call( "manage.organizations", "/manage/organizations/", domain=warehouse ), + pretend.call( + "manage.organization.settings", + "/manage/organization/{organization_name}/settings/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ), pretend.call( "manage.organization.roles", "/manage/organization/{organization_name}/people/", diff --git a/tests/unit/utils/test_organization.py b/tests/unit/utils/test_organization.py new file mode 100644 --- /dev/null +++ b/tests/unit/utils/test_organization.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from pretend import call, call_recorder, stub +from pyramid.httpexceptions import HTTPSeeOther + +from warehouse.utils.organization import confirm_organization + + +def test_confirm(): + organization = stub(normalized_name="foobar") + request = stub( + POST={"confirm_organization_name": "foobar"}, + route_path=call_recorder(lambda *a, **kw: stub()), + session=stub(flash=call_recorder(lambda *a, **kw: stub())), + ) + + confirm_organization(organization, request, fail_route="fail_route") + + assert request.route_path.calls == [] + assert request.session.flash.calls == [] + + +def test_confirm_no_input(): + organization = stub(normalized_name="foobar") + request = stub( + POST={"confirm_organization_name": ""}, + route_path=call_recorder(lambda *a, **kw: "/the-redirect"), + session=stub(flash=call_recorder(lambda *a, **kw: stub())), + ) + + with pytest.raises(HTTPSeeOther) as err: + confirm_organization(organization, request, fail_route="fail_route") + assert err.value.location == "/the-redirect" + + assert request.route_path.calls == [call("fail_route", organization_name="foobar")] + assert request.session.flash.calls == [call("Confirm the request", queue="error")] + + +def test_confirm_incorrect_input(): + organization = stub(normalized_name="foobar") + request = stub( + POST={"confirm_organization_name": "bizbaz"}, + route_path=call_recorder(lambda *a, **kw: "/the-redirect"), + session=stub(flash=call_recorder(lambda *a, **kw: stub())), + ) + + with pytest.raises(HTTPSeeOther) as err: + confirm_organization(organization, request, fail_route="fail_route") + assert err.value.location == "/the-redirect" + + assert request.route_path.calls == [call("fail_route", organization_name="foobar")] + assert request.session.flash.calls == [ + call( + "Could not delete organization - 'bizbaz' is not the same as 'foobar'", + queue="error", + ) + ]
Rename an organization

Feature request for the organization account project in PyPI.

| Description | An account Owner can change the name of an Organization account |
| -- | -- |
| User value | If a corporate or community project's brand or name changes, they may want their account to reflect that change. |
| Acceptance criteria | The slug and reference are kept indefinitely so that the old Organization slug can be used to redirect to the new slug. A new Organization cannot be created using the old Organization slug. An email notification is sent to PyPI Admins and account owners. |
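The redirect requirement in the acceptance criteria matches the name-catalog pattern exercised by `test_traversal_redirects` later in this record (a catalog row for the old slug, answered with `HTTPPermanentRedirect`). Below is a minimal, self-contained sketch of that idea; the in-memory `OrganizationRegistry` and its methods are illustrative stand-ins, not warehouse's actual models or API:

```python
from dataclasses import dataclass


class HTTPPermanentRedirect(Exception):
    """Stand-in for pyramid.httpexceptions.HTTPPermanentRedirect."""

    def __init__(self, location):
        super().__init__(location)
        self.location = location


@dataclass
class Organization:
    name: str


class OrganizationRegistry:
    """Illustrative in-memory analogue of the organizations table plus a
    name catalog that records every slug an organization has ever used."""

    def __init__(self):
        self.current = {}  # normalized current slug -> Organization
        self.catalog = {}  # every normalized slug ever used -> Organization

    def add(self, org):
        key = org.name.lower()
        if key in self.catalog:
            # Old slugs can never be claimed by a new organization.
            raise ValueError(f"{org.name!r} has already been used")
        self.current[key] = org
        self.catalog[key] = org

    def rename(self, org, new_name):
        key = new_name.lower()
        if key in self.catalog:
            raise ValueError(f"{new_name!r} has already been used")
        del self.current[org.name.lower()]
        org.name = new_name
        self.current[key] = org
        self.catalog[key] = org
        # The old slug is deliberately left in self.catalog.

    def traverse(self, slug):
        key = slug.lower()
        if key in self.current:
            return self.current[key]
        if key in self.catalog:
            # The old slug is kept indefinitely and redirects permanently.
            raise HTTPPermanentRedirect(
                f"/organizations/{self.catalog[key].name}/"
            )
        raise KeyError(slug)


registry = OrganizationRegistry()
acme = Organization(name="acme")
registry.add(acme)
registry.rename(acme, "acme-corp")

try:
    registry.traverse("acme")  # old slug still resolves, via a redirect
except HTTPPermanentRedirect as redirect:
    assert redirect.location == "/organizations/acme-corp/"

assert registry.traverse("acme-corp") is acme
```

In the real implementation the catalog lives in a database table and the redirect is raised from the traversal factory, as the tests in this record show.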
2022-05-13T23:04:25Z
[]
[]
pypi/warehouse
11473
pypi__warehouse-11473
[ "11086" ]
23654035bddff636652368369897cc7e37760ac2
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -348,6 +348,26 @@ def send_new_organization_declined_email( } +@_email("organization-project-added") +def send_organization_project_added_email( + request, user, *, organization_name, project_name +): + return { + "organization_name": organization_name, + "project_name": project_name, + } + + +@_email("organization-project-removed") +def send_organization_project_removed_email( + request, user, *, organization_name, project_name +): + return { + "organization_name": organization_name, + "project_name": project_name, + } + + @_email("organization-member-invited") def send_organization_member_invited_email( request, diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -20,7 +20,6 @@ import zipfile from cgi import FieldStorage, parse_header -from itertools import chain import packaging.requirements import packaging.specifiers @@ -28,19 +27,19 @@ import packaging.version import pkg_resources import requests -import stdlib_list import wtforms import wtforms.validators from pyramid.httpexceptions import ( HTTPBadRequest, + HTTPException, HTTPForbidden, HTTPGone, HTTPPermanentRedirect, ) from pyramid.response import Response from pyramid.view import view_config -from sqlalchemy import exists, func, orm +from sqlalchemy import func, orm from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound from trove_classifiers import classifiers, deprecated_classifiers @@ -57,13 +56,13 @@ File, Filename, JournalEntry, - ProhibitedProjectName, Project, Release, Role, ) from warehouse.packaging.tasks import update_bigquery_release_files from warehouse.utils import http, readme +from warehouse.utils.project import add_project, validate_project_name from warehouse.utils.security_policy import AuthenticationMethod ONE_MB = 1 * 1024 * 1024 @@ -76,21 +75,6 @@ PATH_HASHER = "blake2_256" -def namespace_stdlib_list(module_list): - for module_name in module_list: - parts = module_name.split(".") - for i, part in enumerate(parts): - yield ".".join(parts[: i + 1]) - - -STDLIB_PROHIBITED = { - packaging.utils.canonicalize_name(s.rstrip("-_.").lstrip("-_.")) - for s in chain.from_iterable( - namespace_stdlib_list(stdlib_list.stdlib_list(version)) - for version in stdlib_list.short_versions - ) -} - # Wheel platform checking # Note: defining new platform ABI compatibility tags that don't @@ -899,83 +883,19 @@ def file_upload(request): # Look up the project first before doing anything else, this is so we can # automatically register it if we need to and can check permissions before # going any further. - try: - project = ( - request.db.query(Project) - .filter( - Project.normalized_name == func.normalize_pep426_name(form.name.data) - ) - .one() - ) - except NoResultFound: - # Check for AdminFlag set by a PyPI Administrator disabling new project - # registration, reasons for this include Spammers, security - # vulnerabilities, or just wanting to be lazy and not worry ;) - if request.flags.enabled(AdminFlagValue.DISALLOW_NEW_PROJECT_REGISTRATION): - raise _exc_with_message( - HTTPForbidden, - ( - "New project registration temporarily disabled. " - "See {projecthelp} for more information." 
- ).format(projecthelp=request.help_url(_anchor="admin-intervention")), - ) from None - - # Before we create the project, we're going to check our prohibited - # names to see if this project name prohibited, or if the project name - # is a close approximation of an existing project name. If it is, - # then we're going to deny the request to create this project. - _prohibited_name = request.db.query( - exists().where( - ProhibitedProjectName.name == func.normalize_pep426_name(form.name.data) - ) - ).scalar() - if _prohibited_name: - raise _exc_with_message( - HTTPBadRequest, - ( - "The name {name!r} isn't allowed. " - "See {projecthelp} for more information." - ).format( - name=form.name.data, - projecthelp=request.help_url(_anchor="project-name"), - ), - ) from None - - _ultranormalize_collision = request.db.query( - exists().where( - func.ultranormalize_name(Project.name) - == func.ultranormalize_name(form.name.data) - ) - ).scalar() - if _ultranormalize_collision: - raise _exc_with_message( - HTTPBadRequest, - ( - "The name {name!r} is too similar to an existing project. " - "See {projecthelp} for more information." - ).format( - name=form.name.data, - projecthelp=request.help_url(_anchor="project-name"), - ), - ) from None - - # Also check for collisions with Python Standard Library modules. - if packaging.utils.canonicalize_name(form.name.data) in STDLIB_PROHIBITED: - raise _exc_with_message( - HTTPBadRequest, - ( - "The name {name!r} isn't allowed (conflict with Python " - "Standard Library module name). See " - "{projecthelp} for more information." - ).format( - name=form.name.data, - projecthelp=request.help_url(_anchor="project-name"), - ), - ) from None + project = ( + request.db.query(Project) + .filter(Project.normalized_name == func.normalize_pep426_name(form.name.data)) + .first() + ) - # Next we'll create the project - project = Project(name=form.name.data) - request.db.add(project) + if project is None: + # We attempt to create the project. + try: + validate_project_name(form.name.data, request) + except HTTPException as exc: + raise _exc_with_message(exc.__class__, exc.detail) from None + project = add_project(form.name.data, request) # Then we'll add a role setting the current user as the "Owner" of the # project. @@ -983,14 +903,6 @@ def file_upload(request): # TODO: This should be handled by some sort of database trigger or a # SQLAlchemy hook or the like instead of doing it inline in this # view. - request.db.add( - JournalEntry( - name=project.name, - action="create", - submitted_by=request.user, - submitted_from=request.remote_addr, - ) - ) request.db.add( JournalEntry( name=project.name, @@ -999,12 +911,6 @@ def file_upload(request): submitted_from=request.remote_addr, ) ) - - project.record_event( - tag="project:create", - ip_address=request.remote_addr, - additional={"created_by": request.user.username}, - ) project.record_event( tag="project:role:add", ip_address=request.remote_addr, diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -11,6 +11,7 @@ # limitations under the License. 
import json +import re import wtforms @@ -341,7 +342,7 @@ class OrganizationNameMixin: ), ), # the regexp below must match the CheckConstraint - # for the name field in organizations.model.Organization + # for the name field in organizations.models.Organization wtforms.validators.Regexp( r"^[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]$", message=_( @@ -366,6 +367,80 @@ def validate_name(self, field): ) +class AddOrganizationProjectForm(forms.Form): + + __params__ = ["add_existing_project", "existing_project_name", "new_project_name"] + + add_existing_project = wtforms.RadioField( + "Add existing or new project?", + choices=[("true", "Existing project"), ("false", "New project")], + coerce=lambda string: True if string == "true" else False, + default="true", + validators=[wtforms.validators.InputRequired()], + ) + + existing_project_name = wtforms.SelectField( + "Select project", + choices=[("", "Select project")], + ) + + new_project_name = wtforms.StringField() + + _project_name_re = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE + ) + + def __init__(self, *args, project_choices, project_factory, **kwargs): + super().__init__(*args, **kwargs) + self.existing_project_name.choices += [ + (name, name) for name in sorted(project_choices) + ] + self.project_factory = project_factory + + def validate_existing_project_name(self, field): + if self.add_existing_project.data: + if not field.data: + raise wtforms.validators.StopValidation(_("Select project")) + + def validate_new_project_name(self, field): + if not self.add_existing_project.data: + if not field.data: + raise wtforms.validators.StopValidation(_("Specify project name")) + if not self._project_name_re.match(field.data): + raise wtforms.validators.ValidationError( + _( + "Start and end with a letter or numeral containing " + "only ASCII numeric and '.', '_' and '-'." + ) + ) + if field.data in self.project_factory: + raise wtforms.validators.ValidationError( + _( + "This project name has already been used. " + "Choose a different project name." 
+ ) + ) + + +class TransferOrganizationProjectForm(forms.Form): + + __params__ = ["organization"] + + organization = wtforms.SelectField( + "Select organization", + choices=[("", "Select organization")], + validators=[ + wtforms.validators.DataRequired(message="Select organization"), + ], + ) + + def __init__(self, *args, organization_choices, **kwargs): + super().__init__(*args, **kwargs) + self.organization.choices += [ + (name, name) for name in sorted(organization_choices) + ] + + class CreateOrganizationRoleForm(OrganizationRoleNameMixin, UsernameMixin, forms.Form): def __init__(self, *args, orgtype, organization_service, user_service, **kwargs): super().__init__(*args, **kwargs) diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -18,6 +18,7 @@ from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage from pyramid.httpexceptions import ( HTTPBadRequest, + HTTPException, HTTPNotFound, HTTPSeeOther, HTTPTooManyRequests, @@ -58,6 +59,8 @@ send_organization_member_invited_email, send_organization_member_removed_email, send_organization_member_role_changed_email, + send_organization_project_added_email, + send_organization_project_removed_email, send_organization_renamed_email, send_organization_role_verification_email, send_password_change_email, @@ -80,6 +83,7 @@ from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage.forms import ( AddEmailForm, + AddOrganizationProjectForm, ChangeOrganizationRoleForm, ChangePasswordForm, ChangeRoleForm, @@ -97,6 +101,7 @@ SaveOrganizationForm, SaveOrganizationNameForm, Toggle2FARequirementForm, + TransferOrganizationProjectForm, ) from warehouse.metrics.interfaces import IMetricsService from warehouse.oidc.forms import DeleteProviderForm, GitHubProviderForm @@ -113,6 +118,7 @@ File, JournalEntry, Project, + ProjectFactory, Release, Role, RoleInvitation, @@ -122,34 +128,87 @@ from warehouse.utils.http import is_safe_url from warehouse.utils.organization import confirm_organization from warehouse.utils.paginate import paginate_url_factory -from warehouse.utils.project import confirm_project, destroy_docs, remove_project +from warehouse.utils.project import ( + add_project, + confirm_project, + destroy_docs, + remove_project, + validate_project_name, +) def user_projects(request): """Return all the projects for which the user is a sole owner""" projects_owned = ( - request.db.query(Project.id) + request.db.query(Project.id.label("id")) .join(Role.project) .filter(Role.role_name == "Owner", Role.user == request.user) - .subquery() ) projects_collaborator = ( request.db.query(Project.id) .join(Role.project) .filter(Role.user == request.user) - .subquery() ) with_sole_owner = ( + # Select projects having just one owner. request.db.query(Role.project_id) .join(projects_owned) .filter(Role.role_name == "Owner") .group_by(Role.project_id) .having(func.count(Role.project_id) == 1) - .subquery() + # Except projects owned by an organization. 
+ .join(Role.project) + .filter(~Project.organization.has()) ) + if not request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + organizations_owned = ( + request.db.query(Organization.id) + .join(OrganizationRole.organization) + .filter( + OrganizationRole.role_name == OrganizationRoleType.Owner, + OrganizationRole.user == request.user, + ) + .subquery() + ) + + organizations_with_sole_owner = ( + request.db.query(OrganizationRole.organization_id) + .join(organizations_owned) + .filter(OrganizationRole.role_name == "Owner") + .group_by(OrganizationRole.organization_id) + .having(func.count(OrganizationRole.organization_id) == 1) + .subquery() + ) + + projects_owned = projects_owned.union( + request.db.query(Project.id.label("id")) + .join(Organization.projects) + .join(organizations_owned, Organization.id == organizations_owned.c.id) + ) + + with_sole_owner = with_sole_owner.union( + # Select projects where organization has only one owner. + request.db.query(Project.id) + .join(Organization.projects) + .join( + organizations_with_sole_owner, + Organization.id == organizations_with_sole_owner.c.organization_id, + ) + # Except projects with any other individual owners. + .filter( + ~Project.roles.any( + (Role.role_name == "Owner") & (Role.user_id != request.user.id) + ) + ) + ) + + projects_owned = projects_owned.subquery() + projects_collaborator = projects_collaborator.subquery() + with_sole_owner = with_sole_owner.subquery() + return { "projects_owned": ( request.db.query(Project) @@ -172,13 +231,7 @@ def user_projects(request): def project_owners(request, project): """Return all users who are owners of the project.""" - owner_roles = ( - request.db.query(User.id) - .join(Role.user) - .filter(Role.role_name == "Owner", Role.project == project) - .subquery() - ) - return request.db.query(User).join(owner_roles, User.id == owner_roles.c.id).all() + return project.owners @view_defaults( @@ -1202,7 +1255,7 @@ def create_organization(self): uses_session=True, require_csrf=True, require_methods=False, - permission="view:organization", + permission="manage:organization", has_translations=True, require_reauth=True, ) @@ -1234,7 +1287,7 @@ def default_response(self): "active_projects": self.active_projects, } - @view_config(request_method="GET") + @view_config(request_method="GET", permission="view:organization") def manage_organization(self): if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): raise HTTPNotFound @@ -1367,6 +1420,172 @@ def delete_organization(self): return HTTPSeeOther(self.request.route_path("manage.organizations")) +@view_defaults( + route_name="manage.organization.projects", + context=Organization, + renderer="manage/organization/projects.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:organization", + has_translations=True, + require_reauth=True, +) +class ManageOrganizationProjectsViews: + def __init__(self, organization, request): + self.organization = organization + self.request = request + self.user_service = request.find_service(IUserService, context=None) + self.organization_service = request.find_service( + IOrganizationService, context=None + ) + self.project_factory = ProjectFactory(request) + + @property + def active_projects(self): + return self.organization.projects + + @property + def default_response(self): + active_projects = self.active_projects + all_user_projects = user_projects(self.request) + projects_owned = set( + project.name for project in all_user_projects["projects_owned"] + ) + 
projects_sole_owned = set( + project.name for project in all_user_projects["projects_sole_owned"] + ) + projects_requiring_2fa = set( + project.name for project in all_user_projects["projects_requiring_2fa"] + ) + project_choices = set( + project.name + for project in all_user_projects["projects_owned"] + if not project.organization + ) + project_factory = self.project_factory + + return { + "organization": self.organization, + "active_projects": active_projects, + "projects_owned": projects_owned, + "projects_sole_owned": projects_sole_owned, + "projects_requiring_2fa": projects_requiring_2fa, + "add_organization_project_form": AddOrganizationProjectForm( + self.request.POST, + project_choices=project_choices, + project_factory=project_factory, + ), + } + + @view_config(request_method="GET", permission="view:organization") + def manage_organization_projects(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + return self.default_response + + @view_config(request_method="POST") + def add_organization_project(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + # Get and validate form from default response. + default_response = self.default_response + form = default_response["add_organization_project_form"] + if not form.validate(): + return default_response + + # Get existing project or add new project. + if form.add_existing_project.data: + # Get existing project. + project = self.project_factory[form.existing_project_name.data] + # Remove request user as individual project owner. + role = ( + self.request.db.query(Role) + .join(User) + .filter( + Role.role_name == "Owner", + Role.project == project, + Role.user == self.request.user, + ) + .first() + ) + if role: + self.request.db.delete(role) + self.request.db.add( + JournalEntry( + name=project.name, + action=f"remove {role.role_name} {role.user.username}", + submitted_by=self.request.user, + submitted_from=self.request.remote_addr, + ) + ) + project.record_event( + tag="project:role:delete", + ip_address=self.request.remote_addr, + additional={ + "submitted_by": self.request.user.username, + "role_name": role.role_name, + "target_user": role.user.username, + }, + ) + else: + # Validate new project name. + try: + validate_project_name(form.new_project_name.data, self.request) + except HTTPException as exc: + form.new_project_name.errors.append(exc.detail) + return default_response + # Add new project. + project = add_project(form.new_project_name.data, self.request) + + # Add project to organization. + self.organization_service.add_organization_project( + organization_id=self.organization.id, + project_id=project.id, + ) + + # Record events. + self.organization.record_event( + tag="organization:organization_project:add", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + "project_name": project.name, + }, + ) + project.record_event( + tag="project:organization_project:add", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + "organization_name": self.organization.name, + }, + ) + + # Send notification emails. + owner_users = set( + organization_owners(self.request, self.organization) + + project_owners(self.request, project) + ) + send_organization_project_added_email( + self.request, + owner_users, + organization_name=self.organization.name, + project_name=project.name, + ) + + # Display notification message. 
+ self.request.session.flash( + f"Added the project {project.name!r} to {self.organization.name!r}", + queue="success", + ) + + # Refresh projects list. + return HTTPSeeOther(self.request.path) + + @view_config( route_name="manage.organization.roles", context=Organization, @@ -1767,6 +1986,7 @@ def _key(project): return project.created all_user_projects = user_projects(request) + projects = set(request.user.projects) | set(all_user_projects["projects_owned"]) projects_owned = set( project.name for project in all_user_projects["projects_owned"] ) @@ -1787,7 +2007,7 @@ def _key(project): (role_invite.project, role_invite.token) for role_invite in project_invites ] return { - "projects": sorted(request.user.projects, key=_key, reverse=True), + "projects": sorted(projects, key=_key, reverse=True), "projects_owned": projects_owned, "projects_sole_owned": projects_sole_owned, "projects_requiring_2fa": projects_requiring_2fa, @@ -1810,14 +2030,32 @@ def __init__(self, project, request): self.project = project self.request = request self.toggle_2fa_requirement_form_class = Toggle2FARequirementForm + self.transfer_organization_project_form_class = TransferOrganizationProjectForm @view_config(request_method="GET") def manage_project_settings(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + organization_choices = set() + else: + all_user_organizations = user_organizations(self.request) + organizations_owned = set( + organization.name + for organization in all_user_organizations["organizations_owned"] + ) + organization_choices = organizations_owned - ( + {self.project.organization.name} if self.project.organization else set() + ) + return { "project": self.project, "MAX_FILESIZE": MAX_FILESIZE, "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, "toggle_2fa_form": self.toggle_2fa_requirement_form_class(), + "transfer_organization_project_form": ( + self.transfer_organization_project_form_class( + organization_choices=organization_choices, + ) + ), } @view_config( @@ -2112,6 +2350,230 @@ def delete_oidc_provider(self): return self.default_response +@view_config( + route_name="manage.project.remove_organization_project", + context=Project, + uses_session=True, + require_methods=["POST"], + permission="manage:project", + has_translations=True, + require_reauth=True, +) +def remove_organization_project(project, request): + if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + request.session.flash("Organizations are disabled", queue="error") + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + + confirm_project( + project, + request, + fail_route="manage.project.settings", + field_name="confirm_remove_organization_project_name", + error_message="Could not remove project from organization", + ) + + if not project_owners(request, project): + request.session.flash( + "Could not remove project from organization", queue="error" + ) + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + + # Remove project from current organization. 
+ organization_service = request.find_service(IOrganizationService, context=None) + if organization := project.organization: + organization_service.delete_organization_project(organization.id, project.id) + organization.record_event( + tag="organization:organization_project:remove", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + }, + ) + project.record_event( + tag="project:organization_project:remove", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "organization_name": organization.name, + }, + ) + # Send notification emails. + owner_users = set( + organization_owners(request, organization) + + project_owners(request, project) + ) + send_organization_project_removed_email( + request, + owner_users, + organization_name=organization.name, + project_name=project.name, + ) + # Display notification message. + request.session.flash( + f"Removed the project {project.name!r} from {organization.name!r}", + queue="success", + ) + + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + + +@view_config( + route_name="manage.project.transfer_organization_project", + context=Project, + uses_session=True, + require_methods=["POST"], + permission="manage:project", + has_translations=True, + require_reauth=True, +) +def transfer_organization_project(project, request): + if request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + request.session.flash("Organizations are disabled", queue="error") + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + + confirm_project( + project, + request, + fail_route="manage.project.settings", + field_name="confirm_transfer_organization_project_name", + error_message="Could not transfer project", + ) + + all_user_organizations = user_organizations(request) + organizations_owned = set( + organization.name + for organization in all_user_organizations["organizations_owned"] + ) + organization_choices = organizations_owned - ( + {project.organization.name} if project.organization else set() + ) + + form = TransferOrganizationProjectForm( + request.POST, + organization_choices=organization_choices, + ) + + if not form.validate(): + for error_list in form.errors.values(): + for error in error_list: + request.session.flash(error, queue="error") + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + + # Remove request user as individual project owner. + role = ( + request.db.query(Role) + .join(User) + .filter( + Role.role_name == "Owner", + Role.project == project, + Role.user == request.user, + ) + .first() + ) + if role: + request.db.delete(role) + request.db.add( + JournalEntry( + name=project.name, + action=f"remove {role.role_name} {role.user.username}", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + project.record_event( + tag="project:role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": role.role_name, + "target_user": role.user.username, + }, + ) + + # Remove project from current organization. 
+ organization_service = request.find_service(IOrganizationService, context=None) + if organization := project.organization: + organization_service.delete_organization_project(organization.id, project.id) + organization.record_event( + tag="organization:organization_project:remove", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + }, + ) + project.record_event( + tag="project:organization_project:remove", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "organization_name": organization.name, + }, + ) + # Send notification emails. + owner_users = set( + organization_owners(request, organization) + + project_owners(request, project) + ) + send_organization_project_removed_email( + request, + owner_users, + organization_name=organization.name, + project_name=project.name, + ) + + # Add project to selected organization. + organization = organization_service.get_organization_by_name(form.organization.data) + organization_service.add_organization_project(organization.id, project.id) + organization.record_event( + tag="organization:organization_project:add", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + }, + ) + project.record_event( + tag="project:organization_project:add", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "organization_name": organization.name, + }, + ) + + # Send notification emails. + owner_users = set( + organization_owners(request, organization) + project_owners(request, project) + ) + send_organization_project_added_email( + request, + owner_users, + organization_name=organization.name, + project_name=project.name, + ) + + request.session.flash( + f"Transferred the project {project.name!r} to {organization.name!r}", + queue="success", + ) + + return HTTPSeeOther( + request.route_path("manage.project.settings", project_name=project.name) + ) + + def get_user_role_in_project(project, user, request): return ( request.db.query(Role) diff --git a/warehouse/migrations/versions/b08bcde4183c_remove_organization_project_is_active.py b/warehouse/migrations/versions/b08bcde4183c_remove_organization_project_is_active.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/b08bcde4183c_remove_organization_project_is_active.py @@ -0,0 +1,55 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +remove_organization_project_is_active + +Revision ID: b08bcde4183c +Revises: 94c844c2da96 +Create Date: 2022-05-24 19:22:41.034512 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "b08bcde4183c" +down_revision = "94c844c2da96" + +# Note: It is VERY important to ensure that a migration does not lock for a +# long period of time and to ensure that each individual migration does +# not break compatibility with the *previous* version of the code base. 
+# This is because the migrations will be ran automatically as part of the +# deployment process, but while the previous version of the code is still +# up and running. Thus backwards incompatible changes must be broken up +# over multiple migrations inside of multiple pull requests in order to +# phase them in over multiple deploys. + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("organization_project", "is_active") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "organization_project", + sa.Column( + "is_active", + sa.BOOLEAN(), + server_default=sa.text("false"), + autoincrement=False, + nullable=False, + ), + ) + # ### end Alembic commands ### diff --git a/warehouse/organizations/interfaces.py b/warehouse/organizations/interfaces.py --- a/warehouse/organizations/interfaces.py +++ b/warehouse/organizations/interfaces.py @@ -142,6 +142,22 @@ def update_organization(organization_id, **changes): attributes """ + def get_organization_project(organization_id, project_id): + """ + Return the organization project object that represents the given + organization and project or None + """ + + def add_organization_project(organization_id, project_id): + """ + Adds an association between the specified organization and project + """ + + def delete_organization_project(organization_id, project_id): + """ + Removes an association between the specified organization and project + """ + def record_event(organization_id, *, tag, additional=None): """ Creates a new Organization.Event for the given organization with the given diff --git a/warehouse/organizations/models.py b/warehouse/organizations/models.py --- a/warehouse/organizations/models.py +++ b/warehouse/organizations/models.py @@ -90,9 +90,8 @@ class OrganizationProject(db.Model): ), ) - __repr__ = make_repr("project_id", "organization_id", "is_active") + __repr__ = make_repr("project_id", "organization_id") - is_active = Column(Boolean, nullable=False, server_default=sql.false()) organization_id = Column( ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False, @@ -192,7 +191,7 @@ class Organization(HasEvents, db.Model): User, secondary=OrganizationRole.__table__, backref="organizations" # type: ignore # noqa ) projects = orm.relationship( - "Project", secondary=OrganizationProject.__table__, backref="organizations" # type: ignore # noqa + "Project", secondary=OrganizationProject.__table__, back_populates="organization" # type: ignore # noqa ) def record_event(self, *, tag, ip_address, additional={}): diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py --- a/warehouse/organizations/services.py +++ b/warehouse/organizations/services.py @@ -342,6 +342,45 @@ def update_organization(self, organization_id, **changes): return organization + def get_organization_project(self, organization_id, project_id): + """ + Return the organization project object that represents the given + organization project id or None + """ + return ( + self.db.query(OrganizationProject) + .filter( + OrganizationProject.organization_id == organization_id, + OrganizationProject.project_id == project_id, + ) + .first() + ) + + def add_organization_project(self, organization_id, project_id): + """ + Adds an association between the specified organization and project + """ + organization_project = OrganizationProject( + organization_id=organization_id, + project_id=project_id, + 
) + + self.db.add(organization_project) + self.db.flush() + + return organization_project + + def delete_organization_project(self, organization_id, project_id): + """ + Performs soft delete of association between specified organization and project + """ + organization_project = self.get_organization_project( + organization_id, project_id + ) + + self.db.delete(organization_project) + self.db.flush() + def record_event(self, organization_id, *, tag, additional=None): """ Creates a new Organization.Event for the given organization with the given diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -54,6 +54,12 @@ from warehouse.classifiers.models import Classifier from warehouse.events.models import HasEvents from warehouse.integrations.vulnerabilities.models import VulnerabilityRecord +from warehouse.organizations.models import ( + Organization, + OrganizationProject, + OrganizationRole, + OrganizationRoleType, +) from warehouse.sitemap.models import SitemapMixin from warehouse.utils import dotted_navigator from warehouse.utils.attrs import make_repr @@ -80,7 +86,7 @@ class Role(db.Model): ) user = orm.relationship(User, lazy=False) - project = orm.relationship("Project", lazy=False) + project = orm.relationship("Project", lazy=False, back_populates="roles") class RoleInvitationStatus(enum.Enum): @@ -135,6 +141,14 @@ def __getitem__(self, project): except NoResultFound: raise KeyError from None + def __contains__(self, project): + try: + self[project] + except KeyError: + return False + else: + return True + class TwoFactorRequireable: # Project owner requires 2FA for this project @@ -172,11 +186,16 @@ class Project(SitemapMixin, TwoFactorRequireable, HasEvents, db.Model): total_size_limit = Column(BigInteger, nullable=True) last_serial = Column(Integer, nullable=False, server_default=sql.text("0")) zscore = Column(Float, nullable=True) - total_size = Column(BigInteger, server_default=sql.text("0")) + organization = orm.relationship( + Organization, + secondary=OrganizationProject.__table__, # type: ignore + back_populates="projects", + uselist=False, + ) + roles = orm.relationship(Role, back_populates="project", passive_deletes=True) users = orm.relationship(User, secondary=Role.__table__, backref="projects") # type: ignore # noqa - releases = orm.relationship( "Release", backref="project", @@ -226,17 +245,26 @@ def __acl__(self): # Get all of the users for this project. query = session.query(Role).filter(Role.project == self) query = query.options(orm.lazyload("project")) - query = query.options(orm.joinedload("user").lazyload("emails")) - query = query.join(User).order_by(User.id.asc()) - for role in sorted( - query.all(), key=lambda x: ["Owner", "Maintainer"].index(x.role_name) + query = query.options(orm.lazyload("user")) + roles = {(role.user_id, role.role_name) for role in query.all()} + + # Add all organization owners for this project. 
+ if self.organization: + query = session.query(OrganizationRole).filter( + OrganizationRole.organization == self.organization, + OrganizationRole.role_name == OrganizationRoleType.Owner, + ) + query = query.options(orm.lazyload("organization")) + query = query.options(orm.lazyload("user")) + roles |= {(role.user_id, "Owner") for role in query.all()} + + for user_id, role_name in sorted( + roles, key=lambda x: (["Owner", "Maintainer"].index(x[1]), x[0]) ): - if role.role_name == "Owner": - acls.append( - (Allow, f"user:{role.user.id}", ["manage:project", "upload"]) - ) + if role_name == "Owner": + acls.append((Allow, f"user:{user_id}", ["manage:project", "upload"])) else: - acls.append((Allow, f"user:{role.user.id}", ["upload"])) + acls.append((Allow, f"user:{user_id}", ["upload"])) return acls @property @@ -251,6 +279,23 @@ def documentation_url(self): return request.route_url("legacy.docs", project=self.name) + @property + def owners(self): + """Return all owners who are owners of the project.""" + owner_roles = ( + orm.object_session(self) + .query(User.id) + .join(Role.user) + .filter(Role.role_name == "Owner", Role.project == self) + .subquery() + ) + return ( + orm.object_session(self) + .query(User) + .join(owner_roles, User.id == owner_roles.c.id) + .all() + ) + @property def all_versions(self): return ( diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -234,6 +234,13 @@ def includeme(config): traverse="/{organization_name}", domain=warehouse, ) + config.add_route( + "manage.organization.projects", + "/manage/organization/{organization_name}/projects/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ) config.add_route( "manage.organization.roles", "/manage/organization/{organization_name}/people/", @@ -277,6 +284,20 @@ def includeme(config): traverse="/{project_name}", domain=warehouse, ) + config.add_route( + "manage.project.remove_organization_project", + "/manage/project/{project_name}/remove_organization_project/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) + config.add_route( + "manage.project.transfer_organization_project", + "/manage/project/{project_name}/transfer_organization_project/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) config.add_route( "manage.project.delete_project", "/manage/project/{project_name}/delete_project/", diff --git a/warehouse/utils/project.py b/warehouse/utils/project.py --- a/warehouse/utils/project.py +++ b/warehouse/utils/project.py @@ -10,11 +10,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from packaging.utils import canonicalize_name -from pyramid.httpexceptions import HTTPSeeOther +from itertools import chain + +import stdlib_list +from packaging.utils import canonicalize_name +from pyramid.httpexceptions import ( + HTTPBadRequest, + HTTPConflict, + HTTPForbidden, + HTTPSeeOther, +) +from sqlalchemy import exists, func +from sqlalchemy.orm.exc import NoResultFound + +from warehouse.admin.flags import AdminFlagValue from warehouse.packaging.interfaces import IDocsStorage -from warehouse.packaging.models import JournalEntry +from warehouse.packaging.models import JournalEntry, ProhibitedProjectName, Project from warehouse.tasks import task @@ -28,15 +40,154 @@ def remove_documentation(task, request, project_name): task.retry(exc=exc) -def confirm_project(project, request, fail_route): - confirm = request.POST.get("confirm_project_name") +def _namespace_stdlib_list(module_list): + for module_name in module_list: + parts = module_name.split(".") + for i, part in enumerate(parts): + yield ".".join(parts[: i + 1]) + + +STDLIB_PROHIBITED = { + canonicalize_name(s.rstrip("-_.").lstrip("-_.")) + for s in chain.from_iterable( + _namespace_stdlib_list(stdlib_list.stdlib_list(version)) + for version in stdlib_list.short_versions + ) +} + + +def validate_project_name(name, request): + """ + Validate that a new project can be created with the given name. + """ + # Look up the project first before doing anything else, this is so we can + # automatically register it if we need to and can check permissions before + # going any further. + try: + # Find existing project or raise NoResultFound. + ( + request.db.query(Project.id) + .filter(Project.normalized_name == func.normalize_pep426_name(name)) + .one() + ) + + # Found existing project with conflicting name. + raise HTTPConflict( + ( + "The name {name!r} conflicts with an existing project. " + "See {projecthelp} for more information." + ).format( + name=name, + projecthelp=request.help_url(_anchor="project-name"), + ), + ) from None + except NoResultFound: + # Check for AdminFlag set by a PyPI Administrator disabling new project + # registration, reasons for this include Spammers, security + # vulnerabilities, or just wanting to be lazy and not worry ;) + if request.flags.enabled(AdminFlagValue.DISALLOW_NEW_PROJECT_REGISTRATION): + raise HTTPForbidden( + ( + "New project registration temporarily disabled. " + "See {projecthelp} for more information." + ).format(projecthelp=request.help_url(_anchor="admin-intervention")), + ) from None + + # Before we create the project, we're going to check our prohibited + # names to see if this project name prohibited, or if the project name + # is a close approximation of an existing project name. If it is, + # then we're going to deny the request to create this project. + _prohibited_name = request.db.query( + exists().where( + ProhibitedProjectName.name == func.normalize_pep426_name(name) + ) + ).scalar() + if _prohibited_name: + raise HTTPBadRequest( + ( + "The name {name!r} isn't allowed. " + "See {projecthelp} for more information." + ).format( + name=name, + projecthelp=request.help_url(_anchor="project-name"), + ), + ) from None + + _ultranormalize_collision = request.db.query( + exists().where( + func.ultranormalize_name(Project.name) == func.ultranormalize_name(name) + ) + ).scalar() + if _ultranormalize_collision: + raise HTTPBadRequest( + ( + "The name {name!r} is too similar to an existing project. " + "See {projecthelp} for more information." 
+ ).format( + name=name, + projecthelp=request.help_url(_anchor="project-name"), + ), + ) from None + + # Also check for collisions with Python Standard Library modules. + if canonicalize_name(name) in STDLIB_PROHIBITED: + raise HTTPBadRequest( + ( + "The name {name!r} isn't allowed (conflict with Python " + "Standard Library module name). See " + "{projecthelp} for more information." + ).format( + name=name, + projecthelp=request.help_url(_anchor="project-name"), + ), + ) from None + + # Project name is valid. + return True + + +def add_project(name, request): + """ + Attempts to create a project with the given name. + """ + project = Project(name=name) + request.db.add(project) + + # TODO: This should be handled by some sort of database trigger or a + # SQLAlchemy hook or the like instead of doing it inline in this + # view. + request.db.add( + JournalEntry( + name=project.name, + action="create", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + project.record_event( + tag="project:create", + ip_address=request.remote_addr, + additional={"created_by": request.user.username}, + ) + + return project + + +def confirm_project( + project, + request, + fail_route, + field_name="confirm_project_name", + error_message="Could not delete project", +): + confirm = request.POST.get(field_name) project_name = project.normalized_name if not confirm: request.session.flash("Confirm the request", queue="error") raise HTTPSeeOther(request.route_path(fail_route, project_name=project_name)) if canonicalize_name(confirm) != project.normalized_name: request.session.flash( - "Could not delete project - " + f"{error_message} - " + f"{confirm!r} is not the same as {project.normalized_name!r}", queue="error", )
diff --git a/tests/common/db/organizations.py b/tests/common/db/organizations.py --- a/tests/common/db/organizations.py +++ b/tests/common/db/organizations.py @@ -99,6 +99,5 @@ class Meta: model = OrganizationProject id = factory.Faker("uuid4", cast_to=None) - is_active = True organization = factory.SubFactory(OrganizationFactory) project = factory.SubFactory(ProjectFactory) diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -1833,6 +1833,78 @@ def test_send_new_organization_declined_email( ] +class TestOrganizationProjectEmails: + @pytest.fixture + def organization_project(self, pyramid_user): + self.user = pyramid_user + self.organization_name = "exampleorganization" + self.project_name = "exampleproject" + + @pytest.mark.parametrize( + ("email_template_name", "send_organization_project_email"), + [ + ("organization-project-added", email.send_organization_project_added_email), + ( + "organization-project-removed", + email.send_organization_project_removed_email, + ), + ], + ) + def test_send_organization_project_email( + self, + db_request, + organization_project, + make_email_renderers, + send_email, + email_template_name, + send_organization_project_email, + ): + subject_renderer, body_renderer, html_renderer = make_email_renderers( + email_template_name + ) + + result = send_organization_project_email( + db_request, + self.user, + organization_name=self.organization_name, + project_name=self.project_name, + ) + + assert result == { + "organization_name": self.organization_name, + "project_name": self.project_name, + } + subject_renderer.assert_(**result) + body_renderer.assert_(**result) + html_renderer.assert_(**result) + assert db_request.task.calls == [pretend.call(send_email)] + assert send_email.delay.calls == [ + pretend.call( + f"{self.user.name} <{self.user.email}>", + { + "subject": subject_renderer.string_response, + "body_text": body_renderer.string_response, + "body_html": ( + f"<html>\n" + f"<head></head>\n" + f"<body><p>{html_renderer.string_response}</p></body>\n" + f"</html>\n" + ), + }, + { + "tag": "account:email:sent", + "user_id": self.user.id, + "additional": { + "from_": db_request.registry.settings["mail.sender"], + "to": self.user.email, + "subject": subject_renderer.string_response, + "redact_ip": False, + }, + }, + ) + ] + + class TestOrganizationMemberEmails: @pytest.fixture def organization_invite(self, pyramid_user): diff --git a/tests/unit/manage/test_forms.py b/tests/unit/manage/test_forms.py --- a/tests/unit/manage/test_forms.py +++ b/tests/unit/manage/test_forms.py @@ -21,6 +21,8 @@ from warehouse.manage import forms +from ...common.db.packaging import ProjectFactory + class TestCreateRoleForm: def test_creation(self): @@ -539,6 +541,88 @@ def test_validate_name_with_organization(self): ] +class TestAddOrganizationProjectForm: + def test_creation(self, pyramid_request): + pyramid_request.POST = MultiDict() + project_choices = {"foo"} + project_factory = pretend.stub() + + form = forms.AddOrganizationProjectForm( + pyramid_request.POST, + project_choices=project_choices, + project_factory=project_factory, + ) + + assert form.existing_project_name.choices == [ + ("", "Select project"), + ("foo", "foo"), + ] + + @pytest.mark.parametrize( + ("add_existing_project", "existing_project_name", "new_project_name", "errors"), + [ + # Validate existing project name. + ("true", "foo", "", {}), + # Validate existing project name missing. 
+ ("true", "", "", {"existing_project_name": ["Select project"]}), + # Validate new project name. + ("false", "", "bar", {}), + # Validate new project name missing. + ("false", "", "", {"new_project_name": ["Specify project name"]}), + # Validate new project name invalid character. + ( + "false", + "", + "@", + { + "new_project_name": [ + "Start and end with a letter or numeral containing " + "only ASCII numeric and '.', '_' and '-'." + ] + }, + ), + # Validate new project name already used. + ( + "false", + "", + "foo", + { + "new_project_name": [ + "This project name has already been used. " + "Choose a different project name." + ] + }, + ), + ], + ) + def test_validate( + self, + pyramid_request, + add_existing_project, + existing_project_name, + new_project_name, + errors, + ): + pyramid_request.POST = MultiDict( + { + "add_existing_project": add_existing_project, + "existing_project_name": existing_project_name, + "new_project_name": new_project_name, + } + ) + project_choices = {"foo"} + project_factory = {"foo": ProjectFactory.create(name="foo")} + + form = forms.AddOrganizationProjectForm( + pyramid_request.POST, + project_choices=project_choices, + project_factory=project_factory, + ) + + assert not form.validate() if errors else form.validate() + assert form.errors == errors + + class TestSaveAccountForm: def test_public_email_verified(self): email = pretend.stub(verified=True, public=False, email="[email protected]") diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ b/tests/unit/manage/test_views.py @@ -2976,6 +2976,405 @@ def test_delete_organization_disable_organizations(self, db_request): view.delete_organization() +class TestManageOrganizationProjects: + def test_manage_organization_projects( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + add_organization_project_obj = pretend.stub() + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.manage_organization_projects() + + assert view.request == db_request + assert view.organization_service == organization_service + assert result == { + "organization": organization, + "active_projects": view.active_projects, + "projects_owned": set(), + "projects_sole_owned": set(), + "projects_requiring_2fa": set(), + "add_organization_project_form": add_organization_project_obj, + } + assert len(add_organization_project_cls.calls) == 1 + + def test_manage_organization_projects_disable_organizations(self, db_request): + organization = OrganizationFactory.create() + + view = views.ManageOrganizationProjectsViews(organization, db_request) + with pytest.raises(HTTPNotFound): + view.manage_organization_projects() + + def test_add_organization_project_existing_project( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + project = ProjectFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, 
role_name="Owner") + + add_organization_project_obj = pretend.stub( + add_existing_project=pretend.stub(data=True), + existing_project_name=pretend.stub(data=project.name), + validate=lambda *a, **kw: True, + ) + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + def add_organization_project(*args, **kwargs): + organization.projects.append(project) + + monkeypatch.setattr( + organization_service, "add_organization_project", add_organization_project + ) + + send_organization_project_added_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_added_email", + send_organization_project_added_email, + ) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.add_organization_project() + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == db_request.path + assert len(add_organization_project_cls.calls) == 1 + assert len(organization.projects) == 2 + assert send_organization_project_added_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=organization.name, + project_name=project.name, + ) + ] + + def test_add_organization_project_existing_project_no_individual_owner( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + project = ProjectFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + + add_organization_project_obj = pretend.stub( + add_existing_project=pretend.stub(data=True), + existing_project_name=pretend.stub(data=project.name), + validate=lambda *a, **kw: True, + ) + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + def add_organization_project(*args, **kwargs): + organization.projects.append(project) + + monkeypatch.setattr( + organization_service, "add_organization_project", add_organization_project + ) + + send_organization_project_added_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_added_email", + send_organization_project_added_email, + ) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.add_organization_project() + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == db_request.path + assert len(add_organization_project_cls.calls) == 1 + assert len(organization.projects) == 2 + assert send_organization_project_added_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=organization.name, + project_name=project.name, + ) + ] + + def test_add_organization_project_existing_project_invalid( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + project = ProjectFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, 
role_name="Owner") + + add_organization_project_obj = pretend.stub( + add_existing_project=pretend.stub(data=True), + existing_project_name=pretend.stub(data=project.name), + validate=lambda *a, **kw: False, + ) + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + def add_organization_project(*args, **kwargs): + organization.projects.append(project) + + monkeypatch.setattr( + organization_service, "add_organization_project", add_organization_project + ) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.add_organization_project() + + assert result == { + "organization": organization, + "active_projects": view.active_projects, + "projects_owned": {project.name, organization.projects[0].name}, + "projects_sole_owned": {project.name, organization.projects[0].name}, + "projects_requiring_2fa": set(), + "add_organization_project_form": add_organization_project_obj, + } + assert len(add_organization_project_cls.calls) == 1 + assert len(organization.projects) == 1 + + def test_add_organization_project_new_project( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + db_request.help_url = lambda *a, **kw: "" + + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + project = ProjectFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + add_organization_project_obj = pretend.stub( + add_existing_project=pretend.stub(data=False), + new_project_name=pretend.stub(data=project.name), + validate=lambda *a, **kw: True, + ) + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + validate_project_name = pretend.call_recorder(lambda *a, **kw: True) + monkeypatch.setattr(views, "validate_project_name", validate_project_name) + + add_project = pretend.call_recorder(lambda *a, **kw: project) + monkeypatch.setattr(views, "add_project", add_project) + + def add_organization_project(*args, **kwargs): + organization.projects.append(project) + + monkeypatch.setattr( + organization_service, "add_organization_project", add_organization_project + ) + + send_organization_project_added_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_added_email", + send_organization_project_added_email, + ) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.add_organization_project() + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == db_request.path + assert validate_project_name.calls == [pretend.call(project.name, db_request)] + assert add_project.calls == [pretend.call(project.name, db_request)] + assert len(add_organization_project_cls.calls) == 1 + assert len(organization.projects) == 2 + assert send_organization_project_added_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=organization.name, + project_name=project.name, + ) + ] + + def test_add_organization_project_new_project_exception( + self, + db_request, + pyramid_user, + organization_service, + 
enable_organizations, + monkeypatch, + ): + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + project = ProjectFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + add_organization_project_obj = pretend.stub( + add_existing_project=pretend.stub(data=False), + new_project_name=pretend.stub(data=project.name, errors=[]), + validate=lambda *a, **kw: True, + ) + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + def validate_project_name(*a, **kw): + raise HTTPBadRequest("error-message") + + monkeypatch.setattr(views, "validate_project_name", validate_project_name) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.add_organization_project() + + assert result == { + "organization": organization, + "active_projects": view.active_projects, + "projects_owned": {project.name, organization.projects[0].name}, + "projects_sole_owned": {project.name, organization.projects[0].name}, + "projects_requiring_2fa": set(), + "add_organization_project_form": add_organization_project_obj, + } + assert add_organization_project_obj.new_project_name.errors == ["error-message"] + assert len(organization.projects) == 1 + + def test_add_organization_project_new_project_name_conflict( + self, + db_request, + pyramid_user, + organization_service, + enable_organizations, + monkeypatch, + ): + db_request.help_url = lambda *a, **kw: "help-url" + + organization = OrganizationFactory.create() + organization.projects = [ProjectFactory.create()] + + project = ProjectFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + add_organization_project_obj = pretend.stub( + add_existing_project=pretend.stub(data=False), + new_project_name=pretend.stub(data=project.name, errors=[]), + validate=lambda *a, **kw: True, + ) + add_organization_project_cls = pretend.call_recorder( + lambda *a, **kw: add_organization_project_obj + ) + monkeypatch.setattr( + views, "AddOrganizationProjectForm", add_organization_project_cls + ) + + view = views.ManageOrganizationProjectsViews(organization, db_request) + result = view.add_organization_project() + + assert result == { + "organization": organization, + "active_projects": view.active_projects, + "projects_owned": {project.name, organization.projects[0].name}, + "projects_sole_owned": {project.name, organization.projects[0].name}, + "projects_requiring_2fa": set(), + "add_organization_project_form": add_organization_project_obj, + } + assert add_organization_project_obj.new_project_name.errors == [ + ( + "The name {name!r} conflicts with an existing project. " + "See {projecthelp} for more information." 
+ ).format( + name=project.name, + projecthelp="help-url", + ) + ] + assert len(organization.projects) == 1 + + def test_add_organization_project_disable_organizations(self, db_request): + organization = OrganizationFactory.create() + + view = views.ManageOrganizationProjectsViews(organization, db_request) + with pytest.raises(HTTPNotFound): + view.add_organization_project() + + class TestManageOrganizationRoles: def test_get_manage_organization_roles(self, db_request, enable_organizations): organization = OrganizationFactory.create(name="foobar") @@ -4029,18 +4428,32 @@ def test_manage_projects(self, db_request): class TestManageProjectSettings: - def test_manage_project_settings(self): - request = pretend.stub() - project = pretend.stub() + @pytest.mark.parametrize("enabled", [False, True]) + def test_manage_project_settings(self, enabled, monkeypatch): + request = pretend.stub( + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: enabled)) + ) + project = pretend.stub(organization=None) view = views.ManageProjectSettingsViews(project, request) - form = pretend.stub - view.toggle_2fa_requirement_form_class = lambda: form + form = pretend.stub() + view.toggle_2fa_requirement_form_class = lambda *a, **kw: form + view.transfer_organization_project_form_class = lambda *a, **kw: form + + user_organizations = pretend.call_recorder( + lambda *a, **kw: { + "organizations_managed": [], + "organizations_owned": [], + "organizations_billing": [], + } + ) + monkeypatch.setattr(views, "user_organizations", user_organizations) assert view.manage_project_settings() == { "project": project, "MAX_FILESIZE": MAX_FILESIZE, "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, "toggle_2fa_form": form, + "transfer_organization_project_form": form, } @pytest.mark.parametrize("enabled", [False, None]) @@ -4175,6 +4588,506 @@ def test_toggle_2fa_requirement_non_critical( assert event.tag == tag assert event.additional == {"modified_by": db_request.user.username} + def test_remove_organization_project_no_confirm(self): + project = pretend.stub(normalized_name="foo") + request = pretend.stub( + POST={}, + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + route_path=lambda *a, **kw: "/foo/bar/", + ) + + with pytest.raises(HTTPSeeOther) as exc: + views.remove_organization_project(project, request) + assert exc.value.status_code == 303 + assert exc.value.headers["Location"] == "/foo/bar/" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + assert request.session.flash.calls == [ + pretend.call("Confirm the request", queue="error") + ] + + def test_remove_organization_project_wrong_confirm(self): + project = pretend.stub(normalized_name="foo") + request = pretend.stub( + POST={"confirm_remove_organization_project_name": "bar"}, + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + route_path=lambda *a, **kw: "/foo/bar/", + ) + + with pytest.raises(HTTPSeeOther) as exc: + views.remove_organization_project(project, request) + assert exc.value.status_code == 303 + assert exc.value.headers["Location"] == "/foo/bar/" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + assert request.session.flash.calls == [ + pretend.call( + ( + "Could not remove project from organization - " + "'bar' is not the same as 'foo'" + ), + queue="error", + ) + ] 
+ + def test_remove_organization_project_disable_organizations(self): + project = pretend.stub(name="foo", normalized_name="foo") + request = pretend.stub( + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + ) + + result = views.remove_organization_project(project, request) + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + assert request.session.flash.calls == [ + pretend.call("Organizations are disabled", queue="error") + ] + assert request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + + def test_remove_organization_project_no_current_organization( + self, monkeypatch, db_request + ): + project = ProjectFactory.create(name="foo") + + db_request.POST = MultiDict( + { + "confirm_remove_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + send_organization_project_removed_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_removed_email", + send_organization_project_removed_email, + ) + + result = views.remove_organization_project(project, db_request) + + assert db_request.session.flash.calls == [] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert send_organization_project_removed_email.calls == [] + + def test_remove_organization_project_no_individual_owner( + self, monkeypatch, db_request + ): + project = ProjectFactory.create(name="foo") + project.organization = OrganizationFactory.create(name="bar") + + db_request.POST = MultiDict( + { + "confirm_remove_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + OrganizationRoleFactory.create( + organization=project.organization, user=db_request.user, role_name="Owner" + ) + + result = views.remove_organization_project(project, db_request) + + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert db_request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + assert db_request.session.flash.calls == [ + pretend.call("Could not remove project from organization", queue="error") + ] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + + def test_remove_organization_project(self, monkeypatch, db_request): + project = ProjectFactory.create(name="foo") + project.organization = OrganizationFactory.create(name="bar") + + db_request.POST = 
MultiDict( + { + "confirm_remove_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + OrganizationRoleFactory.create( + organization=project.organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + send_organization_project_removed_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_removed_email", + send_organization_project_removed_email, + ) + + result = views.remove_organization_project(project, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Removed the project 'foo' from 'bar'", queue="success") + ] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert send_organization_project_removed_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=project.organization.name, + project_name=project.name, + ), + ] + + def test_transfer_organization_project_no_confirm(self): + project = pretend.stub(normalized_name="foo") + request = pretend.stub( + POST={}, + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + route_path=lambda *a, **kw: "/foo/bar/", + ) + + with pytest.raises(HTTPSeeOther) as exc: + views.transfer_organization_project(project, request) + assert exc.value.status_code == 303 + assert exc.value.headers["Location"] == "/foo/bar/" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + assert request.session.flash.calls == [ + pretend.call("Confirm the request", queue="error") + ] + + def test_transfer_organization_project_wrong_confirm(self): + project = pretend.stub(normalized_name="foo") + request = pretend.stub( + POST={"confirm_transfer_organization_project_name": "bar"}, + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: False)), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + route_path=lambda *a, **kw: "/foo/bar/", + ) + + with pytest.raises(HTTPSeeOther) as exc: + views.transfer_organization_project(project, request) + assert exc.value.status_code == 303 + assert exc.value.headers["Location"] == "/foo/bar/" + + assert request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + assert request.session.flash.calls == [ + pretend.call( + "Could not transfer project - 'bar' is not the same as 'foo'", + queue="error", + ) + ] + + def test_transfer_organization_project_disable_organizations(self): + project = pretend.stub(name="foo", normalized_name="foo") + request = pretend.stub( + flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: True)), + route_path=pretend.call_recorder(lambda *a, **kw: "/the-redirect"), + session=pretend.stub(flash=pretend.call_recorder(lambda *a, **kw: None)), + ) + + result = views.transfer_organization_project(project, request) + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + assert 
request.flags.enabled.calls == [ + pretend.call(AdminFlagValue.DISABLE_ORGANIZATIONS) + ] + + assert request.session.flash.calls == [ + pretend.call("Organizations are disabled", queue="error") + ] + + assert request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + + def test_transfer_organization_project_no_current_organization( + self, monkeypatch, db_request + ): + organization = OrganizationFactory.create(name="baz") + project = ProjectFactory.create(name="foo") + + db_request.POST = MultiDict( + { + "organization": organization.normalized_name, + "confirm_transfer_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + send_organization_project_removed_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_removed_email", + send_organization_project_removed_email, + ) + + send_organization_project_added_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_added_email", + send_organization_project_added_email, + ) + + result = views.transfer_organization_project(project, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Transferred the project 'foo' to 'baz'", queue="success") + ] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert send_organization_project_removed_email.calls == [] + assert send_organization_project_added_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=organization.name, + project_name=project.name, + ) + ] + + def test_transfer_organization_project_no_individual_owner( + self, monkeypatch, db_request + ): + organization = OrganizationFactory.create(name="baz") + project = ProjectFactory.create(name="foo") + project.organization = OrganizationFactory.create(name="bar") + + db_request.POST = MultiDict( + { + "organization": organization.normalized_name, + "confirm_transfer_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + OrganizationRoleFactory.create( + organization=project.organization, user=db_request.user, role_name="Owner" + ) + + send_organization_project_removed_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_removed_email", + send_organization_project_removed_email, + ) + + send_organization_project_added_email = pretend.call_recorder( + lambda req, user, **k: None + ) + 
monkeypatch.setattr( + views, + "send_organization_project_added_email", + send_organization_project_added_email, + ) + + result = views.transfer_organization_project(project, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Transferred the project 'foo' to 'baz'", queue="success") + ] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert send_organization_project_removed_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=project.organization.name, + project_name=project.name, + ) + ] + assert send_organization_project_added_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=organization.name, + project_name=project.name, + ) + ] + + def test_transfer_organization_project_invalid(self, monkeypatch, db_request): + project = ProjectFactory.create(name="foo") + project.organization = OrganizationFactory.create(name="bar") + + db_request.POST = MultiDict( + { + "organization": "", + "confirm_transfer_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + OrganizationRoleFactory.create( + organization=project.organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + result = views.transfer_organization_project(project, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Select organization", queue="error") + ] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + + def test_transfer_organization_project(self, monkeypatch, db_request): + organization = OrganizationFactory.create(name="baz") + project = ProjectFactory.create(name="foo") + project.organization = OrganizationFactory.create(name="bar") + + db_request.POST = MultiDict( + { + "organization": organization.normalized_name, + "confirm_transfer_organization_project_name": project.normalized_name, + } + ) + db_request.flags = pretend.stub(enabled=pretend.call_recorder(lambda *a: False)) + db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect") + db_request.session = pretend.stub( + flash=pretend.call_recorder(lambda *a, **kw: None) + ) + db_request.user = UserFactory.create() + + OrganizationRoleFactory.create( + organization=organization, user=db_request.user, role_name="Owner" + ) + OrganizationRoleFactory.create( + organization=project.organization, user=db_request.user, role_name="Owner" + ) + RoleFactory.create(project=project, user=db_request.user, role_name="Owner") + + send_organization_project_removed_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_removed_email", + send_organization_project_removed_email, + ) + + send_organization_project_added_email = pretend.call_recorder( + lambda req, user, **k: None + ) + monkeypatch.setattr( + views, + "send_organization_project_added_email", + 
send_organization_project_added_email, + ) + + result = views.transfer_organization_project(project, db_request) + + assert db_request.session.flash.calls == [ + pretend.call("Transferred the project 'foo' to 'baz'", queue="success") + ] + assert db_request.route_path.calls == [ + pretend.call("manage.project.settings", project_name="foo") + ] + assert isinstance(result, HTTPSeeOther) + assert result.headers["Location"] == "/the-redirect" + assert send_organization_project_removed_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=project.organization.name, + project_name=project.name, + ) + ] + assert send_organization_project_added_email.calls == [ + pretend.call( + db_request, + {db_request.user}, + organization_name=organization.name, + project_name=project.name, + ) + ] + def test_delete_project_no_confirm(self): project = pretend.stub(normalized_name="foo") request = pretend.stub( diff --git a/tests/unit/organizations/test_services.py b/tests/unit/organizations/test_services.py --- a/tests/unit/organizations/test_services.py +++ b/tests/unit/organizations/test_services.py @@ -28,9 +28,11 @@ from ...common.db.organizations import ( OrganizationFactory, OrganizationInvitationFactory, + OrganizationProjectFactory, OrganizationRoleFactory, UserFactory, ) +from ...common.db.packaging import ProjectFactory def test_database_organizations_factory(): @@ -388,3 +390,44 @@ def test_update_organization(self, organization_service, db_request): ) .count() ) + + def test_get_organization_project(self, organization_service): + organization = OrganizationFactory.create() + project = ProjectFactory.create() + organization_project = OrganizationProjectFactory.create( + organization=organization, project=project + ) + + assert ( + organization_service.get_organization_project(organization.id, project.id) + == organization_project + ) + + def test_add_organization_project(self, organization_service, db_request): + organization = OrganizationFactory.create() + project = ProjectFactory.create() + + organization_service.add_organization_project(organization.id, project.id) + assert ( + db_request.db.query(OrganizationProject) + .filter( + OrganizationProject.organization_id == organization.id, + OrganizationProject.project_id == project.id, + ) + .count() + ) + + def test_delete_organization_project(self, organization_service, db_request): + organization = OrganizationFactory.create() + project = ProjectFactory.create() + OrganizationProjectFactory.create(organization=organization, project=project) + + organization_service.delete_organization_project(organization.id, project.id) + assert not ( + db_request.db.query(OrganizationProject) + .filter( + OrganizationProject.organization_id == organization.id, + OrganizationProject.project_id == project.id, + ) + .count() + ) diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -20,6 +20,11 @@ from warehouse.packaging.models import File, ProjectFactory, ReleaseURL +from ...common.db.organizations import ( + OrganizationFactory as DBOrganizationFactory, + OrganizationProjectFactory as DBOrganizationProjectFactory, + OrganizationRoleFactory as DBOrganizationRoleFactory, +) from ...common.db.packaging import ( FileFactory as DBFileFactory, ProjectFactory as DBProjectFactory, @@ -43,6 +48,13 @@ def test_travel_cant_find(self, db_request): with pytest.raises(KeyError): root[project.name + "invalid"] + def 
test_contains(self, db_request): + DBProjectFactory.create(name="foo") + root = ProjectFactory(db_request) + + assert "foo" in root + assert "bar" not in root + class TestProject: def test_traversal_finds(self, db_request): @@ -101,6 +113,10 @@ def test_acl(self, db_session): maintainer1 = DBRoleFactory.create(project=project, role_name="Maintainer") maintainer2 = DBRoleFactory.create(project=project, role_name="Maintainer") + organization = DBOrganizationFactory.create() + owner3 = DBOrganizationRoleFactory.create(organization=organization) + DBOrganizationProjectFactory.create(organization=organization, project=project) + acls = [] for location in lineage(project): try: @@ -120,6 +136,7 @@ def test_acl(self, db_session): [ (Allow, f"user:{owner1.user.id}", ["manage:project", "upload"]), (Allow, f"user:{owner2.user.id}", ["manage:project", "upload"]), + (Allow, f"user:{owner3.user.id}", ["manage:project", "upload"]), ], key=lambda x: x[1], ) + sorted( diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -252,6 +252,13 @@ def add_policy(name, filename): traverse="/{organization_name}", domain=warehouse, ), + pretend.call( + "manage.organization.projects", + "/manage/organization/{organization_name}/projects/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ), pretend.call( "manage.organization.roles", "/manage/organization/{organization_name}/people/", @@ -295,6 +302,20 @@ def add_policy(name, filename): traverse="/{project_name}", domain=warehouse, ), + pretend.call( + "manage.project.remove_organization_project", + "/manage/project/{project_name}/remove_organization_project/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), + pretend.call( + "manage.project.transfer_organization_project", + "/manage/project/{project_name}/transfer_organization_project/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), pretend.call( "manage.project.delete_project", "/manage/project/{project_name}/delete_project/",
Create a project

Feature request for organization account project in PyPI.

Description | The Owner can create a project
-- | --
User value | The project becomes associated with the Organization
Acceptance criteria | Email notification to Owner
Currently, creating a project on PyPI implicitly makes the initial uploader the Owner. This could be better specified as "Allow an Organization Owner to create an empty Project", which would allow new Organization-owned Projects to be created so that Organization/Team privileges can be applied to them, and so that the initial upload can be authenticated and authorized via those mechanisms, rather than someone needing to upload the project to create it and become owner, *then* transfer it. This is the Organization flavor of something like https://github.com/pypa/warehouse/issues/6378.
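To make that proposed flow concrete, here is a minimal, self-contained Python sketch of "an Organization Owner creates an empty Project", including the owner-notification acceptance criterion from the table above. The dataclasses and the `create_empty_organization_project` helper are hypothetical stand-ins invented for illustration; the actual implementation is the `add_organization_project` view and `organization_service.add_organization_project` exercised by the test patch above.

```python
# Hypothetical stand-ins for illustration only -- not Warehouse's real
# Organization/Project models or its organization service.
from dataclasses import dataclass, field


@dataclass
class Organization:
    name: str
    owners: set = field(default_factory=set)      # usernames of Owners
    projects: dict = field(default_factory=dict)  # project name -> Project


@dataclass
class Project:
    name: str
    organization: Organization


def create_empty_organization_project(org, project_name, actor):
    """Create a release-less project owned by the organization itself."""
    # Authorize against the Organization role, not an implicit
    # first-uploader Owner role.
    if actor not in org.owners:
        raise PermissionError(f"{actor!r} is not an owner of {org.name!r}")
    # Reject names that are already used, as Warehouse's form validation does.
    if project_name in org.projects:
        raise ValueError(f"project name {project_name!r} has already been used")
    project = Project(name=project_name, organization=org)
    org.projects[project_name] = project
    # Acceptance criterion from the issue: email notification to the Owner(s).
    for owner in org.owners:
        print(f"email to {owner}: project {project_name!r} added to {org.name!r}")
    return project


org = Organization(name="exampleorganization", owners={"alice"})
create_empty_organization_project(org, "exampleproject", actor="alice")
```

The design point matching the hint is that authorization happens against the Organization role up front, and later uploads can then be authorized through the project's ACL; the `test_acl` change in the test patch above asserts exactly that, granting an organization owner `manage:project` and `upload` on an organization-owned project.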
2022-05-28T22:45:15Z
[]
[]
pypi/warehouse
11,665
pypi__warehouse-11665
[ "11084" ]
8a3cd48bf4162b4681ca8bdd4e96055bdcc22e63
diff --git a/warehouse/config.py b/warehouse/config.py --- a/warehouse/config.py +++ b/warehouse/config.py @@ -444,6 +444,9 @@ def configure(settings=None): jglobals.setdefault( "RoleInvitationStatus", "warehouse.packaging.models:RoleInvitationStatus" ) + jglobals.setdefault( + "TeamProjectRoleType", "warehouse.organizations.models:TeamProjectRoleType" + ) # We'll store all of our templates in one location, warehouse/templates # so we'll go ahead and add that to the Jinja2 search path. diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -583,6 +583,92 @@ def send_organization_deleted_email(request, user, *, organization_name): } +@_email("team-created") +def send_team_created_email(request, user, *, organization_name, team_name): + return { + "organization_name": organization_name, + "team_name": team_name, + } + + +@_email("team-deleted") +def send_team_deleted_email(request, user, *, organization_name, team_name): + return { + "organization_name": organization_name, + "team_name": team_name, + } + + +@_email("team-member-added") +def send_team_member_added_email( + request, + email_recipients, + *, + user, + submitter, + organization_name, + team_name, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + "team_name": team_name, + } + + +@_email("added-as-team-member") +def send_added_as_team_member_email( + request, + user, + *, + submitter, + organization_name, + team_name, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + "team_name": team_name, + } + + +@_email("team-member-removed") +def send_team_member_removed_email( + request, + email_recipients, + *, + user, + submitter, + organization_name, + team_name, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + "team_name": team_name, + } + + +@_email("removed-as-team-member") +def send_removed_as_team_member_email( + request, + user, + *, + submitter, + organization_name, + team_name, +): + return { + "username": user.username, + "submitter": submitter.username, + "organization_name": organization_name, + "team_name": team_name, + } + + @_email("verify-project-role", allow_unverified=True) def send_project_role_verification_email( request, @@ -666,6 +752,76 @@ def send_role_changed_as_collaborator_email( } +@_email("team-collaborator-added") +def send_team_collaborator_added_email( + request, email_recipients, *, team, submitter, project_name, role +): + return { + "team_name": team.name, + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + +@_email("added-as-team-collaborator") +def send_added_as_team_collaborator_email( + request, email_recipients, *, team, submitter, project_name, role +): + return { + "team_name": team.name, + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + +@_email("team-collaborator-removed") +def send_team_collaborator_removed_email( + request, email_recipients, *, team, submitter, project_name +): + return { + "team_name": team.name, + "project": project_name, + "submitter": submitter.username, + } + + +@_email("removed-as-team-collaborator") +def send_removed_as_team_collaborator_email( + request, email_recipients, *, team, submitter, project_name +): + return { + "team_name": team.name, + "project": project_name, + "submitter": 
submitter.username, + } + + +@_email("team-collaborator-role-changed") +def send_team_collaborator_role_changed_email( + request, email_recipients, *, team, submitter, project_name, role +): + return { + "team_name": team.name, + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + +@_email("role-changed-as-team-collaborator") +def send_role_changed_as_team_collaborator_email( + request, email_recipients, *, team, submitter, project_name, role +): + return { + "team_name": team.name, + "project": project_name, + "submitter": submitter.username, + "role": role, + } + + @_email("two-factor-added") def send_two_factor_added_email(request, user, method): pretty_methods = {"totp": "TOTP", "webauthn": "WebAuthn"} diff --git a/warehouse/manage/forms.py b/warehouse/manage/forms.py --- a/warehouse/manage/forms.py +++ b/warehouse/manage/forms.py @@ -27,7 +27,11 @@ WebAuthnCredentialMixin, ) from warehouse.i18n import localize as _ -from warehouse.organizations.models import OrganizationRoleType, OrganizationType +from warehouse.organizations.models import ( + OrganizationRoleType, + OrganizationType, + TeamProjectRoleType, +) # /manage/account/ forms @@ -41,6 +45,20 @@ class RoleNameMixin: ) +class TeamProjectRoleNameMixin: + + team_project_role_name = wtforms.SelectField( + "Select permissions", + choices=[ + ("", "Select permissions"), + ("Upload", "Upload"), + ("Administer", "Administer"), + ], + coerce=lambda string: TeamProjectRoleType(string) if string else None, + validators=[wtforms.validators.DataRequired(message="Select permissions")], + ) + + class UsernameMixin: username = wtforms.StringField( @@ -62,10 +80,58 @@ def __init__(self, *args, user_service, **kwargs): self.user_service = user_service +class CreateInternalRoleForm( + RoleNameMixin, + TeamProjectRoleNameMixin, + UsernameMixin, + forms.Form, +): + is_team = wtforms.RadioField( + "Team or member?", + choices=[("true", "Team"), ("false", "Member")], + coerce=lambda string: True if string == "true" else False, + default="true", + validators=[wtforms.validators.InputRequired()], + ) + + team_name = wtforms.SelectField( + "Select team", + choices=[("", "Select team")], + validators=[wtforms.validators.InputRequired()], + ) + + def __init__(self, *args, team_choices, user_service, **kwargs): + super().__init__(*args, **kwargs) + self.team_name.choices += [(name, name) for name in sorted(team_choices)] + self.user_service = user_service + + # Do not check for required fields in browser. + self.team_name.flags.required = False + self.team_project_role_name.flags.required = False + self.username.flags.required = False + self.role_name.flags.required = False + + # Conditionally check for required fields on server. 
+ if self.is_team.data: + self.username.validators = [] + self.role_name.validators = [] + else: + self.team_name.validators = [] + self.team_project_role_name.validators = [] + + def validate_username(self, field): + if not self.is_team.data: + super().validate_username(field) + + class ChangeRoleForm(RoleNameMixin, forms.Form): pass +class ChangeTeamProjectRoleForm(TeamProjectRoleNameMixin, forms.Form): + pass + + class SaveAccountForm(forms.Form): __params__ = ["name", "public_email"] @@ -531,3 +597,62 @@ class SaveOrganizationForm(forms.Form): class CreateOrganizationForm(SaveOrganizationNameForm, SaveOrganizationForm): __params__ = SaveOrganizationNameForm.__params__ + SaveOrganizationForm.__params__ + + +class CreateTeamRoleForm(UsernameMixin, forms.Form): + def __init__(self, *args, user_choices, **kwargs): + super().__init__(*args, **kwargs) + self.user_choices = user_choices + + def validate_username(self, field): + if field.data not in self.user_choices: + raise wtforms.validators.ValidationError( + _( + "No organization owner, manager, or member found " + "with that username. Please try again." + ) + ) + + +class SaveTeamForm(forms.Form): + + __params__ = ["name"] + + name = wtforms.StringField( + validators=[ + wtforms.validators.DataRequired(message="Specify team name"), + wtforms.validators.Length( + max=50, + message=_("Choose a team name with 50 characters or less."), + ), + # the regexp below must match the CheckConstraint + # for the name field in organizations.models.Team + wtforms.validators.Regexp( + r"^([^\s/._-]|[^\s/._-].*[^\s/._-])$", + message=_( + "The team name is invalid. Team names cannot start " + "or end with a space, period, underscore, hyphen, " + "or slash. Choose a different team name." + ), + ), + ] + ) + + def __init__(self, *args, organization_id, organization_service, **kwargs): + super().__init__(*args, **kwargs) + self.organization_id = organization_id + self.organization_service = organization_service + + def validate_name(self, field): + if self.organization_service.find_teamid(self.organization_id, field.data): + raise wtforms.validators.ValidationError( + _( + "This team name has already been used. " + "Choose a different team name." 
+ ) + ) + + +class CreateTeamForm(SaveTeamForm): + + __params__ = SaveTeamForm.__params__ diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -44,10 +44,14 @@ from warehouse.admin.flags import AdminFlagValue from warehouse.email import ( send_account_deletion_email, + send_added_as_collaborator_email, + send_added_as_team_collaborator_email, + send_added_as_team_member_email, send_admin_new_organization_requested_email, send_admin_organization_deleted_email, send_admin_organization_renamed_email, send_canceled_as_invited_organization_member_email, + send_collaborator_added_email, send_collaborator_removed_email, send_collaborator_role_changed_email, send_email_verification_email, @@ -69,11 +73,21 @@ send_recovery_codes_generated_email, send_removed_as_collaborator_email, send_removed_as_organization_member_email, + send_removed_as_team_collaborator_email, + send_removed_as_team_member_email, send_removed_project_email, send_removed_project_release_email, send_removed_project_release_file_email, send_role_changed_as_collaborator_email, send_role_changed_as_organization_member_email, + send_role_changed_as_team_collaborator_email, + send_team_collaborator_added_email, + send_team_collaborator_removed_email, + send_team_collaborator_role_changed_email, + send_team_created_email, + send_team_deleted_email, + send_team_member_added_email, + send_team_member_removed_email, send_two_factor_added_email, send_two_factor_removed_email, send_unyanked_project_release_email, @@ -87,11 +101,15 @@ ChangeOrganizationRoleForm, ChangePasswordForm, ChangeRoleForm, + ChangeTeamProjectRoleForm, ConfirmPasswordForm, + CreateInternalRoleForm, CreateMacaroonForm, CreateOrganizationForm, CreateOrganizationRoleForm, CreateRoleForm, + CreateTeamForm, + CreateTeamRoleForm, DeleteMacaroonForm, DeleteTOTPForm, DeleteWebAuthnForm, @@ -100,6 +118,7 @@ SaveAccountForm, SaveOrganizationForm, SaveOrganizationNameForm, + SaveTeamForm, Toggle2FARequirementForm, TransferOrganizationProjectForm, ) @@ -113,6 +132,11 @@ OrganizationInvitationStatus, OrganizationRole, OrganizationRoleType, + Team, + TeamProjectRole, + TeamProjectRoleType, + TeamRole, + TeamRoleType, ) from warehouse.packaging.models import ( File, @@ -126,7 +150,7 @@ ) from warehouse.rate_limiting import IRateLimiter from warehouse.utils.http import is_safe_url -from warehouse.utils.organization import confirm_organization +from warehouse.utils.organization import confirm_organization, confirm_team from warehouse.utils.paginate import paginate_url_factory from warehouse.utils.project import ( add_project, @@ -183,10 +207,21 @@ def user_projects(request): .subquery() ) + teams = ( + request.db.query(Team.id) + .join(TeamRole.team) + .filter(TeamRole.user == request.user) + .subquery() + ) + projects_owned = projects_owned.union( request.db.query(Project.id.label("id")) .join(Organization.projects) - .join(organizations_owned, Organization.id == organizations_owned.c.id) + .join(organizations_owned, Organization.id == organizations_owned.c.id), + request.db.query(Project.id.label("id")) + .join(TeamProjectRole.project) + .join(teams, TeamProjectRole.team_id == teams.c.id) + .filter(TeamProjectRole.role_name == TeamProjectRoleType.Administer), ) with_sole_owner = with_sole_owner.union( @@ -1125,6 +1160,36 @@ def organization_owners(request, organization): return request.db.query(User).join(owner_roles, User.id == owner_roles.c.id).all() +def organization_managers(request, organization): 
+ """Return all users who are managers of the organization.""" + manager_roles = ( + request.db.query(User.id) + .join(OrganizationRole.user) + .filter( + OrganizationRole.role_name == OrganizationRoleType.Manager, + OrganizationRole.organization == organization, + ) + .subquery() + ) + return ( + request.db.query(User).join(manager_roles, User.id == manager_roles.c.id).all() + ) + + +def organization_members(request, organization): + """Return all users who are members of the organization.""" + member_roles = ( + request.db.query(User.id) + .join(OrganizationRole.user) + .filter( + OrganizationRole.role_name == OrganizationRoleType.Member, + OrganizationRole.organization == organization, + ) + .subquery() + ) + return request.db.query(User).join(member_roles, User.id == member_roles.c.id).all() + + @view_defaults( route_name="manage.organizations", renderer="manage/organizations.html", @@ -1432,6 +1497,99 @@ def delete_organization(self): return HTTPSeeOther(self.request.route_path("manage.organizations")) +@view_defaults( + route_name="manage.organization.teams", + context=Organization, + renderer="manage/organization/teams.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:organization", + has_translations=True, + require_reauth=True, +) +class ManageOrganizationTeamsViews: + def __init__(self, organization, request): + self.organization = organization + self.request = request + self.organization_service = request.find_service( + IOrganizationService, context=None + ) + + @property + def default_response(self): + return { + "organization": self.organization, + "create_team_form": CreateTeamForm( + self.request.POST, + organization_id=self.organization.id, + organization_service=self.organization_service, + ), + } + + @view_config(request_method="GET", permission="view:organization") + def manage_teams(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + return self.default_response + + @view_config(request_method="POST") + def create_team(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + # Get and validate form from default response. + default_response = self.default_response + form = default_response["create_team_form"] + if not form.validate(): + return default_response + + # Add team to organization. + team = self.organization_service.add_team( + organization_id=self.organization.id, + name=form.name.data, + ) + + # Record events. + self.organization.record_event( + tag="organization:team:create", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + "team_name": team.name, + }, + ) + team.record_event( + tag="team:create", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + }, + ) + + # Send notification emails. + owner_and_manager_users = set( + organization_owners(self.request, self.organization) + + organization_managers(self.request, self.organization) + ) + send_team_created_email( + self.request, + owner_and_manager_users, + organization_name=self.organization.name, + team_name=team.name, + ) + + # Display notification message. + self.request.session.flash( + f"Created team {team.name!r} in {self.organization.name!r}", + queue="success", + ) + + # Refresh teams list. 
+ return HTTPSeeOther(self.request.path) + + @view_defaults( route_name="manage.organization.projects", context=Organization, @@ -1984,163 +2142,571 @@ def delete_organization_role(organization, request): ) -@view_config( - route_name="manage.projects", - renderer="manage/projects.html", - uses_session=True, - permission="manage:user", - has_translations=True, -) -def manage_projects(request): - def _key(project): - if project.releases: - return project.releases[0].created - return project.created - - all_user_projects = user_projects(request) - projects = set(request.user.projects) | set(all_user_projects["projects_owned"]) - projects_owned = set( - project.name for project in all_user_projects["projects_owned"] - ) - projects_sole_owned = set( - project.name for project in all_user_projects["projects_sole_owned"] - ) - projects_requiring_2fa = set( - project.name for project in all_user_projects["projects_requiring_2fa"] - ) - - project_invites = ( - request.db.query(RoleInvitation) - .filter(RoleInvitation.invite_status == RoleInvitationStatus.Pending) - .filter(RoleInvitation.user == request.user) - .all() - ) - project_invites = [ - (role_invite.project, role_invite.token) for role_invite in project_invites - ] - return { - "projects": sorted(projects, key=_key, reverse=True), - "projects_owned": projects_owned, - "projects_sole_owned": projects_sole_owned, - "projects_requiring_2fa": projects_requiring_2fa, - "project_invites": project_invites, - } - - @view_defaults( - route_name="manage.project.settings", - context=Project, - renderer="manage/project/settings.html", + route_name="manage.team.settings", + context=Team, + renderer="manage/team/settings.html", uses_session=True, - permission="manage:project", + require_csrf=True, + require_methods=False, + permission="manage:team", has_translations=True, require_reauth=True, - require_methods=False, ) -class ManageProjectSettingsViews: - def __init__(self, project, request): - self.project = project +class ManageTeamSettingsViews: + def __init__(self, team, request): + self.team = team self.request = request - self.toggle_2fa_requirement_form_class = Toggle2FARequirementForm - self.transfer_organization_project_form_class = TransferOrganizationProjectForm - - @view_config(request_method="GET") - def manage_project_settings(self): - if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): - organization_choices = set() - else: - all_user_organizations = user_organizations(self.request) - organizations_owned = set( - organization.name - for organization in all_user_organizations["organizations_owned"] - ) - organization_choices = organizations_owned - ( - {self.project.organization.name} if self.project.organization else set() - ) + self.user_service = request.find_service(IUserService, context=None) + self.organization_service = request.find_service( + IOrganizationService, context=None + ) + @property + def default_response(self): return { - "project": self.project, - "MAX_FILESIZE": MAX_FILESIZE, - "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, - "toggle_2fa_form": self.toggle_2fa_requirement_form_class(), - "transfer_organization_project_form": ( - self.transfer_organization_project_form_class( - organization_choices=organization_choices, - ) + "team": self.team, + "save_team_form": SaveTeamForm( + name=self.team.name, + organization_id=self.team.organization_id, + organization_service=self.organization_service, ), } - @view_config( - request_method="POST", - request_param=Toggle2FARequirementForm.__params__, - 
require_reauth=True, - ) - def toggle_2fa_requirement(self): - if not self.request.registry.settings[ - "warehouse.two_factor_requirement.enabled" - ]: + @view_config(request_method="GET", permission="view:team") + def manage_team(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): raise HTTPNotFound - if self.project.pypi_mandates_2fa: - self.request.session.flash( - "2FA requirement cannot be disabled for critical projects", - queue="error", - ) - elif self.project.owners_require_2fa: - self.project.owners_require_2fa = False - self.project.record_event( - tag="project:owners_require_2fa:disabled", - ip_address=self.request.remote_addr, - additional={"modified_by": self.request.user.username}, - ) - self.request.session.flash( - f"2FA requirement disabled for { self.project.name }", - queue="success", - ) - else: - self.project.owners_require_2fa = True - self.project.record_event( - tag="project:owners_require_2fa:enabled", - ip_address=self.request.remote_addr, - additional={"modified_by": self.request.user.username}, - ) - self.request.session.flash( - f"2FA requirement enabled for { self.project.name }", - queue="success", - ) + return self.default_response - return HTTPSeeOther( - self.request.route_path( - "manage.project.settings", project_name=self.project.name - ) + @view_config(request_method="POST", request_param=SaveTeamForm.__params__) + def save_team(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + form = SaveTeamForm( + self.request.POST, + organization_id=self.team.organization_id, + organization_service=self.organization_service, ) + if form.validate(): + name = form.name.data + self.organization_service.rename_team(self.team.id, name) + self.request.session.flash("Team name updated", queue="success") + return HTTPSeeOther( + self.request.route_path( + "manage.team.settings", + organization_name=self.team.organization.normalized_name, + team_name=self.team.normalized_name, + ) + ) -@view_defaults( - context=Project, - route_name="manage.project.settings.publishing", - renderer="manage/project/publishing.html", - uses_session=True, - require_csrf=True, - require_methods=False, - permission="manage:project", - has_translations=True, - require_reauth=True, - http_cache=0, -) -class ManageOIDCProviderViews: - def __init__(self, project, request): - self.request = request - self.project = project - self.oidc_enabled = self.request.registry.settings["warehouse.oidc.enabled"] - self.metrics = self.request.find_service(IMetricsService, context=None) + return {**self.default_response, "save_team_form": form} - @property - def _ratelimiters(self): - return { - "user.oidc": self.request.find_service( - IRateLimiter, name="user_oidc.provider.register" - ), + @view_config(request_method="POST", request_param=["confirm_team_name"]) + def delete_team(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + # Confirm team name. + confirm_team(self.team, self.request, fail_route="manage.team.settings") + + # Get organization and team name before deleting team. + organization = self.team.organization + team_name = self.team.name + + # Record events. 
+ organization.record_event( + tag="organization:team:delete", + ip_address=self.request.remote_addr, + additional={ + "deleted_by_user_id": str(self.request.user.id), + "team_name": team_name, + }, + ) + self.team.record_event( + tag="team:delete", + ip_address=self.request.remote_addr, + additional={ + "deleted_by_user_id": str(self.request.user.id), + }, + ) + + # Delete team. + self.organization_service.delete_team(self.team.id) + + # Send notification emails. + owner_and_manager_users = set( + organization_owners(self.request, organization) + + organization_managers(self.request, organization) + ) + send_team_deleted_email( + self.request, + owner_and_manager_users, + organization_name=organization.name, + team_name=team_name, + ) + + # Display notification message. + self.request.session.flash("Team deleted", queue="success") + + return HTTPSeeOther( + self.request.route_path( + "manage.organization.teams", + organization_name=organization.normalized_name, + ) + ) + + +@view_defaults( + route_name="manage.team.projects", + context=Team, + renderer="manage/team/projects.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:team", + has_translations=True, + require_reauth=True, +) +class ManageTeamProjectsViews: + def __init__(self, team, request): + self.team = team + self.request = request + + @property + def active_projects(self): + return self.team.projects + + @property + def default_response(self): + active_projects = self.active_projects + all_user_projects = user_projects(self.request) + projects_owned = set( + project.name for project in all_user_projects["projects_owned"] + ) + projects_sole_owned = set( + project.name for project in all_user_projects["projects_sole_owned"] + ) + projects_requiring_2fa = set( + project.name for project in all_user_projects["projects_requiring_2fa"] + ) + + return { + "team": self.team, + "active_projects": active_projects, + "projects_owned": projects_owned, + "projects_sole_owned": projects_sole_owned, + "projects_requiring_2fa": projects_requiring_2fa, + } + + @view_config(request_method="GET", permission="view:team") + def manage_team_projects(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + return self.default_response + + +@view_defaults( + route_name="manage.team.roles", + context=Team, + renderer="manage/team/roles.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:team", + has_translations=True, + require_reauth=True, +) +class ManageTeamRolesViews: + def __init__(self, team, request): + self.team = team + self.request = request + self.organization_service = request.find_service( + IOrganizationService, context=None + ) + self.user_service = request.find_service(IUserService, context=None) + self.user_choices = sorted( + user.username + for user in set( + organization_owners(self.request, self.team.organization) + + organization_managers(self.request, self.team.organization) + + organization_members(self.request, self.team.organization) + ) + ) + + @property + def default_response(self): + return { + "team": self.team, + "roles": self.organization_service.get_team_roles(self.team.id), + "form": CreateTeamRoleForm( + self.request.POST, + user_choices=self.user_choices, + ), + } + + @view_config(request_method="GET", permission="view:team") + def manage_team_roles(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + raise HTTPNotFound + + return self.default_response + + 
@view_config(request_method="POST")
+    def create_team_role(self):
+        if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS):
+            raise HTTPNotFound
+
+        # Get and validate form from default response.
+        default_response = self.default_response
+        form = default_response["form"]
+        if not form.validate():
+            return default_response
+
+        # Check for existing role.
+        username = form.username.data
+        role_name = TeamRoleType.Member
+        user_id = self.user_service.find_userid(username)
+        existing_role = self.organization_service.get_team_role_by_user(
+            self.team.id, user_id
+        )
+        if existing_role:
+            self.request.session.flash(
+                self.request._(
+                    "User '${username}' is already a team member",
+                    mapping={"username": username},
+                ),
+                queue="error",
+            )
+            return default_response
+
+        # Add user to team.
+        role = self.organization_service.add_team_role(
+            team_id=self.team.id,
+            user_id=user_id,
+            role_name=role_name,
+        )
+
+        # Record events.
+        self.team.organization.record_event(
+            tag="organization:team_role:add",
+            ip_address=self.request.remote_addr,
+            additional={
+                "submitted_by_user_id": str(self.request.user.id),
+                "team_name": self.team.name,
+                "role_name": role_name.value,
+                "target_user_id": str(user_id),
+            },
+        )
+        self.team.record_event(
+            tag="team:team_role:add",
+            ip_address=self.request.remote_addr,
+            additional={
+                "submitted_by_user_id": str(self.request.user.id),
+                "role_name": role_name.value,
+                "target_user_id": str(user_id),
+            },
+        )
+        role.user.record_event(
+            tag="account:team_role:add",
+            ip_address=self.request.remote_addr,
+            additional={
+                "submitted_by_user_id": str(self.request.user.id),
+                "organization_name": self.team.organization.name,
+                "team_name": self.team.name,
+                "role_name": role_name.value,
+            },
+        )
+
+        # Send notification emails.
+        owner_and_manager_users = set(
+            organization_owners(self.request, self.team.organization)
+            + organization_managers(self.request, self.team.organization)
+        )
+        owner_and_manager_users.discard(role.user)
+        send_team_member_added_email(
+            self.request,
+            owner_and_manager_users,
+            user=role.user,
+            submitter=self.request.user,
+            organization_name=self.team.organization.name,
+            team_name=self.team.name,
+        )
+        send_added_as_team_member_email(
+            self.request,
+            role.user,
+            submitter=self.request.user,
+            organization_name=self.team.organization.name,
+            team_name=self.team.name,
+        )
+
+        # Display notification message.
+        self.request.session.flash(
+            f"Added the user {username!r} to the {self.team.name!r} team",
+            queue="success",
+        )
+
+        # Refresh team members list.
+        return HTTPSeeOther(self.request.path)
+
+    @view_config(
+        request_method="POST",
+        route_name="manage.team.delete_role",
+        permission="view:team",
+    )
+    def delete_team_role(self):
+        if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS):
+            raise HTTPNotFound
+
+        # Get team role.
+        role_id = self.request.POST["role_id"]
+        role = self.organization_service.get_team_role(role_id)
+
+        if not role or role.team_id != self.team.id:
+            self.request.session.flash("Could not find member", queue="error")
+        elif (
+            not self.request.has_permission("manage:team")
+            and role.user != self.request.user
+        ):
+            self.request.session.flash(
+                "Cannot remove other people from the team", queue="error"
+            )
+        else:
+            # Delete team role.
+            self.organization_service.delete_team_role(role.id)
+
+            # Record events.
+ self.team.organization.record_event( + tag="organization:team_role:delete", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + "team_name": self.team.name, + "role_name": role.role_name.value, + "target_user_id": str(role.user.id), + }, + ) + self.team.record_event( + tag="team:team_role:delete", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + "role_name": role.role_name.value, + "target_user_id": str(role.user.id), + }, + ) + role.user.record_event( + tag="account:team_role:delete", + ip_address=self.request.remote_addr, + additional={ + "submitted_by_user_id": str(self.request.user.id), + "organization_name": self.team.organization.name, + "team_name": self.team.name, + "role_name": role.role_name.value, + }, + ) + + # Send notification emails. + owner_and_manager_users = set( + organization_owners(self.request, self.team.organization) + + organization_managers(self.request, self.team.organization) + ) + owner_and_manager_users.discard(role.user) + send_team_member_removed_email( + self.request, + owner_and_manager_users, + user=role.user, + submitter=self.request.user, + organization_name=self.team.organization.name, + team_name=self.team.name, + ) + send_removed_as_team_member_email( + self.request, + role.user, + submitter=self.request.user, + organization_name=self.team.organization.name, + team_name=self.team.name, + ) + + # Display notification message. + self.request.session.flash("Removed from team", queue="success") + + # Refresh teams list. + return HTTPSeeOther( + self.request.route_path( + "manage.team.roles", + organization_name=self.team.organization.normalized_name, + team_name=self.team.normalized_name, + ) + ) + + +@view_config( + route_name="manage.projects", + renderer="manage/projects.html", + uses_session=True, + permission="manage:user", + has_translations=True, +) +def manage_projects(request): + def _key(project): + if project.releases: + return project.releases[0].created + return project.created + + projects = set(request.user.projects) + + all_user_projects = user_projects(request) + projects |= set(all_user_projects["projects_owned"]) + projects_owned = set( + project.name for project in all_user_projects["projects_owned"] + ) + projects_sole_owned = set( + project.name for project in all_user_projects["projects_sole_owned"] + ) + projects_requiring_2fa = set( + project.name for project in all_user_projects["projects_requiring_2fa"] + ) + + for team in request.user.teams: + projects |= set(team.projects) + + project_invites = ( + request.db.query(RoleInvitation) + .filter(RoleInvitation.invite_status == RoleInvitationStatus.Pending) + .filter(RoleInvitation.user == request.user) + .all() + ) + project_invites = [ + (role_invite.project, role_invite.token) for role_invite in project_invites + ] + return { + "projects": sorted(projects, key=_key, reverse=True), + "projects_owned": projects_owned, + "projects_sole_owned": projects_sole_owned, + "projects_requiring_2fa": projects_requiring_2fa, + "project_invites": project_invites, + } + + +@view_defaults( + route_name="manage.project.settings", + context=Project, + renderer="manage/project/settings.html", + uses_session=True, + permission="manage:project", + has_translations=True, + require_reauth=True, + require_methods=False, +) +class ManageProjectSettingsViews: + def __init__(self, project, request): + self.project = project + self.request = request + self.toggle_2fa_requirement_form_class 
= Toggle2FARequirementForm + self.transfer_organization_project_form_class = TransferOrganizationProjectForm + + @view_config(request_method="GET") + def manage_project_settings(self): + if self.request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS): + organization_choices = set() + else: + all_user_organizations = user_organizations(self.request) + organizations_owned = set( + organization.name + for organization in all_user_organizations["organizations_owned"] + ) + organization_choices = organizations_owned - ( + {self.project.organization.name} if self.project.organization else set() + ) + + return { + "project": self.project, + "MAX_FILESIZE": MAX_FILESIZE, + "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE, + "toggle_2fa_form": self.toggle_2fa_requirement_form_class(), + "transfer_organization_project_form": ( + self.transfer_organization_project_form_class( + organization_choices=organization_choices, + ) + ), + } + + @view_config( + request_method="POST", + request_param=Toggle2FARequirementForm.__params__, + require_reauth=True, + ) + def toggle_2fa_requirement(self): + if not self.request.registry.settings[ + "warehouse.two_factor_requirement.enabled" + ]: + raise HTTPNotFound + + if self.project.pypi_mandates_2fa: + self.request.session.flash( + "2FA requirement cannot be disabled for critical projects", + queue="error", + ) + elif self.project.owners_require_2fa: + self.project.owners_require_2fa = False + self.project.record_event( + tag="project:owners_require_2fa:disabled", + ip_address=self.request.remote_addr, + additional={"modified_by": self.request.user.username}, + ) + self.request.session.flash( + f"2FA requirement disabled for { self.project.name }", + queue="success", + ) + else: + self.project.owners_require_2fa = True + self.project.record_event( + tag="project:owners_require_2fa:enabled", + ip_address=self.request.remote_addr, + additional={"modified_by": self.request.user.username}, + ) + self.request.session.flash( + f"2FA requirement enabled for { self.project.name }", + queue="success", + ) + + return HTTPSeeOther( + self.request.route_path( + "manage.project.settings", project_name=self.project.name + ) + ) + + +@view_defaults( + context=Project, + route_name="manage.project.settings.publishing", + renderer="manage/project/publishing.html", + uses_session=True, + require_csrf=True, + require_methods=False, + permission="manage:project", + has_translations=True, + require_reauth=True, + http_cache=0, +) +class ManageOIDCProviderViews: + def __init__(self, project, request): + self.request = request + self.project = project + self.oidc_enabled = self.request.registry.settings["warehouse.oidc.enabled"] + self.metrics = self.request.find_service(IMetricsService, context=None) + + @property + def _ratelimiters(self): + return { + "user.oidc": self.request.find_service( + IRateLimiter, name="user_oidc.provider.register" + ), "ip.oidc": self.request.find_service( IRateLimiter, name="ip_oidc.provider.register" ), @@ -3032,86 +3598,344 @@ def _error(message): f"{self.release.project.name!r}" ) - self.request.db.add( + self.request.db.add( + JournalEntry( + name=self.release.project.name, + action=f"remove file {release_file.filename}", + version=self.release.version, + submitted_by=self.request.user, + submitted_from=self.request.remote_addr, + ) + ) + + self.release.project.record_event( + tag="project:release:file:remove", + ip_address=self.request.remote_addr, + additional={ + "submitted_by": self.request.user.username, + "canonical_version": 
self.release.canonical_version, + "filename": release_file.filename, + }, + ) + + submitter_role = get_user_role_in_project( + self.release.project, self.request.user, self.request + ) + + for contributor in self.release.project.users: + contributor_role = get_user_role_in_project( + self.release.project, contributor, self.request + ) + + send_removed_project_release_file_email( + self.request, + contributor, + file=release_file.filename, + release=self.release, + submitter_name=self.request.user.username, + submitter_role=submitter_role, + recipient_role=contributor_role, + ) + + self.request.db.delete(release_file) + + self.request.session.flash( + f"Deleted file {release_file.filename!r}", queue="success" + ) + + return HTTPSeeOther( + self.request.route_path( + "manage.project.release", + project_name=self.release.project.name, + version=self.release.version, + ) + ) + + +@view_config( + route_name="manage.project.roles", + context=Project, + renderer="manage/project/roles.html", + uses_session=True, + require_methods=False, + permission="manage:project", + has_translations=True, + require_reauth=True, +) +def manage_project_roles(project, request, _form_class=CreateRoleForm): + organization_service = request.find_service(IOrganizationService, context=None) + user_service = request.find_service(IUserService, context=None) + + # Roles, invitations, and invite collaborator form for all projects. + roles = set(request.db.query(Role).join(User).filter(Role.project == project).all()) + invitations = set( + request.db.query(RoleInvitation) + .join(User) + .filter(RoleInvitation.project == project) + .all() + ) + form = _form_class(request.POST, user_service=user_service) + + # Team project roles and add internal collaborator form for organization projects. + enable_internal_collaborator = bool( + not request.flags.enabled(AdminFlagValue.DISABLE_ORGANIZATIONS) + and project.organization + ) + if enable_internal_collaborator: + team_project_roles = set( + request.db.query(TeamProjectRole) + .join(Team) + .filter(TeamProjectRole.project == project) + .all() + ) + internal_role_form = CreateInternalRoleForm( + request.POST, + team_choices=sorted(team.name for team in project.organization.teams), + user_service=user_service, + ) + internal_users = set( + organization_owners(request, project.organization) + + organization_managers(request, project.organization) + + organization_members(request, project.organization) + ) + else: + team_project_roles = set() + internal_role_form = None + internal_users = set() + + default_response = { + "project": project, + "roles": roles, + "invitations": invitations, + "form": form, + "enable_internal_collaborator": enable_internal_collaborator, + "team_project_roles": team_project_roles, + "internal_role_form": internal_role_form, + } + + # Handle GET. + if request.method != "POST": + return default_response + + # Determine which form was submitted with POST. + if enable_internal_collaborator and "is_team" in request.POST: + form = internal_role_form + + # Validate form. + if not form.validate(): + return default_response + + # Try adding team as collaborator. + if enable_internal_collaborator and "is_team" in request.POST and form.is_team.data: + team_name = form.team_name.data + role_name = form.team_project_role_name.data + team_id = organization_service.find_teamid(project.organization.id, team_name) + team = organization_service.get_team(team_id) + + # Do nothing if role already exists. 
+ existing_role = ( + request.db.query(TeamProjectRole) + .filter(TeamProjectRole.team == team, TeamProjectRole.project == project) + .first() + ) + if existing_role: + request.session.flash( + request._( + ( + "Team '${team_name}' already has " + "${role_name} permissions for project" + ), + mapping={ + "team_name": team_name, + "role_name": existing_role.role_name.value, + }, + ), + queue="error", + ) + return default_response + + # Add internal team. + organization_service.add_team_project_role(team.id, project.id, role_name) + + # Add journal entry. + request.db.add( + JournalEntry( + name=project.name, + action=f"add {role_name.value} {team_name}", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + + # Record events. + project.record_event( + tag="project:team_project_role:create", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": role_name.value, + "target_team": team.name, + }, + ) + team.organization.record_event( + tag="organization:team_project_role:create", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + "role_name": role_name.value, + "target_team": team.name, + }, + ) + team.record_event( + tag="team:team_project_role:create", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + "role_name": role_name.value, + }, + ) + + # Send notification emails. + member_users = set(team.members) + owner_users = set(project.owners + project.organization.owners) + owner_users -= member_users + send_team_collaborator_added_email( + request, + owner_users, + team=team, + submitter=request.user, + project_name=project.name, + role=role_name.value, + ) + send_added_as_team_collaborator_email( + request, + member_users, + team=team, + submitter=request.user, + project_name=project.name, + role=role_name.value, + ) + + # Display notification message. + request.session.flash( + request._( + ( + "${team_name} now has ${role} permissions " + "for the '${project_name}' project." + ), + mapping={ + "team_name": team.name, + "project_name": project.name, + "role": role_name.value, + }, + ), + queue="success", + ) + + # Refresh project collaborators. + return HTTPSeeOther(request.path) + + # Try adding user as collaborator. + username = form.username.data + role_name = form.role_name.data + userid = user_service.find_userid(username) + user = user_service.get_user(userid) + + # Do nothing if role already exists. + existing_role = ( + request.db.query(Role) + .filter(Role.user == user, Role.project == project) + .first() + ) + if existing_role: + request.session.flash( + request._( + "User '${username}' already has ${role_name} role for project", + mapping={ + "username": username, + "role_name": existing_role.role_name, + }, + ), + queue="error", + ) + return default_response + + if enable_internal_collaborator and user in internal_users: + + # Add internal member. + request.db.add(Role(user=user, project=project, role_name=role_name)) + + # Add journal entry. 
+ request.db.add( JournalEntry( - name=self.release.project.name, - action=f"remove file {release_file.filename}", - version=self.release.version, - submitted_by=self.request.user, - submitted_from=self.request.remote_addr, + name=project.name, + action=f"add {role_name} {user.username}", + submitted_by=request.user, + submitted_from=request.remote_addr, ) ) - self.release.project.record_event( - tag="project:release:file:remove", - ip_address=self.request.remote_addr, + # Record events. + project.record_event( + tag="project:role:create", + ip_address=request.remote_addr, additional={ - "submitted_by": self.request.user.username, - "canonical_version": self.release.canonical_version, - "filename": release_file.filename, + "submitted_by": request.user.username, + "role_name": role_name, + "target_user": user.username, }, ) - - submitter_role = get_user_role_in_project( - self.release.project, self.request.user, self.request + user.record_event( + tag="account:role:create", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "project_name": project.name, + "role_name": role_name, + }, ) - for contributor in self.release.project.users: - contributor_role = get_user_role_in_project( - self.release.project, contributor, self.request - ) - - send_removed_project_release_file_email( - self.request, - contributor, - file=release_file.filename, - release=self.release, - submitter_name=self.request.user.username, - submitter_role=submitter_role, - recipient_role=contributor_role, - ) - - self.request.db.delete(release_file) - - self.request.session.flash( - f"Deleted file {release_file.filename!r}", queue="success" + # Send notification emails. + owner_users = set(project.owners + project.organization.owners) + owner_users.discard(user) + send_collaborator_added_email( + request, + owner_users, + user=user, + submitter=request.user, + project_name=project.name, + role=role_name, ) - - return HTTPSeeOther( - self.request.route_path( - "manage.project.release", - project_name=self.release.project.name, - version=self.release.version, - ) + send_added_as_collaborator_email( + request, + user, + submitter=request.user, + project_name=project.name, + role=role_name, ) + # Display notification message. + request.session.flash( + request._( + "${username} is now ${role} of the '${project_name}' project.", + mapping={ + "username": username, + "project_name": project.name, + "role": role_name, + }, + ), + queue="success", + ) -@view_config( - route_name="manage.project.roles", - context=Project, - renderer="manage/project/roles.html", - uses_session=True, - require_methods=False, - permission="manage:project", - has_translations=True, - require_reauth=True, -) -def manage_project_roles(project, request, _form_class=CreateRoleForm): - user_service = request.find_service(IUserService, context=None) - form = _form_class(request.POST, user_service=user_service) + # Refresh project collaborators. + return HTTPSeeOther(request.path) + else: - if request.method == "POST" and form.validate(): - username = form.username.data - role_name = form.role_name.data - userid = user_service.find_userid(username) - user = user_service.get_user(userid) + # Invite external user. 
token_service = request.find_service(ITokenService, name="email") - existing_role = ( - request.db.query(Role) - .filter(Role.user == user, Role.project == project) - .first() - ) user_invite = ( request.db.query(RoleInvitation) .filter(RoleInvitation.user == user) @@ -3125,18 +3949,7 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): except (TokenExpired, AttributeError): invite_token = None - if existing_role: - request.session.flash( - request._( - "User '${username}' already has ${role_name} role for project", - mapping={ - "username": username, - "role_name": existing_role.role_name, - }, - ), - queue="error", - ) - elif user.primary_email is None or not user.primary_email.verified: + if user.primary_email is None or not user.primary_email.verified: request.session.flash( request._( "User '${username}' does not have a verified primary email " @@ -3145,6 +3958,7 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): ), queue="error", ) + return default_response elif ( user_invite and user_invite.invite_status == RoleInvitationStatus.Pending @@ -3158,6 +3972,7 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): ), queue="error", ) + return default_response else: invite_token = token_service.dumps( { @@ -3207,6 +4022,15 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): "target_user": username, }, ) + user.record_event( + tag="account:role:invite", + ip_address=request.remote_addr, + additional={ + "submitted_by": request.user.username, + "role_name": role_name, + "target_user": username, + }, + ) request.db.flush() # in order to get id request.session.flash( request._( @@ -3216,22 +4040,8 @@ def manage_project_roles(project, request, _form_class=CreateRoleForm): queue="success", ) - form = _form_class(user_service=user_service) - - roles = set(request.db.query(Role).join(User).filter(Role.project == project).all()) - invitations = set( - request.db.query(RoleInvitation) - .join(User) - .filter(RoleInvitation.project == project) - .all() - ) - - return { - "project": project, - "roles": roles, - "invitations": invitations, - "form": form, - } + # Refresh project collaborators. + return HTTPSeeOther(request.path) @view_config( @@ -3444,6 +4254,220 @@ def delete_project_role(project, request): ) +@view_config( + route_name="manage.project.change_team_project_role", + context=Project, + uses_session=True, + require_methods=["POST"], + permission="manage:project", + has_translations=True, + require_reauth=True, +) +def change_team_project_role(project, request, _form_class=ChangeTeamProjectRoleForm): + form = _form_class(request.POST) + + if form.validate(): + role_id = request.POST["role_id"] + try: + role = ( + request.db.query(TeamProjectRole) + .join(Team) + .filter( + TeamProjectRole.id == role_id, TeamProjectRole.project == project + ) + .one() + ) + if ( + role.role_name == TeamProjectRoleType.Administer + and request.user in role.team.members + and request.user not in role.team.organization.owners + ): + request.session.flash( + "Cannot remove your own team with Administer permissions", + queue="error", + ) + else: + # Add journal entry. + request.db.add( + JournalEntry( + name=project.name, + action="change {} {} to {}".format( + role.role_name.value, + role.team.name, + form.team_project_role_name.data.value, + ), + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + + # Change team project role. + role.role_name = form.team_project_role_name.data + + # Record events. 
+ project.record_event( + tag="project:team_project_role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": role.role_name.value, + "target_team": role.team.name, + }, + ) + role.team.organization.record_event( + tag="organization:team_project_role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": role.project.name, + "role_name": role.role_name.value, + "target_team": role.team.name, + }, + ) + role.team.record_event( + tag="team:team_project_role:change", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": role.project.name, + "role_name": role.role_name.value, + }, + ) + + # Send notification emails. + member_users = set(role.team.members) + owner_users = set(project.owners + role.team.organization.owners) + owner_users -= member_users + send_team_collaborator_role_changed_email( + request, + owner_users, + team=role.team, + submitter=request.user, + project_name=project.name, + role=role.role_name, + ) + send_role_changed_as_team_collaborator_email( + request, + member_users, + team=role.team, + submitter=request.user, + project_name=project.name, + role=role.role_name.value, + ) + + # Display notification message. + request.session.flash("Changed permissions", queue="success") + except NoResultFound: + request.session.flash("Could not find permissions", queue="error") + + return HTTPSeeOther( + request.route_path("manage.project.roles", project_name=project.name) + ) + + +@view_config( + route_name="manage.project.delete_team_project_role", + context=Project, + uses_session=True, + require_methods=["POST"], + permission="manage:project", + has_translations=True, + require_reauth=True, +) +def delete_team_project_role(project, request): + try: + role = ( + request.db.query(TeamProjectRole) + .join(Team) + .filter(TeamProjectRole.project == project) + .filter(TeamProjectRole.id == request.POST["role_id"]) + .one() + ) + removing_self = ( + role.role_name == TeamProjectRoleType.Administer + and request.user in role.team.members + and request.user not in role.team.organization.owners + ) + if removing_self: + request.session.flash( + "Cannot remove your own team with Administer permissions", queue="error" + ) + else: + role_name = role.role_name + team = role.team + + # Delete role. + request.db.delete(role) + + # Add journal entry. + request.db.add( + JournalEntry( + name=project.name, + action=f"remove {role_name.value} {team.name}", + submitted_by=request.user, + submitted_from=request.remote_addr, + ) + ) + + # Record event. + project.record_event( + tag="project:team_project_role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "role_name": role_name.value, + "target_team": team.name, + }, + ) + team.organization.record_event( + tag="organization:team_project_role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + "role_name": role_name.value, + "target_team": team.name, + }, + ) + team.record_event( + tag="team:team_project_role:delete", + ip_address=request.remote_addr, + additional={ + "submitted_by_user_id": str(request.user.id), + "project_name": project.name, + "role_name": role_name.value, + }, + ) + + # Send notification emails. 
+            member_users = set(team.members)
+            owner_users = set(project.owners + team.organization.owners)
+            owner_users -= member_users
+            send_team_collaborator_removed_email(
+                request,
+                owner_users,
+                team=role.team,
+                submitter=request.user,
+                project_name=project.name,
+            )
+            send_removed_as_team_collaborator_email(
+                request,
+                member_users,
+                team=role.team,
+                submitter=request.user,
+                project_name=project.name,
+            )
+
+            # Display notification message.
+            request.session.flash("Removed permissions", queue="success")
+    except NoResultFound:
+        request.session.flash("Could not find permissions", queue="error")
+
+    return HTTPSeeOther(
+        request.route_path("manage.project.roles", project_name=project.name)
+    )
+
+
 @view_config(
     route_name="manage.project.history",
     context=Project,
diff --git a/warehouse/migrations/versions/7eaad728b806_create_team_models.py b/warehouse/migrations/versions/7eaad728b806_create_team_models.py
new file mode 100644
--- /dev/null
+++ b/warehouse/migrations/versions/7eaad728b806_create_team_models.py
@@ -0,0 +1,176 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+create_team_models
+
+Revision ID: 7eaad728b806
+Revises: 2db9b00c8d00
+Create Date: 2022-06-13 13:23:42.629088
+"""
+
+import sqlalchemy as sa
+
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+revision = "7eaad728b806"
+down_revision = "2db9b00c8d00"
+
+# Note: It is VERY important to ensure that a migration does not lock for a
+#       long period of time and to ensure that each individual migration does
+#       not break compatibility with the *previous* version of the code base.
+#       This is because the migrations will be run automatically as part of the
+#       deployment process, but while the previous version of the code is still
+#       up and running. Thus backwards incompatible changes must be broken up
+#       over multiple migrations inside of multiple pull requests in order to
+#       phase them in over multiple deploys.
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust!
### + op.execute( + r""" CREATE OR REPLACE FUNCTION normalize_team_name(text) + RETURNS text AS + $$ + SELECT lower(regexp_replace($1, '(\s|/|\.|_|-)+', '-', 'ig')) + $$ + LANGUAGE SQL + IMMUTABLE + RETURNS NULL ON NULL INPUT; + """ + ) + op.create_table( + "teams", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("name", sa.Text(), nullable=False), + sa.Column("organization_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column( + "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.CheckConstraint( + r"name ~* '^([^\s/._-]|[^\s/._-].*[^\s/._-])$'::text", + name="teams_valid_name", + ), + sa.ForeignKeyConstraint( + ["organization_id"], + ["organizations.id"], + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_teams_created"), "teams", ["created"], unique=False) + op.create_index( + "teams_organization_id_idx", "teams", ["organization_id"], unique=False + ) + op.create_table( + "team_project_roles", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("role_name", sa.Text(), nullable=False), + sa.Column("project_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("team_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["project_id"], ["projects.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["team_id"], ["teams.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint( + "project_id", "team_id", name="_team_project_roles_project_team_uc" + ), + ) + op.create_index( + "team_project_roles_project_id_idx", + "team_project_roles", + ["project_id"], + unique=False, + ) + op.create_index( + "team_project_roles_team_id_idx", + "team_project_roles", + ["team_id"], + unique=False, + ) + op.create_table( + "team_roles", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("role_name", sa.Text(), nullable=False), + sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("team_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["team_id"], ["teams.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["user_id"], ["users.id"], onupdate="CASCADE", ondelete="CASCADE" + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("user_id", "team_id", name="_team_roles_user_team_uc"), + ) + op.create_index("team_roles_team_id_idx", "team_roles", ["team_id"], unique=False) + op.create_index("team_roles_user_id_idx", "team_roles", ["user_id"], unique=False) + op.create_table( + "team_events", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("tag", sa.String(), nullable=False), + sa.Column( + "time", sa.DateTime(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("ip_address", sa.String(), nullable=False), + sa.Column("additional", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.Column("source_id", postgresql.UUID(as_uuid=True), nullable=False), + sa.ForeignKeyConstraint( + ["source_id"], ["teams.id"], initially="DEFERRED", deferrable=True + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_team_events_source_id", "team_events", 
["source_id"], unique=False + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("ix_team_events_source_id", table_name="team_events") + op.drop_table("team_events") + op.drop_index("team_roles_user_id_idx", table_name="team_roles") + op.drop_index("team_roles_team_id_idx", table_name="team_roles") + op.drop_table("team_roles") + op.drop_index("team_project_roles_team_id_idx", table_name="team_project_roles") + op.drop_index("team_project_roles_project_id_idx", table_name="team_project_roles") + op.drop_table("team_project_roles") + op.drop_index("teams_organization_id_idx", table_name="teams") + op.drop_index(op.f("ix_teams_created"), table_name="teams") + op.drop_table("teams") + op.execute("DROP FUNCTION normalize_team_name(text)") + # ### end Alembic commands ### diff --git a/warehouse/organizations/interfaces.py b/warehouse/organizations/interfaces.py --- a/warehouse/organizations/interfaces.py +++ b/warehouse/organizations/interfaces.py @@ -158,6 +158,83 @@ def delete_organization_project(organization_id, project_id): Removes an association between the specified organization and project """ + def get_teams_by_organization(organization_id): + """ + Return a list of all team objects for the specified organization, + or None if there are none. + """ + + def get_team(team_id): + """ + Return a team object for the specified identifier, + """ + + def find_teamid(organization_id, team_name): + """ + Find the unique team identifier for the given organization and + team name or None if there is no such team. + """ + + def get_teams_by_user(user_id): + """ + Return a list of all team objects associated with a given user id. + """ + + def add_team(organization_id, name): + """ + Attempts to create a team with the specified name in an organization + """ + + def rename_team(team_id, name): + """ + Performs operations necessary to rename a Team + """ + + def delete_team(team_id): + """ + Delete team for the specified team id and all associated objects + """ + + def delete_teams_by_organization(organization_id): + """ + Delete all teams for the specified organization id + """ + + def get_team_role(team_role_id): + """ + Return the team role object that represents the given team role id, + """ + + def get_team_role_by_user(team_id, user_id): + """ + Gets an team role for a specified team and user + """ + + def add_team_role(team_id, user_id, role_name): + """ + Add the team role object to a team for a specified team id and user id + """ + + def delete_team_role(team_role_id): + """ + Remove the team role for a specified team id and user id + """ + + def get_team_project_role(team_project_role_id): + """ + Return the team project role object that represents the given team project role id, + """ + + def add_team_project_role(team_id, project_id, role_name): + """ + Adds a team project role for the specified team and project + """ + + def delete_team_project_role(team_project_role_id): + """ + Delete an team project role for a specified team project role id + """ + def record_event(organization_id, *, tag, additional=None): """ Creates a new Organization.Event for the given organization with the given diff --git a/warehouse/organizations/models.py b/warehouse/organizations/models.py --- a/warehouse/organizations/models.py +++ b/warehouse/organizations/models.py @@ -144,7 +144,10 @@ def __getitem__(self, organization): ) raise HTTPPermanentRedirect( self.request.matched_route.generate( - {"organization_name": 
organization.normalized_name} + { + **self.request.matchdict, + "organization_name": organization.normalized_name, + } ) ) except NoResultFound: @@ -194,6 +197,26 @@ class Organization(HasEvents, db.Model): "Project", secondary=OrganizationProject.__table__, back_populates="organization", viewonly=True # type: ignore # noqa ) + @property + def owners(self): + """Return all users who are owners of the organization.""" + owner_roles = ( + orm.object_session(self) + .query(User.id) + .join(OrganizationRole.user) + .filter( + OrganizationRole.role_name == OrganizationRoleType.Owner, + OrganizationRole.organization == self, + ) + .subquery() + ) + return ( + orm.object_session(self) + .query(User) + .join(owner_roles, User.id == owner_roles.c.id) + .all() + ) + def record_event(self, *, tag, ip_address, additional={}): """Record organization name in events in case organization is ever deleted.""" super().record_event( @@ -227,7 +250,12 @@ def __acl__(self): ( Allow, f"user:{role.user.id}", - ["view:organization", "manage:organization"], + [ + "view:organization", + "view:team", + "manage:organization", + "manage:team", + ], ) ) elif role.role_name == OrganizationRoleType.BillingManager: @@ -235,7 +263,7 @@ def __acl__(self): ( Allow, f"user:{role.user.id}", - ["view:organization", "manage:billing"], + ["view:organization", "view:team", "manage:billing"], ) ) elif role.role_name == OrganizationRoleType.Manager: @@ -243,12 +271,14 @@ def __acl__(self): ( Allow, f"user:{role.user.id}", - ["view:organization", "manage:team"], + ["view:organization", "view:team", "manage:team"], ) ) else: # No member-specific write access needed for now. - acls.append((Allow, f"user:{role.user.id}", ["view:organization"])) + acls.append( + (Allow, f"user:{role.user.id}", ["view:organization", "view:team"]) + ) return acls @@ -311,3 +341,145 @@ class OrganizationInvitation(db.Model): user = orm.relationship(User, lazy=False) organization = orm.relationship("Organization", lazy=False) + + +class TeamRoleType(str, enum.Enum): + + Member = "Member" + + +class TeamRole(db.Model): + + __tablename__ = "team_roles" + __table_args__ = ( + Index("team_roles_user_id_idx", "user_id"), + Index("team_roles_team_id_idx", "team_id"), + UniqueConstraint( + "user_id", + "team_id", + name="_team_roles_user_team_uc", + ), + ) + + __repr__ = make_repr("role_name", "team", "user") + + role_name = Column( + Enum(TeamRoleType, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + user_id = Column( + ForeignKey("users.id", onupdate="CASCADE", ondelete="CASCADE"), nullable=False + ) + team_id = Column( + ForeignKey("teams.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + + user = orm.relationship(User, lazy=False) + team = orm.relationship("Team", lazy=False) + + +class TeamProjectRoleType(str, enum.Enum): + + Administer = "Administer" + Upload = "Upload" + + +class TeamProjectRole(db.Model): + + __tablename__ = "team_project_roles" + __table_args__ = ( + Index("team_project_roles_project_id_idx", "project_id"), + Index("team_project_roles_team_id_idx", "team_id"), + UniqueConstraint( + "project_id", + "team_id", + name="_team_project_roles_project_team_uc", + ), + ) + + __repr__ = make_repr("role_name", "team", "project") + + role_name = Column( + Enum(TeamProjectRoleType, values_callable=lambda x: [e.value for e in x]), + nullable=False, + ) + project_id = Column( + ForeignKey("projects.id", onupdate="CASCADE", ondelete="CASCADE"), + nullable=False, + ) + team_id = Column( + ForeignKey("teams.id", 
onupdate="CASCADE", ondelete="CASCADE"),
+        nullable=False,
+    )
+
+    project = orm.relationship("Project", lazy=False)
+    team = orm.relationship("Team", lazy=False)
+
+
+class TeamFactory:
+    def __init__(self, request, organization=None):
+        self.request = request
+        self.organization = organization
+
+    def __getitem__(self, name):
+        if self.organization is None:
+            organization = OrganizationFactory(self.request)[name]
+            return TeamFactory(self.request, organization)
+        try:
+            return (
+                self.request.db.query(Team)
+                .filter(
+                    Team.organization == self.organization,
+                    Team.normalized_name == func.normalize_team_name(name),
+                )
+                .one()
+            )
+        except NoResultFound:
+            raise KeyError from None
+
+
+class Team(HasEvents, db.Model):
+
+    __tablename__ = "teams"
+    __table_args__ = (
+        Index("teams_organization_id_idx", "organization_id"),
+        CheckConstraint(
+            r"name ~* '^([^\s/._-]|[^\s/._-].*[^\s/._-])$'::text",
+            name="teams_valid_name",
+        ),
+    )
+
+    __repr__ = make_repr("name", "organization")
+
+    name = Column(Text, nullable=False)
+    normalized_name = orm.column_property(func.normalize_team_name(name))
+    organization_id = Column(
+        ForeignKey("organizations.id", onupdate="CASCADE", ondelete="CASCADE"),
+        nullable=False,
+    )
+    created = Column(
+        DateTime(timezone=False),
+        nullable=False,
+        server_default=sql.func.now(),
+        index=True,
+    )
+
+    organization = orm.relationship("Organization", lazy=False, backref="teams")
+    members = orm.relationship(
+        User, secondary=TeamRole.__table__, backref="teams", viewonly=True  # type: ignore # noqa
+    )
+    projects = orm.relationship(
+        "Project", secondary=TeamProjectRole.__table__, backref="teams", viewonly=True  # type: ignore # noqa
+    )
+
+    def record_event(self, *, tag, ip_address, additional={}):
+        """Record team name in events in case team is ever deleted."""
+        super().record_event(
+            tag=tag,
+            ip_address=ip_address,
+            additional={"team_name": self.name, **additional},
+        )
+
+    def __acl__(self):
+        return self.organization.__acl__()
diff --git a/warehouse/organizations/services.py b/warehouse/organizations/services.py
--- a/warehouse/organizations/services.py
+++ b/warehouse/organizations/services.py
@@ -25,6 +25,9 @@
     OrganizationNameCatalog,
     OrganizationProject,
     OrganizationRole,
+    Team,
+    TeamProjectRole,
+    TeamRole,
 )
 
 NAME_FIELD = "name"
@@ -310,6 +313,8 @@ def delete_organization(self, organization_id):
         self.db.query(OrganizationProject).filter_by(organization=organization).delete()
         # Delete roles
         self.db.query(OrganizationRole).filter_by(organization=organization).delete()
+        # Delete teams (and related data)
+        self.delete_teams_by_organization(organization_id)
         # TODO: Delete any stored card data from payment processor
         # Delete organization
         self.db.delete(organization)
@@ -381,6 +386,183 @@ def delete_organization_project(self, organization_id, project_id):
         self.db.delete(organization_project)
         self.db.flush()
 
+    def get_teams_by_organization(self, organization_id):
+        """
+        Return a list of all team objects for the specified organization,
+        or None if there are none.
+        """
+        return self.db.query(Team).filter(Team.organization_id == organization_id).all()
+
+    def get_team(self, team_id):
+        """
+        Return a team object for the specified identifier.
+        """
+        return self.db.query(Team).get(team_id)
+
+    def find_teamid(self, organization_id, team_name):
+        """
+        Find the unique team identifier for the given organization and
+        team name or None if there is no such team.
+        """
+        normalized_name = func.normalize_team_name(team_name)
+        try:
+            (team_id,) = (
+                self.db.query(Team.id)
+                .filter(
+                    Team.organization_id == organization_id,
+                    Team.normalized_name == normalized_name,
+                )
+                .one()
+            )
+        except NoResultFound:
+            return
+
+        return team_id
+
+    def get_teams_by_user(self, user_id):
+        """
+        Return a list of all team objects associated with a given user id.
+        """
+        return (
+            self.db.query(Team)
+            .join(TeamRole, TeamRole.team_id == Team.id)
+            .filter(TeamRole.user_id == user_id)
+            .order_by(Team.name)
+            .all()
+        )
+
+    def add_team(self, organization_id, name):
+        """
+        Attempts to create a team with the specified name in an organization
+        """
+        team = Team(
+            name=name,
+            organization_id=organization_id,
+        )
+        self.db.add(team)
+        self.db.flush()
+
+        return team
+
+    def rename_team(self, team_id, name):
+        """
+        Performs operations necessary to rename a Team
+        """
+        team = self.get_team(team_id)
+
+        team.name = name
+        self.db.flush()
+
+        return team
+
+    def delete_team(self, team_id):
+        """
+        Delete team for the specified team id and all associated objects
+        """
+        team = self.get_team(team_id)
+        # Delete team members
+        self.db.query(TeamRole).filter_by(team=team).delete()
+        # Delete team project roles
+        self.db.query(TeamProjectRole).filter_by(team=team).delete()
+        # Delete team
+        self.db.delete(team)
+        self.db.flush()
+
+    def delete_teams_by_organization(self, organization_id):
+        """
+        Delete all teams for the specified organization id
+        """
+        teams = self.get_teams_by_organization(organization_id)
+        for team in teams:
+            self.delete_team(team.id)
+
+    def get_team_role(self, team_role_id):
+        """
+        Return the team role object that represents the given team role id.
+        """
+        return self.db.query(TeamRole).get(team_role_id)
+
+    def get_team_role_by_user(self, team_id, user_id):
+        """
+        Gets a team role for a specified team and user
+        """
+        try:
+            team_role = (
+                self.db.query(TeamRole)
+                .filter(
+                    TeamRole.team_id == team_id,
+                    TeamRole.user_id == user_id,
+                )
+                .one()
+            )
+        except NoResultFound:
+            return
+
+        return team_role
+
+    def get_team_roles(self, team_id):
+        """
+        Gets a list of team roles for a specified team
+        """
+        return (
+            self.db.query(TeamRole).join(User).filter(TeamRole.team_id == team_id).all()
+        )
+
+    def add_team_role(self, team_id, user_id, role_name):
+        """
+        Add the team role object to a team for a specified team id and user id
+        """
+        member = TeamRole(
+            team_id=team_id,
+            user_id=user_id,
+            role_name=role_name,
+        )
+
+        self.db.add(member)
+        self.db.flush()
+
+        return member
+
+    def delete_team_role(self, team_role_id):
+        """
+        Remove the team role for a specified team role id
+        """
+        member = self.get_team_role(team_role_id)
+
+        self.db.delete(member)
+        self.db.flush()
+
+    def get_team_project_role(self, team_project_role_id):
+        """
+        Return the team project role object that
+        represents the given team project role id.
+        """
+        return self.db.query(TeamProjectRole).get(team_project_role_id)
+
+    def add_team_project_role(self, team_id, project_id, role_name):
+        """
+        Adds a team project role for the specified team and project
+        """
+        team_project_role = TeamProjectRole(
+            team_id=team_id,
+            project_id=project_id,
+            role_name=role_name,
+        )
+
+        self.db.add(team_project_role)
+        self.db.flush()
+
+        return team_project_role
+
+    def delete_team_project_role(self, team_project_role_id):
+        """
+        Remove a team project role for a specified team project role id
+        """
+        team_project_role = self.get_team_project_role(team_project_role_id)
+
+
self.db.delete(team_project_role) + self.db.flush() + def record_event(self, organization_id, *, tag, additional=None): """ Creates a new Organization.Event for the given organization with the given diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -58,6 +58,7 @@ OrganizationProject, OrganizationRole, OrganizationRoleType, + TeamProjectRole, ) from warehouse.sitemap.models import SitemapMixin from warehouse.utils import dotted_navigator @@ -252,7 +253,19 @@ def __acl__(self): query = session.query(Role).filter(Role.project == self) query = query.options(orm.lazyload("project")) query = query.options(orm.lazyload("user")) - roles = {(role.user_id, role.role_name) for role in query.all()} + permissions = { + (role.user_id, "Administer" if role.role_name == "Owner" else "Upload") + for role in query.all() + } + + # Add all of the team members for this project. + query = session.query(TeamProjectRole).filter(TeamProjectRole.project == self) + query = query.options(orm.lazyload("project")) + query = query.options(orm.lazyload("team")) + for role in query.all(): + permissions |= { + (user.id, role.role_name.value) for user in role.team.members + } # Add all organization owners for this project. if self.organization: @@ -262,12 +275,10 @@ def __acl__(self): ) query = query.options(orm.lazyload("organization")) query = query.options(orm.lazyload("user")) - roles |= {(role.user_id, "Owner") for role in query.all()} + permissions |= {(role.user_id, "Administer") for role in query.all()} - for user_id, role_name in sorted( - roles, key=lambda x: (["Owner", "Maintainer"].index(x[1]), x[0]) - ): - if role_name == "Owner": + for user_id, permission_name in sorted(permissions, key=lambda x: (x[1], x[0])): + if permission_name == "Administer": acls.append((Allow, f"user:{user_id}", ["manage:project", "upload"])) else: acls.append((Allow, f"user:{user_id}", ["upload"])) @@ -287,7 +298,7 @@ def documentation_url(self): @property def owners(self): - """Return all owners who are owners of the project.""" + """Return all users who are owners of the project.""" owner_roles = ( orm.object_session(self) .query(User.id) diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -246,6 +246,13 @@ def includeme(config): traverse="/{organization_name}", domain=warehouse, ) + config.add_route( + "manage.organization.teams", + "/manage/organization/{organization_name}/teams/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ) config.add_route( "manage.organization.roles", "/manage/organization/{organization_name}/people/", @@ -274,6 +281,34 @@ def includeme(config): traverse="/{organization_name}", domain=warehouse, ) + config.add_route( + "manage.team.settings", + "/manage/organization/{organization_name}/team/{team_name}/settings/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ) + config.add_route( + "manage.team.projects", + "/manage/organization/{organization_name}/team/{team_name}/projects/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ) + config.add_route( + "manage.team.roles", + "/manage/organization/{organization_name}/team/{team_name}/members/", + factory="warehouse.organizations.models:TeamFactory", + 
traverse="/{organization_name}/{team_name}", + domain=warehouse, + ) + config.add_route( + "manage.team.delete_role", + "/manage/organization/{organization_name}/team/{team_name}/members/delete/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ) config.add_route("manage.projects", "/manage/projects/", domain=warehouse) config.add_route( "manage.project.settings", @@ -359,6 +394,20 @@ def includeme(config): traverse="/{project_name}", domain=warehouse, ) + config.add_route( + "manage.project.change_team_project_role", + "/manage/project/{project_name}/collaboration/change_team/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) + config.add_route( + "manage.project.delete_team_project_role", + "/manage/project/{project_name}/collaboration/delete_team/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ) config.add_route( "manage.project.documentation", "/manage/project/{project_name}/documentation/", diff --git a/warehouse/utils/organization.py b/warehouse/utils/organization.py --- a/warehouse/utils/organization.py +++ b/warehouse/utils/organization.py @@ -39,3 +39,32 @@ def confirm_organization( raise HTTPSeeOther( request.route_path(fail_route, organization_name=organization_name) ) + + +def confirm_team( + team, + request, + fail_route, + field_name="confirm_team_name", + error_message="Could not delete team", +): + confirm = request.POST.get(field_name) + organization_name = team.organization.normalized_name + team_name = team.normalized_name + if not confirm: + request.session.flash("Confirm the request", queue="error") + raise HTTPSeeOther( + request.route_path( + fail_route, organization_name=organization_name, team_name=team_name + ) + ) + if confirm.strip() != team.name.strip(): + request.session.flash( + (f"{error_message} - " f"{confirm!r} is not the same as {team.name!r}"), + queue="error", + ) + raise HTTPSeeOther( + request.route_path( + fail_route, organization_name=organization_name, team_name=team_name + ) + )
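
For reviewers, a minimal sketch of how the confirm_team helper above composes with IOrganizationService.delete_team from a deletion view. The standalone function below is an illustrative assumption, not code from this patch; the actual wiring (including event recording and notification emails) is in the delete_team view in the warehouse/manage/views.py diff above.

# Illustrative sketch only: a stripped-down version of the delete_team flow.
# The function name and the omission of events/emails are assumptions; see
# the full view in the manage/views.py diff for the real behavior.
from pyramid.httpexceptions import HTTPSeeOther

from warehouse.organizations.interfaces import IOrganizationService
from warehouse.utils.organization import confirm_team


def delete_team_sketch(team, request):
    # confirm_team() flashes an error and raises HTTPSeeOther back to the
    # "manage.team.settings" page when the submitted "confirm_team_name"
    # field is missing or does not match team.name.
    confirm_team(team, request, fail_route="manage.team.settings")

    organization = team.organization
    organization_service = request.find_service(IOrganizationService, context=None)
    # Deletes the team along with its TeamRole and TeamProjectRole rows.
    organization_service.delete_team(team.id)

    request.session.flash("Team deleted", queue="success")
    return HTTPSeeOther(
        request.route_path(
            "manage.organization.teams",
            organization_name=organization.normalized_name,
        )
    )
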
diff --git a/tests/common/db/organizations.py b/tests/common/db/organizations.py
--- a/tests/common/db/organizations.py
+++ b/tests/common/db/organizations.py
@@ -22,6 +22,11 @@
     OrganizationProject,
     OrganizationRole,
     OrganizationRoleType,
+    Team,
+    TeamProjectRole,
+    TeamProjectRoleType,
+    TeamRole,
+    TeamRoleType,
 )
 
 from .accounts import UserFactory
@@ -101,3 +106,35 @@ class Meta:
     id = factory.Faker("uuid4", cast_to=None)
     organization = factory.SubFactory(OrganizationFactory)
     project = factory.SubFactory(ProjectFactory)
+
+
+class TeamFactory(WarehouseFactory):
+    class Meta:
+        model = Team
+
+    id = factory.Faker("uuid4", cast_to=None)
+    name = factory.Faker("pystr", max_chars=12)
+    created = factory.Faker(
+        "date_time_between_dates",
+        datetime_start=datetime.datetime(2020, 1, 1),
+        datetime_end=datetime.datetime(2022, 1, 1),
+    )
+    organization = factory.SubFactory(OrganizationFactory)
+
+
+class TeamRoleFactory(WarehouseFactory):
+    class Meta:
+        model = TeamRole
+
+    role_name = TeamRoleType.Member
+    user = factory.SubFactory(UserFactory)
+    team = factory.SubFactory(TeamFactory)
+
+
+class TeamProjectRoleFactory(WarehouseFactory):
+    class Meta:
+        model = TeamProjectRole
+
+    role_name = TeamProjectRoleType.Administer
+    project = factory.SubFactory(ProjectFactory)
+    team = factory.SubFactory(TeamFactory)
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py
--- a/tests/unit/email/test_init.py
+++ b/tests/unit/email/test_init.py
@@ -24,6 +24,7 @@
 from warehouse.email.services import EmailMessage
 
 from ...common.db.accounts import EmailFactory, UserFactory
+from ...common.db.organizations import TeamFactory
 
 
 @pytest.mark.parametrize(
@@ -2794,6 +2795,163 @@ def test_send_organization_deleted_email(
         ]
 
 
+class TestTeamMemberEmails:
+    @pytest.fixture
+    def team(self, pyramid_user):
+        self.user = UserFactory.create()
+        EmailFactory.create(user=self.user, verified=True)
+        self.submitter = pyramid_user
+        self.organization_name = "exampleorganization"
+        self.team_name = "Example Team"
+
+    @pytest.mark.parametrize(
+        ("email_template_name", "send_team_member_email"),
+        [
+            ("added-as-team-member", email.send_added_as_team_member_email),
+            ("removed-as-team-member", email.send_removed_as_team_member_email),
+            ("team-member-added", email.send_team_member_added_email),
+            ("team-member-removed", email.send_team_member_removed_email),
+        ],
+    )
+    def test_send_team_member_email(
+        self,
+        db_request,
+        team,
+        make_email_renderers,
+        send_email,
+        email_template_name,
+        send_team_member_email,
+    ):
+        subject_renderer, body_renderer, html_renderer = make_email_renderers(
+            email_template_name
+        )
+
+        if email_template_name.endswith("-as-team-member"):
+            recipient = self.user
+            result = send_team_member_email(
+                db_request,
+                self.user,
+                submitter=self.submitter,
+                organization_name=self.organization_name,
+                team_name=self.team_name,
+            )
+        else:
+            recipient = self.submitter
+            result = send_team_member_email(
+                db_request,
+                self.submitter,
+                user=self.user,
+                submitter=self.submitter,
+                organization_name=self.organization_name,
+                team_name=self.team_name,
+            )
+
+        assert result == {
+            "username": self.user.username,
+            "submitter": self.submitter.username,
+            "organization_name": self.organization_name,
+            "team_name": self.team_name,
+        }
+        subject_renderer.assert_(**result)
+        body_renderer.assert_(**result)
+        html_renderer.assert_(**result)
+        assert db_request.task.calls == [pretend.call(send_email)]
+        assert send_email.delay.calls == [
+            pretend.call(
+                f"{recipient.name} <{recipient.email}>",
+                {
+                    "subject": subject_renderer.string_response,
+                    "body_text": body_renderer.string_response,
+                    "body_html": (
+                        f"<html>\n"
+                        f"<head></head>\n"
+                        f"<body><p>{html_renderer.string_response}</p></body>\n"
+                        f"</html>\n"
+                    ),
+                },
+                {
+                    "tag": "account:email:sent",
+                    "user_id": recipient.id,
+                    "additional": {
+                        "from_": db_request.registry.settings["mail.sender"],
+                        "to": recipient.email,
+                        "subject": subject_renderer.string_response,
+                        "redact_ip": recipient != self.submitter,
+                    },
+                },
+            )
+        ]
+
+
+class TestTeamEmails:
+    @pytest.fixture
+    def team(self, pyramid_user):
+        self.user = pyramid_user
+        self.organization_name = "exampleorganization"
+        self.team_name = "Example Team"
+
+    @pytest.mark.parametrize(
+        ("email_template_name", "send_team_email"),
+        [
+            ("team-created", email.send_team_created_email),
+            ("team-deleted", email.send_team_deleted_email),
+        ],
+    )
+    def test_send_team_email(
+        self,
+        db_request,
+        team,
+        make_email_renderers,
+        send_email,
+        email_template_name,
+        send_team_email,
+    ):
+        subject_renderer, body_renderer, html_renderer = make_email_renderers(
+            email_template_name
+        )
+
+        result = send_team_email(
+            db_request,
+            self.user,
+            organization_name=self.organization_name,
+            team_name=self.team_name,
+        )
+
+        assert result == {
+            "organization_name": self.organization_name,
+            "team_name": self.team_name,
+        }
+        subject_renderer.assert_(**result)
+        body_renderer.assert_(**result)
+        html_renderer.assert_(**result)
+        assert db_request.task.calls == [pretend.call(send_email)]
+        assert send_email.delay.calls == [
+            pretend.call(
+                f"{self.user.name} <{self.user.email}>",
+                {
+                    "subject": subject_renderer.string_response,
+                    "body_text": body_renderer.string_response,
+                    "body_html": (
+                        f"<html>\n"
+                        f"<head></head>\n"
+                        f"<body><p>{html_renderer.string_response}</p></body>\n"
+                        f"</html>\n"
+                    ),
+                },
+                {
+                    "tag": "account:email:sent",
+                    "user_id": self.user.id,
+                    "additional": {
+                        "from_": db_request.registry.settings["mail.sender"],
+                        "to": self.user.email,
+                        "subject": subject_renderer.string_response,
+                        "redact_ip": False,
+                    },
+                },
+            )
+        ]
+
+
 class TestCollaboratorAddedEmail:
     def test_collaborator_added_email(
         self, pyramid_request, pyramid_config, monkeypatch
@@ -3615,6 +3773,111 @@ def test_role_changed_as_collaborator_email(
         ]
 
 
+class TestTeamCollaboratorEmails:
+    @pytest.fixture
+    def team(self, pyramid_user):
+        self.user = UserFactory.create()
+        EmailFactory.create(user=self.user, verified=True)
+        self.submitter = pyramid_user
+        self.team = TeamFactory.create(name="Example Team")
+        self.project_name = "exampleproject"
+        self.role = "Admin"
+
+    @pytest.mark.parametrize(
+        ("email_template_name", "send_team_collaborator_email"),
+        [
+            ("added-as-team-collaborator", email.send_added_as_team_collaborator_email),
+            (
+                "removed-as-team-collaborator",
+                email.send_removed_as_team_collaborator_email,
+            ),
+            (
+                "role-changed-as-team-collaborator",
+                email.send_role_changed_as_team_collaborator_email,
+            ),
+            ("team-collaborator-added", email.send_team_collaborator_added_email),
+            ("team-collaborator-removed", email.send_team_collaborator_removed_email),
+            (
+                "team-collaborator-role-changed",
+                email.send_team_collaborator_role_changed_email,
+            ),
+        ],
+    )
+    def test_send_team_collaborator_email(
+        self,
+        db_request,
+        team,
+        make_email_renderers,
+        send_email,
+        email_template_name,
+        send_team_collaborator_email,
+    ):
+        subject_renderer, body_renderer, html_renderer = make_email_renderers(
+            email_template_name
+        )
+
+        if "removed" in email_template_name:
+            result = send_team_collaborator_email(
+                db_request,
+                self.user,
+                team=self.team,
+                submitter=self.submitter,
+                project_name=self.project_name,
+            )
+        else:
+            result = send_team_collaborator_email(
+                db_request,
+                self.user,
+                team=self.team,
+                submitter=self.submitter,
+                project_name=self.project_name,
+                role=self.role,
+            )
+
+        if "removed" in email_template_name:
+            assert result == {
+                "team_name": self.team.name,
+                "project": self.project_name,
+                "submitter": self.submitter.username,
+            }
+        else:
+            assert result == {
+                "team_name": self.team.name,
+                "project": self.project_name,
+                "submitter": self.submitter.username,
+                "role": self.role,
+            }
+        subject_renderer.assert_(**result)
+        body_renderer.assert_(**result)
+        html_renderer.assert_(**result)
+        assert db_request.task.calls == [pretend.call(send_email)]
+        assert send_email.delay.calls == [
+            pretend.call(
+                f"{self.user.name} <{self.user.email}>",
+                {
+                    "subject": subject_renderer.string_response,
+                    "body_text": body_renderer.string_response,
+                    "body_html": (
+                        f"<html>\n"
+                        f"<head></head>\n"
+                        f"<body><p>{html_renderer.string_response}</p></body>\n"
+                        f"</html>\n"
+                    ),
+                },
+                {
+                    "tag": "account:email:sent",
+                    "user_id": self.user.id,
+                    "additional": {
+                        "from_": db_request.registry.settings["mail.sender"],
+                        "to": self.user.email,
+                        "subject": subject_renderer.string_response,
+                        "redact_ip": True,
+                    },
+                },
+            )
+        ]
+
+
 class TestRemovedProjectEmail:
     def test_removed_project_email_to_maintainer(
         self, pyramid_request, pyramid_config, monkeypatch
diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py
--- a/tests/unit/manage/test_views.py
+++ b/tests/unit/manage/test_views.py
@@ -52,6 +52,9 @@
     OrganizationRole,
     OrganizationRoleType,
     OrganizationType,
+    TeamProjectRole,
+    TeamProjectRoleType,
+    TeamRoleType,
 )
 from warehouse.packaging.models import (
     File,
@@ -71,6 +74,9 @@
     OrganizationInvitationFactory,
     OrganizationProjectFactory,
     OrganizationRoleFactory,
+    TeamFactory,
+    TeamProjectRoleFactory,
+    TeamRoleFactory,
 )
 from ...common.db.packaging import (
     FileFactory,
@@ -2988,6 +2994,125 @@ def test_delete_organization_disable_organizations(self, db_request):
             view.delete_organization()
 
 
+class TestManageOrganizationTeams:
+    def test_manage_teams(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        organization = OrganizationFactory.create()
+        organization.teams = [TeamFactory.create()]
+
+        db_request.POST = MultiDict()
+
+        view = views.ManageOrganizationTeamsViews(organization, db_request)
+        result = view.manage_teams()
+        form = result["create_team_form"]
+
+        assert view.request == db_request
+        assert view.organization_service == organization_service
+        assert result == {
+            "organization": organization,
+            "create_team_form": form,
+        }
+
+    def test_manage_teams_disable_organizations(self, db_request):
+        organization = OrganizationFactory.create()
+
+        view = views.ManageOrganizationTeamsViews(organization, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.manage_teams()
+
+    def test_create_team(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        organization = OrganizationFactory.create()
+        organization.teams = [TeamFactory.create()]
+
+        db_request.POST = MultiDict({"name": "Team Name"})
+
+        OrganizationRoleFactory.create(
+            organization=organization, user=db_request.user, role_name="Owner"
+        )
+
+        def add_team(name, *args, **kwargs):
+            team = TeamFactory.create(name=name)
+            organization.teams.append(team)
+            return team
+
+        monkeypatch.setattr(organization_service, "add_team", add_team)
+
+        send_team_created_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_team_created_email",
+            send_team_created_email,
+        )
+
+        view = views.ManageOrganizationTeamsViews(organization, db_request)
+        result = view.create_team()
+
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == db_request.path
+        assert len(organization.teams) == 2
+        assert organization.teams[-1].name == "Team Name"
+        assert send_team_created_email.calls == [
+            pretend.call(
+                db_request,
+                {db_request.user},
+                organization_name=organization.name,
+                team_name="Team Name",
+            )
+        ]
+
+    def test_create_team_invalid(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        organization = OrganizationFactory.create()
+        organization.teams = [TeamFactory.create(name="Used Name")]
+
+        OrganizationRoleFactory.create(
+            organization=organization, user=db_request.user, role_name="Owner"
+        )
+
+        db_request.POST = MultiDict({"name": "Used Name"})
+
+        view = views.ManageOrganizationTeamsViews(organization, db_request)
+        result = view.create_team()
+        form = result["create_team_form"]
+
+        assert view.request == db_request
+        assert view.organization_service == organization_service
+        assert result == {
+            "organization": organization,
+            "create_team_form": form,
+        }
+        assert form.name.errors == [
+            "This team name has already been used. Choose a different team name."
+        ]
+        assert len(organization.teams) == 1
+
+    def test_create_team_disable_organizations(self, db_request):
+        organization = OrganizationFactory.create()
+
+        view = views.ManageOrganizationTeamsViews(organization, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.create_team()
+
+
 class TestManageOrganizationProjects:
     def test_manage_organization_projects(
         self,
@@ -4348,177 +4473,795 @@ def test_delete_non_owner_role(self, db_request, enable_organizations):
         assert result.headers["Location"] == "/the-redirect"
 
 
-class TestManageProjects:
-    def test_manage_projects(self, db_request):
-        older_release = ReleaseFactory(created=datetime.datetime(2015, 1, 1))
-        project_with_older_release = ProjectFactory(releases=[older_release])
-        newer_release = ReleaseFactory(created=datetime.datetime(2017, 1, 1))
-        project_with_newer_release = ProjectFactory(releases=[newer_release])
-        older_project_with_no_releases = ProjectFactory(
-            releases=[], created=datetime.datetime(2016, 1, 1)
-        )
-        newer_project_with_no_releases = ProjectFactory(
-            releases=[], created=datetime.datetime(2018, 1, 1)
-        )
-        project_where_owners_require_2fa = ProjectFactory(
-            releases=[], created=datetime.datetime(2022, 1, 1), owners_require_2fa=True
-        )
-        project_where_pypi_mandates_2fa = ProjectFactory(
-            releases=[], created=datetime.datetime(2022, 1, 2), pypi_mandates_2fa=True
-        )
-        another_project_where_owners_require_2fa = ProjectFactory(
-            releases=[], created=datetime.datetime(2022, 3, 1), owners_require_2fa=True
-        )
-        another_project_where_pypi_mandates_2fa = ProjectFactory(
-            releases=[], created=datetime.datetime(2022, 3, 2), pypi_mandates_2fa=True
-        )
-        db_request.user = UserFactory()
-        RoleFactory.create(
-            user=db_request.user,
-            project=project_with_older_release,
-            role_name="Maintainer",
-        )
-        RoleFactory.create(
-            user=db_request.user, project=project_with_newer_release, role_name="Owner"
-        )
-        RoleFactory.create(
-            user=db_request.user,
-            project=newer_project_with_no_releases,
-            role_name="Owner",
-        )
-        RoleFactory.create(
-            user=db_request.user,
-            project=older_project_with_no_releases,
-            role_name="Maintainer",
-        )
-        user_second_owner = UserFactory()
-        RoleFactory.create(
-            user=user_second_owner,
-            project=project_with_older_release,
-            role_name="Owner",
-        )
-        RoleFactory.create(
-            user=user_second_owner,
-            project=older_project_with_no_releases,
-            role_name="Owner",
-        )
-        RoleFactory.create(
-            user=user_second_owner,
-            project=project_with_newer_release,
-            role_name="Owner",
-        )
-        RoleFactory.create(
-            user=db_request.user,
-            project=project_where_owners_require_2fa,
-            role_name="Owner",
-        )
-        RoleFactory.create(
-            user=db_request.user,
-            project=project_where_pypi_mandates_2fa,
-            role_name="Owner",
-        )
-        RoleFactory.create(
-            user=db_request.user,
-            project=another_project_where_owners_require_2fa,
-            role_name="Maintainer",
-        )
-        RoleFactory.create(
-            user=db_request.user,
-            project=another_project_where_pypi_mandates_2fa,
-            role_name="Maintainer",
-        )
+class TestManageTeamSettings:
+    def test_manage_team(
+        self, db_request, organization_service, user_service, enable_organizations
+    ):
+        team = TeamFactory.create()
 
-        assert views.manage_projects(db_request) == {
-            "projects": [
-                another_project_where_pypi_mandates_2fa,
-                another_project_where_owners_require_2fa,
-                project_where_pypi_mandates_2fa,
-                project_where_owners_require_2fa,
-                newer_project_with_no_releases,
-                project_with_newer_release,
-                older_project_with_no_releases,
-                project_with_older_release,
-            ],
-            "projects_owned": {
-                project_with_newer_release.name,
-                newer_project_with_no_releases.name,
-                project_where_owners_require_2fa.name,
-                project_where_pypi_mandates_2fa.name,
-            },
-            "projects_sole_owned": {
-                newer_project_with_no_releases.name,
-                project_where_owners_require_2fa.name,
-                project_where_pypi_mandates_2fa.name,
-            },
-            "projects_requiring_2fa": {
-                project_where_owners_require_2fa.name,
-                project_where_pypi_mandates_2fa.name,
-                another_project_where_owners_require_2fa.name,
-                another_project_where_pypi_mandates_2fa.name,
-            },
-            "project_invites": [],
+        view = views.ManageTeamSettingsViews(team, db_request)
+        result = view.manage_team()
+        form = result["save_team_form"]
+
+        assert view.request == db_request
+        assert view.organization_service == organization_service
+        assert view.user_service == user_service
+        assert result == {
+            "team": team,
+            "save_team_form": form,
         }
 
+    def test_manage_team_disable_organizations(self, db_request):
+        team = TeamFactory.create()
 
-class TestManageProjectSettings:
-    @pytest.mark.parametrize("enabled", [False, True])
-    def test_manage_project_settings(self, enabled, monkeypatch):
-        request = pretend.stub(
-            flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: enabled))
-        )
-        project = pretend.stub(organization=None)
-        view = views.ManageProjectSettingsViews(project, request)
-        form = pretend.stub()
-        view.toggle_2fa_requirement_form_class = lambda *a, **kw: form
-        view.transfer_organization_project_form_class = lambda *a, **kw: form
+        view = views.ManageTeamSettingsViews(team, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.manage_team()
 
-        user_organizations = pretend.call_recorder(
-            lambda *a, **kw: {
-                "organizations_managed": [],
-                "organizations_owned": [],
-                "organizations_billing": [],
-            }
-        )
-        monkeypatch.setattr(views, "user_organizations", user_organizations)
+    def test_save_team(self, db_request, organization_service, enable_organizations):
+        team = TeamFactory.create(name="Team Name")
+        db_request.POST = MultiDict({"name": "New Team Name"})
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
 
-        assert view.manage_project_settings() == {
-            "project": project,
-            "MAX_FILESIZE": MAX_FILESIZE,
-            "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE,
-            "toggle_2fa_form": form,
-            "transfer_organization_project_form": form,
+        view = views.ManageTeamSettingsViews(team, db_request)
+        result = view.save_team()
+
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/foo/bar/"
+        assert team.name == "New Team Name"
+
+    def test_save_team_validation_fails(
+        self, db_request, organization_service, enable_organizations
+    ):
+        team = TeamFactory.create(name="Team Name")
+        db_request.POST = MultiDict({"name": "Team Name"})
+
+        view = views.ManageTeamSettingsViews(team, db_request)
+        result = view.save_team()
+        form = result["save_team_form"]
+
+        assert result == {
+            "team": team,
+            "save_team_form": form,
         }
+        assert team.name == "Team Name"
+        assert form.name.errors == [
+            ("This team name has already been used. " "Choose a different team name.")
+        ]
 
-    @pytest.mark.parametrize("enabled", [False, None])
-    def test_toggle_2fa_requirement_feature_disabled(self, enabled):
-        request = pretend.stub(
-            registry=pretend.stub(
-                settings={"warehouse.two_factor_requirement.enabled": enabled}
-            ),
-        )
+    def test_save_team_disable_organizations(self, db_request):
+        team = TeamFactory.create()
 
-        project = pretend.stub()
-        view = views.ManageProjectSettingsViews(project, request)
+        view = views.ManageTeamSettingsViews(team, db_request)
         with pytest.raises(HTTPNotFound):
-            view.toggle_2fa_requirement()
+            view.save_team()
 
-    @pytest.mark.parametrize(
-        "owners_require_2fa, expected, expected_flash_calls",
-        [
-            (
-                False,
-                False,
-                [
-                    pretend.call(
-                        "2FA requirement cannot be disabled for critical projects",
-                        queue="error",
-                    )
-                ],
-            ),
-            (
-                True,
-                True,
-                [
+    def test_delete_team(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        user_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        team = TeamFactory.create()
+        db_request.POST = MultiDict({"confirm_team_name": team.name})
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
+
+        send_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(views, "send_team_deleted_email", send_email)
+
+        view = views.ManageTeamSettingsViews(team, db_request)
+        result = view.delete_team()
+
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/foo/bar/"
+        assert send_email.calls == [
+            pretend.call(
+                db_request,
+                set(),
+                organization_name=team.organization.name,
+                team_name=team.name,
+            ),
+        ]
+
+    def test_delete_team_no_confirm(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        user_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        team = TeamFactory.create()
+        db_request.POST = MultiDict()
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
+
+        view = views.ManageTeamSettingsViews(team, db_request)
+        with pytest.raises(HTTPSeeOther):
+            view.delete_team()
+
+        assert db_request.session.flash.calls == [
+            pretend.call("Confirm the request", queue="error")
+        ]
+
+    def test_delete_team_wrong_confirm(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        user_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        team = TeamFactory.create(name="Team Name")
+        db_request.POST = MultiDict({"confirm_team_name": "Wrong Team Name"})
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
+
+        view = views.ManageTeamSettingsViews(team, db_request)
+        with pytest.raises(HTTPSeeOther):
+            view.delete_team()
+
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                (
+                    "Could not delete team - "
+                    "'Wrong Team Name' is not the same as 'Team Name'"
+                ),
+                queue="error",
+            )
+        ]
+
+    def test_delete_organization_disable_organizations(self, db_request):
+        team = TeamFactory.create()
+
+        view = views.ManageTeamSettingsViews(team, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.delete_team()
+
+
+class TestManageTeamProjects:
+    def test_manage_team_projects(
+        self,
+        db_request,
+        pyramid_user,
+        organization_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        team = TeamFactory.create()
+        project = ProjectFactory.create()
+
+        TeamProjectRoleFactory.create(
+            project=project, team=team, role_name=TeamProjectRoleType.Administer
+        )
+
+        view = views.ManageTeamProjectsViews(team, db_request)
+        result = view.manage_team_projects()
+
+        assert view.team == team
+        assert view.request == db_request
+        assert result == {
+            "team": team,
+            "active_projects": view.active_projects,
+            "projects_owned": set(),
+            "projects_sole_owned": set(),
+            "projects_requiring_2fa": set(),
+        }
+
+    def test_manage_team_projects_disable_teams(self, db_request):
+        team = TeamFactory.create()
+
+        view = views.ManageTeamProjectsViews(team, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.manage_team_projects()
+
+
+class TestManageTeamRoles:
+    def test_manage_team_roles(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+    ):
+        team = TeamFactory.create()
+
+        db_request.POST = MultiDict()
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.manage_team_roles()
+        form = result["form"]
+
+        assert result == {
+            "team": team,
+            "roles": [],
+            "form": form,
+        }
+
+    def test_manage_team_roles_disable_organizations(self, db_request):
+        team = TeamFactory.create()
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.manage_team_roles()
+
+    def test_create_team_role(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        organization = OrganizationFactory.create()
+        team = TeamFactory(organization=organization)
+        owner = UserFactory.create(username="owner")
+        manager = UserFactory.create(username="manager")
+        member = UserFactory.create(username="user")
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=owner,
+            role_name=OrganizationRoleType.Owner,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=manager,
+            role_name=OrganizationRoleType.Manager,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"username": member.username})
+        db_request.user = owner
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+
+        send_team_member_added_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_team_member_added_email",
+            send_team_member_added_email,
+        )
+        send_added_as_team_member_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_added_as_team_member_email",
+            send_added_as_team_member_email,
+        )
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.create_team_role()
+        roles = organization_service.get_team_roles(team.id)
+
+        assert len(roles) == 1
+        assert roles[0].team_id == team.id
+        assert roles[0].user_id == member.id
+        assert send_team_member_added_email.calls == [
+            pretend.call(
+                db_request,
+                {owner, manager},
+                user=member,
+                submitter=db_request.user,
+                organization_name=team.organization.name,
+                team_name=team.name,
+            )
+        ]
+        assert send_added_as_team_member_email.calls == [
+            pretend.call(
+                db_request,
+                member,
+                submitter=db_request.user,
+                organization_name=team.organization.name,
+                team_name=team.name,
+            )
+        ]
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                f"Added the team {team.name!r} to {team.organization.name!r}",
+                queue="success",
+            )
+        ]
+        assert isinstance(result, HTTPSeeOther)
+
+    def test_create_team_role_duplicate_member(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+    ):
+        organization = OrganizationFactory.create()
+        team = TeamFactory(organization=organization)
+        owner = UserFactory.create(username="owner")
+        manager = UserFactory.create(username="manager")
+        member = UserFactory.create(username="user")
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=owner,
+            role_name=OrganizationRoleType.Owner,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=manager,
+            role_name=OrganizationRoleType.Manager,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+        role = TeamRoleFactory.create(
+            team=team,
+            user=member,
+            role_name=TeamRoleType.Member,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"username": member.username})
+        db_request.user = owner
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.create_team_role()
+        form = result["form"]
+
+        assert organization_service.get_team_roles(team.id) == [role]
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                f"User '{member.username}' is already a team member", queue="error"
+            )
+        ]
+        assert result == {
+            "team": team,
+            "roles": [role],
+            "form": form,
+        }
+
+    def test_create_team_role_not_a_member(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+    ):
+        organization = OrganizationFactory.create()
+        team = TeamFactory(organization=organization)
+        owner = UserFactory.create(username="owner")
+        manager = UserFactory.create(username="manager")
+        not_a_member = UserFactory.create(username="user")
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=owner,
+            role_name=OrganizationRoleType.Owner,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=manager,
+            role_name=OrganizationRoleType.Manager,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"username": not_a_member.username})
+        db_request.user = owner
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.create_team_role()
+        form = result["form"]
+
+        assert result == {
+            "team": team,
+            "roles": [],
+            "form": form,
+        }
+        assert form.username.errors == [
+            (
+                "No organization owner, manager, or member found "
+                "with that username. Please try again."
+            )
+        ]
+
+    def test_create_team_role_disable_organizations(self, db_request):
+        team = TeamFactory.create()
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.create_team_role()
+
+    def test_delete_team_role(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+        monkeypatch,
+    ):
+        organization = OrganizationFactory.create()
+        team = TeamFactory(organization=organization)
+        owner = UserFactory.create(username="owner")
+        manager = UserFactory.create(username="manager")
+        member = UserFactory.create(username="user")
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=owner,
+            role_name=OrganizationRoleType.Owner,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=manager,
+            role_name=OrganizationRoleType.Manager,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+        role = TeamRoleFactory.create(
+            team=team,
+            user=member,
+            role_name=TeamRoleType.Member,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"role_id": role.id})
+        db_request.user = owner
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
+
+        send_team_member_removed_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_team_member_removed_email",
+            send_team_member_removed_email,
+        )
+        send_removed_as_team_member_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_removed_as_team_member_email",
+            send_removed_as_team_member_email,
+        )
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.delete_team_role()
+
+        assert organization_service.get_team_roles(team.id) == []
+        assert send_team_member_removed_email.calls == [
+            pretend.call(
+                db_request,
+                {owner, manager},
+                user=member,
+                submitter=db_request.user,
+                organization_name=team.organization.name,
+                team_name=team.name,
+            )
+        ]
+        assert send_removed_as_team_member_email.calls == [
+            pretend.call(
+                db_request,
+                member,
+                submitter=db_request.user,
+                organization_name=team.organization.name,
+                team_name=team.name,
+            )
+        ]
+        assert db_request.session.flash.calls == [
+            pretend.call("Removed from team", queue="success")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+
+    def test_delete_team_role_not_a_member(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+    ):
+        organization = OrganizationFactory.create()
+        team = TeamFactory(organization=organization)
+        other_team = TeamFactory(organization=organization)
+        owner = UserFactory.create(username="owner")
+        manager = UserFactory.create(username="manager")
+        not_a_member = UserFactory.create(username="user")
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=owner,
+            role_name=OrganizationRoleType.Owner,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=manager,
+            role_name=OrganizationRoleType.Manager,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=not_a_member,
+            role_name=OrganizationRoleType.Member,
+        )
+        other_team_role = TeamRoleFactory.create(
+            team=other_team,
+            user=not_a_member,
+            role_name=TeamRoleType.Member,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"role_id": other_team_role.id})
+        db_request.user = owner
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.delete_team_role()
+
+        assert organization_service.get_team_roles(team.id) == []
+        assert db_request.session.flash.calls == [
+            pretend.call("Could not find member", queue="error")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+
+    def test_delete_team_role_not_a_manager(
+        self,
+        db_request,
+        organization_service,
+        user_service,
+        enable_organizations,
+    ):
+        organization = OrganizationFactory.create()
+        team = TeamFactory(organization=organization)
+        owner = UserFactory.create(username="owner")
+        not_a_manager = UserFactory.create(username="manager")
+        member = UserFactory.create(username="user")
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=owner,
+            role_name=OrganizationRoleType.Owner,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=not_a_manager,
+            role_name=OrganizationRoleType.Member,
+        )
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+        role = TeamRoleFactory.create(
+            team=team,
+            user=member,
+            role_name=TeamRoleType.Member,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"role_id": role.id})
+        db_request.user = not_a_manager
+        db_request.has_permission = lambda *a, **kw: False
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/foo/bar/")
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        result = view.delete_team_role()
+
+        assert organization_service.get_team_roles(team.id) == [role]
+        assert db_request.session.flash.calls == [
+            pretend.call("Cannot remove other people from the team", queue="error")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+
+    def test_delete_team_role_disable_organizations(self, db_request):
+        team = TeamFactory.create()
+
+        view = views.ManageTeamRolesViews(team, db_request)
+        with pytest.raises(HTTPNotFound):
+            view.delete_team_role()
+
+
+class TestManageProjects:
+    def test_manage_projects(self, db_request):
+        older_release = ReleaseFactory(created=datetime.datetime(2015, 1, 1))
+        project_with_older_release = ProjectFactory(releases=[older_release])
+        newer_release = ReleaseFactory(created=datetime.datetime(2017, 1, 1))
+        project_with_newer_release = ProjectFactory(releases=[newer_release])
+        older_project_with_no_releases = ProjectFactory(
+            releases=[], created=datetime.datetime(2016, 1, 1)
+        )
+        newer_project_with_no_releases = ProjectFactory(
+            releases=[], created=datetime.datetime(2018, 1, 1)
+        )
+        project_where_owners_require_2fa = ProjectFactory(
+            releases=[], created=datetime.datetime(2022, 1, 1), owners_require_2fa=True
+        )
+        project_where_pypi_mandates_2fa = ProjectFactory(
+            releases=[], created=datetime.datetime(2022, 1, 2), pypi_mandates_2fa=True
+        )
+        another_project_where_owners_require_2fa = ProjectFactory(
+            releases=[], created=datetime.datetime(2022, 3, 1), owners_require_2fa=True
+        )
+        another_project_where_pypi_mandates_2fa = ProjectFactory(
+            releases=[], created=datetime.datetime(2022, 3, 2), pypi_mandates_2fa=True
+        )
+        team_project = ProjectFactory(
+            name="team-proj", releases=[], created=datetime.datetime(2022, 3, 3)
+        )
+
+        db_request.user = UserFactory()
+        RoleFactory.create(
+            user=db_request.user,
+            project=project_with_older_release,
+            role_name="Maintainer",
+        )
+        RoleFactory.create(
+            user=db_request.user, project=project_with_newer_release, role_name="Owner"
+        )
+        RoleFactory.create(
+            user=db_request.user,
+            project=newer_project_with_no_releases,
+            role_name="Owner",
+        )
+        RoleFactory.create(
+            user=db_request.user,
+            project=older_project_with_no_releases,
+            role_name="Maintainer",
+        )
+        user_second_owner = UserFactory()
+        RoleFactory.create(
+            user=user_second_owner,
+            project=project_with_older_release,
+            role_name="Owner",
+        )
+        RoleFactory.create(
+            user=user_second_owner,
+            project=older_project_with_no_releases,
+            role_name="Owner",
+        )
+        RoleFactory.create(
+            user=user_second_owner,
+            project=project_with_newer_release,
+            role_name="Owner",
+        )
+        RoleFactory.create(
+            user=db_request.user,
+            project=project_where_owners_require_2fa,
+            role_name="Owner",
+        )
+        RoleFactory.create(
+            user=db_request.user,
+            project=project_where_pypi_mandates_2fa,
+            role_name="Owner",
+        )
+        RoleFactory.create(
+            user=db_request.user,
+            project=another_project_where_owners_require_2fa,
+            role_name="Maintainer",
+        )
+        RoleFactory.create(
+            user=db_request.user,
+            project=another_project_where_pypi_mandates_2fa,
+            role_name="Maintainer",
+        )
+        team = TeamFactory()
+        TeamRoleFactory.create(team=team, user=db_request.user)
+        TeamProjectRoleFactory(
+            team=team,
+            project=team_project,
+            role_name=TeamProjectRoleType.Upload,
+        )
+
+        assert views.manage_projects(db_request) == {
+            "projects": [
+                team_project,
+                another_project_where_pypi_mandates_2fa,
+                another_project_where_owners_require_2fa,
+                project_where_pypi_mandates_2fa,
+                project_where_owners_require_2fa,
+                newer_project_with_no_releases,
+                project_with_newer_release,
+                older_project_with_no_releases,
+                project_with_older_release,
+            ],
+            "projects_owned": {
+                project_with_newer_release.name,
+                newer_project_with_no_releases.name,
+                project_where_owners_require_2fa.name,
+                project_where_pypi_mandates_2fa.name,
+            },
+            "projects_sole_owned": {
+                newer_project_with_no_releases.name,
+                project_where_owners_require_2fa.name,
+                project_where_pypi_mandates_2fa.name,
+            },
+            "projects_requiring_2fa": {
+                project_where_owners_require_2fa.name,
+                project_where_pypi_mandates_2fa.name,
+                another_project_where_owners_require_2fa.name,
+                another_project_where_pypi_mandates_2fa.name,
+            },
+            "project_invites": [],
+        }
+
+
+class TestManageProjectSettings:
+    @pytest.mark.parametrize("enabled", [False, True])
+    def test_manage_project_settings(self, enabled, monkeypatch):
+        request = pretend.stub(
+            flags=pretend.stub(enabled=pretend.call_recorder(lambda *a: enabled))
+        )
+        project = pretend.stub(organization=None)
+        view = views.ManageProjectSettingsViews(project, request)
+        form = pretend.stub()
+        view.toggle_2fa_requirement_form_class = lambda *a, **kw: form
+        view.transfer_organization_project_form_class = lambda *a, **kw: form
+
+        user_organizations = pretend.call_recorder(
+            lambda *a, **kw: {
+                "organizations_managed": [],
+                "organizations_owned": [],
+                "organizations_billing": [],
+            }
+        )
+        monkeypatch.setattr(views, "user_organizations", user_organizations)
+
+        assert view.manage_project_settings() == {
+            "project": project,
+            "MAX_FILESIZE": MAX_FILESIZE,
+            "MAX_PROJECT_SIZE": MAX_PROJECT_SIZE,
+            "toggle_2fa_form": form,
+            "transfer_organization_project_form": form,
+        }
+
+    @pytest.mark.parametrize("enabled", [False, None])
+    def test_toggle_2fa_requirement_feature_disabled(self, enabled):
+        request = pretend.stub(
+            registry=pretend.stub(
+                settings={"warehouse.two_factor_requirement.enabled": enabled}
+            ),
+        )
+
+        project = pretend.stub()
+        view = views.ManageProjectSettingsViews(project, request)
+        with pytest.raises(HTTPNotFound):
+            view.toggle_2fa_requirement()
+
+    @pytest.mark.parametrize(
+        "owners_require_2fa, expected, expected_flash_calls",
+        [
+            (
+                False,
+                False,
+                [
+                    pretend.call(
+                        "2FA requirement cannot be disabled for critical projects",
+                        queue="error",
+                    )
+                ],
+            ),
+            (
+                True,
+                True,
+                [
                     pretend.call(
                         "2FA requirement cannot be disabled for critical projects",
                         queue="error",
@@ -6175,6 +6918,38 @@ def test_delete_project_release_file_bad_confirm(self, db_request):
 
 
 class TestManageProjectRoles:
+    @pytest.fixture
+    def organization(self, enable_organizations, pyramid_user):
+        organization = OrganizationFactory.create()
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=pyramid_user,
+            role_name=OrganizationRoleType.Owner,
+        )
+        return organization
+
+    @pytest.fixture
+    def organization_project(self, organization):
+        project = ProjectFactory.create(organization=organization)
+        OrganizationProjectFactory(organization=organization, project=project)
+        return project
+
+    @pytest.fixture
+    def organization_member(self, organization):
+        member = UserFactory.create()
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+        return member
+
+    @pytest.fixture
+    def organization_team(self, organization, organization_member):
+        team = TeamFactory(organization=organization)
+        TeamRoleFactory.create(team=team, user=organization_member)
+        return team
+
     def test_get_manage_project_roles(self, db_request):
         user_service = pretend.stub()
         db_request.find_service = pretend.call_recorder(
@@ -6192,7 +6967,8 @@ def test_get_manage_project_roles(self, db_request):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
-            pretend.call(IUserService, context=None)
+            pretend.call(IOrganizationService, context=None),
+            pretend.call(IUserService, context=None),
         ]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service)
@@ -6202,8 +6978,177 @@ def test_get_manage_project_roles(self, db_request):
             "roles": {role},
             "invitations": {role_invitation},
             "form": form_obj,
+            "enable_internal_collaborator": False,
+            "team_project_roles": set(),
+            "internal_role_form": None,
+        }
+
+    def test_post_new_internal_team_role(
+        self,
+        db_request,
+        organization_project,
+        organization_team,
+        organization_member,
+        monkeypatch,
+    ):
+        db_request.method = "POST"
+        db_request.POST = MultiDict(
+            {
+                "is_team": "true",
+                "team_name": organization_team.name,
+                "team_project_role_name": "Administer",
+                "username": "",
+                "role_name": "",
+            }
+        )
+
+        send_team_collaborator_added_email = pretend.call_recorder(
+            lambda *a, **kw: None
+        )
+        monkeypatch.setattr(
+            views,
+            "send_team_collaborator_added_email",
+            send_team_collaborator_added_email,
+        )
+        send_added_as_team_collaborator_email = pretend.call_recorder(
+            lambda *a, **kw: None
+        )
+        monkeypatch.setattr(
+            views,
+            "send_added_as_team_collaborator_email",
+            send_added_as_team_collaborator_email,
+        )
+
+        result = views.manage_project_roles(organization_project, db_request)
+
+        assert send_team_collaborator_added_email.calls == [
+            pretend.call(
+                db_request,
+                {db_request.user},
+                team=organization_team,
+                submitter=db_request.user,
+                project_name=organization_project.name,
+                role="Administer",
+            )
+        ]
+        assert send_added_as_team_collaborator_email.calls == [
+            pretend.call(
+                db_request,
+                {organization_member},
+                team=organization_team,
+                submitter=db_request.user,
+                project_name=organization_project.name,
+                role="Administer",
+            )
+        ]
+        assert isinstance(result, HTTPSeeOther)
+
+    def test_post_duplicate_internal_team_role(
+        self,
+        db_request,
+        organization_project,
+        organization_team,
+        monkeypatch,
+    ):
+        db_request.method = "POST"
+        db_request.POST = MultiDict(
+            {
+                "is_team": "true",
+                "team_name": organization_team.name,
+                "team_project_role_name": "Administer",
+                "username": "",
+                "role_name": "",
+            }
+        )
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+
+        team_project_role = TeamProjectRoleFactory.create(
+            team=organization_team,
+            project=organization_project,
+            role_name=TeamProjectRoleType.Administer,
+        )
+
+        result = views.manage_project_roles(organization_project, db_request)
+        form = result["form"]
+        internal_role_form = result["internal_role_form"]
+
+        # No additional roles are created
+        assert team_project_role == db_request.db.query(TeamProjectRole).one()
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                (
+                    f"Team '{organization_team.name}' already has "
+                    "Administer permissions for project"
+                ),
+                queue="error",
+            )
+        ]
+        assert result == {
+            "project": organization_project,
+            "roles": set(),
+            "invitations": set(),
+            "form": form,
+            "enable_internal_collaborator": True,
+            "team_project_roles": {team_project_role},
+            "internal_role_form": internal_role_form,
         }
 
+    def test_post_new_internal_role(
+        self,
+        db_request,
+        organization_project,
+        organization_member,
+        monkeypatch,
+    ):
+        db_request.method = "POST"
+        db_request.POST = MultiDict(
+            {
+                "is_team": "false",
+                "team_name": "",
+                "team_project_role_name": "Administer",
+                "username": organization_member.username,
+                "role_name": "Owner",
+            }
+        )
+
+        send_collaborator_added_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_collaborator_added_email",
+            send_collaborator_added_email,
+        )
+        send_added_as_collaborator_email = pretend.call_recorder(lambda *a, **kw: None)
+        monkeypatch.setattr(
+            views,
+            "send_added_as_collaborator_email",
+            send_added_as_collaborator_email,
+        )
+
+        result = views.manage_project_roles(organization_project, db_request)
+
+        assert send_collaborator_added_email.calls == [
+            pretend.call(
+                db_request,
+                {db_request.user},
+                user=organization_member,
+                submitter=db_request.user,
+                project_name=organization_project.name,
+                role="Owner",
+            )
+        ]
+        assert send_added_as_collaborator_email.calls == [
+            pretend.call(
+                db_request,
+                organization_member,
+                submitter=db_request.user,
+                project_name=organization_project.name,
+                role="Owner",
+            )
+        ]
+        assert isinstance(result, HTTPSeeOther)
+
     def test_post_new_role_validation_fails(self, db_request):
         project = ProjectFactory.create(name="foobar")
         user = UserFactory.create(username="testuser")
@@ -6222,7 +7167,8 @@ def test_post_new_role_validation_fails(self, db_request):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
-            pretend.call(IUserService, context=None)
+            pretend.call(IOrganizationService, context=None),
+            pretend.call(IUserService, context=None),
         ]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service)
@@ -6233,6 +7179,9 @@ def test_post_new_role_validation_fails(self, db_request):
             "roles": {role},
             "invitations": {role_invitation},
             "form": form_obj,
+            "enable_internal_collaborator": False,
+            "team_project_roles": set(),
+            "internal_role_form": None,
         }
 
     def test_post_new_role(self, monkeypatch, db_request):
@@ -6241,13 +7190,10 @@ def test_post_new_role(self, monkeypatch, db_request):
         EmailFactory.create(user=new_user, verified=True, primary=True)
         owner_1 = UserFactory.create(username="owner_1")
         owner_2 = UserFactory.create(username="owner_2")
-        owner_1_role = RoleFactory.create(
-            user=owner_1, project=project, role_name="Owner"
-        )
-        owner_2_role = RoleFactory.create(
-            user=owner_2, project=project, role_name="Owner"
-        )
+        RoleFactory.create(user=owner_1, project=project, role_name="Owner")
+        RoleFactory.create(user=owner_2, project=project, role_name="Owner")
 
+        organization_service = pretend.stub()
         user_service = pretend.stub(
             find_userid=lambda username: new_user.id, get_user=lambda userid: new_user
         )
@@ -6256,6 +7202,7 @@ def test_post_new_role(self, monkeypatch, db_request):
         )
         db_request.find_service = pretend.call_recorder(
             lambda iface, context=None, name=None: {
+                IOrganizationService: organization_service,
                 ITokenService: token_service,
                 IUserService: user_service,
             }.get(iface)
@@ -6285,32 +7232,27 @@ def test_post_new_role(self, monkeypatch, db_request):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
+            pretend.call(IOrganizationService, context=None),
             pretend.call(IUserService, context=None),
             pretend.call(ITokenService, name="email"),
         ]
         assert form_obj.validate.calls == [pretend.call()]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service),
-            pretend.call(user_service=user_service),
         ]
         assert db_request.session.flash.calls == [
            pretend.call(f"Invitation sent to '{new_user.username}'", queue="success")
         ]
 
         # Only one role invitation is created
-        role_invitation = (
+        assert (
             db_request.db.query(RoleInvitation)
             .filter(RoleInvitation.user == new_user)
             .filter(RoleInvitation.project == project)
            .one()
         )
 
-        assert result == {
-            "project": project,
-            "roles": {owner_1_role, owner_2_role},
-            "invitations": {role_invitation},
-            "form": form_obj,
-        }
+        assert isinstance(result, HTTPSeeOther)
 
         assert send_project_role_verification_email.calls == [
             pretend.call(
@@ -6336,6 +7278,7 @@ def test_post_duplicate_role(self, db_request):
         user = UserFactory.create(username="testuser")
         role = RoleFactory.create(user=user, project=project, role_name="Owner")
 
+        organization_service = pretend.stub()
         user_service = pretend.stub(
             find_userid=lambda username: user.id, get_user=lambda userid: user
         )
@@ -6344,6 +7287,7 @@ def test_post_duplicate_role(self, db_request):
         )
         db_request.find_service = pretend.call_recorder(
             lambda iface, context=None, name=None: {
+                IOrganizationService: organization_service,
                 ITokenService: token_service,
                 IUserService: user_service,
             }.get(iface)
@@ -6363,13 +7307,12 @@ def test_post_duplicate_role(self, db_request):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
+            pretend.call(IOrganizationService, context=None),
             pretend.call(IUserService, context=None),
-            pretend.call(ITokenService, name="email"),
         ]
         assert form_obj.validate.calls == [pretend.call()]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service),
-            pretend.call(user_service=user_service),
         ]
         assert db_request.session.flash.calls == [
             pretend.call(
@@ -6385,6 +7328,9 @@ def test_post_duplicate_role(self, db_request):
             "roles": {role},
             "invitations": set(),
             "form": form_obj,
+            "enable_internal_collaborator": False,
+            "team_project_roles": set(),
+            "internal_role_form": None,
         }
 
     def test_reinvite_role_after_expiration(self, monkeypatch, db_request):
@@ -6393,16 +7339,13 @@ def test_reinvite_role_after_expiration(self, monkeypatch, db_request):
         EmailFactory.create(user=new_user, verified=True, primary=True)
         owner_1 = UserFactory.create(username="owner_1")
         owner_2 = UserFactory.create(username="owner_2")
-        owner_1_role = RoleFactory.create(
-            user=owner_1, project=project, role_name="Owner"
-        )
-        owner_2_role = RoleFactory.create(
-            user=owner_2, project=project, role_name="Owner"
-        )
-        new_user_role_invitation = RoleInvitationFactory.create(
+        RoleFactory.create(user=owner_1, project=project, role_name="Owner")
+        RoleFactory.create(user=owner_2, project=project, role_name="Owner")
+        RoleInvitationFactory.create(
             user=new_user, project=project, invite_status="expired"
         )
 
+        organization_service = pretend.stub()
         user_service = pretend.stub(
             find_userid=lambda username: new_user.id, get_user=lambda userid: new_user
         )
@@ -6411,6 +7354,7 @@ def test_reinvite_role_after_expiration(self, monkeypatch, db_request):
         )
         db_request.find_service = pretend.call_recorder(
             lambda iface, context=None, name=None: {
+                IOrganizationService: organization_service,
                 ITokenService: token_service,
                 IUserService: user_service,
             }.get(iface)
@@ -6441,34 +7385,27 @@ def test_reinvite_role_after_expiration(self, monkeypatch, db_request):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
+            pretend.call(IOrganizationService, context=None),
             pretend.call(IUserService, context=None),
             pretend.call(ITokenService, name="email"),
         ]
         assert form_obj.validate.calls == [pretend.call()]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service),
-            pretend.call(user_service=user_service),
         ]
         assert db_request.session.flash.calls == [
             pretend.call(f"Invitation sent to '{new_user.username}'", queue="success")
         ]
 
         # Only one role invitation is created
-        role_invitation = (
+        assert (
            db_request.db.query(RoleInvitation)
            .filter(RoleInvitation.user == new_user)
            .filter(RoleInvitation.project == project)
            .one()
         )
 
-        assert result["invitations"] == {new_user_role_invitation}
-
-        assert result == {
-            "project": project,
-            "roles": {owner_1_role, owner_2_role},
-            "invitations": {role_invitation},
-            "form": form_obj,
-        }
+        assert isinstance(result, HTTPSeeOther)
 
         assert send_project_role_verification_email.calls == [
             pretend.call(
@@ -6496,6 +7433,7 @@ def test_post_unverified_email(self, db_request, with_email):
         if with_email:
             EmailFactory.create(user=user, verified=False, primary=True)
 
+        organization_service = pretend.stub()
         user_service = pretend.stub(
             find_userid=lambda username: user.id, get_user=lambda userid: user
         )
@@ -6506,6 +7444,7 @@ def test_post_unverified_email(self, db_request, with_email):
         )
         db_request.find_service = pretend.call_recorder(
             lambda iface, context=None, name=None: {
+                IOrganizationService: organization_service,
                 ITokenService: token_service,
                 IUserService: user_service,
             }.get(iface)
@@ -6525,13 +7464,13 @@ def test_post_unverified_email(self, db_request, with_email):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
+            pretend.call(IOrganizationService, context=None),
             pretend.call(IUserService, context=None),
             pretend.call(ITokenService, name="email"),
         ]
         assert form_obj.validate.calls == [pretend.call()]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service),
-            pretend.call(user_service=user_service),
         ]
         assert db_request.session.flash.calls == [
             pretend.call(
@@ -6549,6 +7488,9 @@ def test_post_unverified_email(self, db_request, with_email):
             "roles": set(),
             "invitations": set(),
             "form": form_obj,
+            "enable_internal_collaborator": False,
+            "team_project_roles": set(),
+            "internal_role_form": None,
         }
 
     def test_cannot_reinvite_role(self, db_request):
@@ -6567,6 +7509,7 @@ def test_cannot_reinvite_role(self, db_request):
             user=new_user, project=project, invite_status="pending"
         )
 
+        organization_service = pretend.stub()
         user_service = pretend.stub(
             find_userid=lambda username: new_user.id, get_user=lambda userid: new_user
         )
@@ -6577,6 +7520,7 @@ def test_cannot_reinvite_role(self, db_request):
         )
         db_request.find_service = pretend.call_recorder(
             lambda iface, context=None, name=None: {
+                IOrganizationService: organization_service,
                 ITokenService: token_service,
                 IUserService: user_service,
             }.get(iface)
@@ -6598,13 +7542,13 @@ def test_cannot_reinvite_role(self, db_request):
         result = views.manage_project_roles(project, db_request, _form_class=form_class)
 
         assert db_request.find_service.calls == [
+            pretend.call(IOrganizationService, context=None),
             pretend.call(IUserService, context=None),
             pretend.call(ITokenService, name="email"),
         ]
         assert form_obj.validate.calls == [pretend.call()]
         assert form_class.calls == [
             pretend.call(db_request.POST, user_service=user_service),
-            pretend.call(user_service=user_service),
         ]
         assert db_request.session.flash.calls == [
             pretend.call(
@@ -6618,6 +7562,9 @@ def test_cannot_reinvite_role(self, db_request):
             "roles": {owner_1_role, owner_2_role},
             "invitations": {new_user_invitation},
             "form": form_obj,
+            "enable_internal_collaborator": False,
+            "team_project_roles": set(),
+            "internal_role_form": None,
         }
 
 
@@ -6899,7 +7846,7 @@ def test_change_own_owner_role(self, db_request):
         assert result.headers["Location"] == "/the-redirect"
 
 
-class TestDeleteProjectRoles:
+class TestDeleteProjectRole:
     def test_delete_role(self, db_request, monkeypatch):
         project = ProjectFactory.create(name="foobar")
         user = UserFactory.create(username="testuser")
@@ -7022,6 +7969,357 @@ def test_delete_non_owner_role(self, db_request):
         assert result.headers["Location"] == "/the-redirect"
 
 
+class TestChangeTeamProjectRole:
+    @pytest.fixture
+    def organization(self, enable_organizations, pyramid_user):
+        organization = OrganizationFactory.create()
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=pyramid_user,
+            role_name=OrganizationRoleType.Owner,
+        )
+        return organization
+
+    @pytest.fixture
+    def organization_project(self, organization):
+        project = ProjectFactory.create(organization=organization)
+        OrganizationProjectFactory(organization=organization, project=project)
+        return project
+
+    @pytest.fixture
+    def organization_member(self, organization):
+        member = UserFactory.create()
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+        return member
+
+    @pytest.fixture
+    def organization_team(self, organization, organization_member):
+        team = TeamFactory(organization=organization)
+        TeamRoleFactory.create(team=team, user=organization_member)
+        return team
+
+    def test_change_role(
+        self,
+        db_request,
+        pyramid_user,
+        organization_member,
+        organization_team,
+        organization_project,
+        monkeypatch,
+    ):
+        role = TeamProjectRoleFactory.create(
+            team=organization_team,
+            project=organization_project,
+            role_name=TeamProjectRoleType.Administer,
+        )
+        new_role_name = TeamProjectRoleType.Upload
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict(
+            {"role_id": role.id, "team_project_role_name": new_role_name}
+        )
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect")
+
+        send_team_collaborator_role_changed_email = pretend.call_recorder(
+            lambda *a, **kw: None
+        )
+        monkeypatch.setattr(
+            views,
+            "send_team_collaborator_role_changed_email",
+            send_team_collaborator_role_changed_email,
+        )
+        send_role_changed_as_team_collaborator_email = pretend.call_recorder(
+            lambda *a, **kw: None
+        )
+        monkeypatch.setattr(
+            views,
+            "send_role_changed_as_team_collaborator_email",
+            send_role_changed_as_team_collaborator_email,
+        )
+
+        result = views.change_team_project_role(organization_project, db_request)
+
+        assert role.role_name == new_role_name
+        assert db_request.route_path.calls == [
+            pretend.call("manage.project.roles", project_name=organization_project.name)
+        ]
+        assert send_team_collaborator_role_changed_email.calls == [
+            pretend.call(
+                db_request,
+                {pyramid_user},
+                team=organization_team,
+                submitter=pyramid_user,
+                project_name=organization_project.name,
+                role=new_role_name.value,
+            )
+        ]
+        assert send_role_changed_as_team_collaborator_email.calls == [
+            pretend.call(
+                db_request,
+                {organization_member},
+                team=organization_team,
+                submitter=pyramid_user,
+                project_name=organization_project.name,
+                role=new_role_name.value,
+            )
+        ]
+        assert db_request.session.flash.calls == [
+            pretend.call("Changed permissions", queue="success")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+        entry = (
+            db_request.db.query(JournalEntry).options(joinedload("submitted_by")).one()
+        )
+
+        assert entry.name == organization_project.name
+        assert entry.action == f"change Administer {organization_team.name} to Upload"
+        assert entry.submitted_by == db_request.user
+        assert entry.submitted_from == db_request.remote_addr
+
+    def test_change_role_invalid_role_name(self, pyramid_request, organization_project):
+        pyramid_request.method = "POST"
+        pyramid_request.POST = MultiDict(
+            {
+                "role_id": str(uuid.uuid4()),
+                "team_project_role_name": "Invalid Role Name",
+            }
+        )
+        pyramid_request.route_path = pretend.call_recorder(
+            lambda *a, **kw: "/the-redirect"
+        )
+
+        result = views.change_team_project_role(organization_project, pyramid_request)
+
+        assert pyramid_request.route_path.calls == [
+            pretend.call("manage.project.roles", project_name=organization_project.name)
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+    def test_change_missing_role(self, db_request, organization_project):
+        missing_role_id = str(uuid.uuid4())
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict(
+            {"role_id": missing_role_id, "team_project_role_name": "Administer"}
+        )
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect")
+
+        result = views.change_team_project_role(organization_project, db_request)
+
+        assert db_request.session.flash.calls == [
+            pretend.call("Could not find permissions", queue="error")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+    def test_change_own_owner_role(
+        self,
+        db_request,
+        organization_member,
+        organization_team,
+        organization_project,
+    ):
+        role = TeamProjectRoleFactory.create(
+            team=organization_team,
+            project=organization_project,
+            role_name=TeamProjectRoleType.Administer,
+        )
+
+        db_request.method = "POST"
+        db_request.user = organization_member
+        db_request.POST = MultiDict(
+            {"role_id": role.id, "team_project_role_name": "Upload"}
+        )
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect")
+
+        result = views.change_team_project_role(organization_project, db_request)
+
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                "Cannot remove your own team with Administer permissions", queue="error"
+            )
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+
+class TestDeleteTeamProjectRole:
+    @pytest.fixture
+    def organization(self, enable_organizations, pyramid_user):
+        organization = OrganizationFactory.create()
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=pyramid_user,
+            role_name=OrganizationRoleType.Owner,
+        )
+        return organization
+
+    @pytest.fixture
+    def organization_project(self, organization):
+        project = ProjectFactory.create(organization=organization)
+        OrganizationProjectFactory(organization=organization, project=project)
+        return project
+
+    @pytest.fixture
+    def organization_member(self, organization):
+        member = UserFactory.create()
+        OrganizationRoleFactory.create(
+            organization=organization,
+            user=member,
+            role_name=OrganizationRoleType.Member,
+        )
+        return member
+
+    @pytest.fixture
+    def organization_team(self, organization, organization_member):
+        team = TeamFactory(organization=organization)
+        TeamRoleFactory.create(team=team, user=organization_member)
+        return team
+
+    def test_delete_role(
+        self,
+        db_request,
+        organization_member,
+        organization_team,
+        organization_project,
+        pyramid_user,
+        monkeypatch,
+    ):
+        role = TeamProjectRoleFactory.create(
+            team=organization_team,
+            project=organization_project,
+            role_name=TeamProjectRoleType.Administer,
+        )
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"role_id": role.id})
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect")
+
+        send_team_collaborator_removed_email = pretend.call_recorder(
+            lambda *a, **kw: None
+        )
+        monkeypatch.setattr(
+            views,
+            "send_team_collaborator_removed_email",
+            send_team_collaborator_removed_email,
+        )
+        send_removed_as_team_collaborator_email = pretend.call_recorder(
+            lambda *a, **kw: None
+        )
+        monkeypatch.setattr(
+            views,
+            "send_removed_as_team_collaborator_email",
+            send_removed_as_team_collaborator_email,
+        )
+
+        result = views.delete_team_project_role(organization_project, db_request)
+
+        assert db_request.route_path.calls == [
+            pretend.call("manage.project.roles", project_name=organization_project.name)
+        ]
+        assert db_request.db.query(TeamProjectRole).all() == []
+        assert send_team_collaborator_removed_email.calls == [
+            pretend.call(
+                db_request,
+                {pyramid_user},
+                team=organization_team,
+                submitter=pyramid_user,
+                project_name=organization_project.name,
+            )
+        ]
+        assert send_removed_as_team_collaborator_email.calls == [
+            pretend.call(
+                db_request,
+                {organization_member},
+                team=organization_team,
+                submitter=pyramid_user,
+                project_name=organization_project.name,
+            )
+        ]
+        assert db_request.session.flash.calls == [
+            pretend.call("Removed permissions", queue="success")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+        entry = (
+            db_request.db.query(JournalEntry).options(joinedload("submitted_by")).one()
+        )
+
+        assert entry.name == organization_project.name
+        assert entry.action == f"remove Administer {organization_team.name}"
+        assert entry.submitted_by == db_request.user
+        assert entry.submitted_from == db_request.remote_addr
+
+    def test_delete_missing_role(self, db_request, organization_project):
+        missing_role_id = str(uuid.uuid4())
+
+        db_request.method = "POST"
+        db_request.POST = MultiDict({"role_id": missing_role_id})
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect")
+
+        result = views.delete_team_project_role(organization_project, db_request)
+
+        assert db_request.session.flash.calls == [
+            pretend.call("Could not find permissions", queue="error")
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+    def test_delete_own_owner_role(
+        self,
+        db_request,
+        organization_member,
+        organization_team,
+        organization_project,
+    ):
+        role = TeamProjectRoleFactory.create(
+            team=organization_team,
+            project=organization_project,
+            role_name=TeamProjectRoleType.Administer,
+        )
+
+        db_request.method = "POST"
+        db_request.user = organization_member
+        db_request.POST = MultiDict({"role_id": role.id})
+        db_request.session = pretend.stub(
+            flash=pretend.call_recorder(lambda *a, **kw: None)
+        )
+        db_request.route_path = pretend.call_recorder(lambda *a, **kw: "/the-redirect")
+
+        result = views.delete_team_project_role(organization_project, db_request)
+
+        assert db_request.session.flash.calls == [
+            pretend.call(
+                "Cannot remove your own team with Administer permissions", queue="error"
+            )
+        ]
+        assert isinstance(result, HTTPSeeOther)
+        assert result.headers["Location"] == "/the-redirect"
+
+
 class TestManageProjectHistory:
     def test_get(self, db_request):
         project = ProjectFactory.create()
diff --git a/tests/unit/organizations/test_models.py b/tests/unit/organizations/test_models.py
--- a/tests/unit/organizations/test_models.py
+++ b/tests/unit/organizations/test_models.py
@@ -17,12 +17,17 @@
 from pyramid.httpexceptions import HTTPPermanentRedirect
 from pyramid.location import lineage
 
-from warehouse.organizations.models import OrganizationFactory, OrganizationRoleType
+from warehouse.organizations.models import (
+    OrganizationFactory,
+    OrganizationRoleType,
+    TeamFactory,
+)
 
 from ...common.db.organizations import (
     OrganizationFactory as DBOrganizationFactory,
     OrganizationNameCatalogFactory as DBOrganizationNameCatalogFactory,
     OrganizationRoleFactory as DBOrganizationRoleFactory,
+    TeamFactory as DBTeamFactory,
 )
 
 
@@ -98,12 +103,142 @@ def test_acl(self, db_session):
             (
                 Allow,
                 f"user:{owner1.user.id}",
-                ["view:organization", "manage:organization"],
+                [
+                    "view:organization",
+                    "view:team",
+                    "manage:organization",
+                    "manage:team",
+                ],
+            ),
+            (
+                Allow,
+                f"user:{owner2.user.id}",
+                [
+                    "view:organization",
+                    "view:team",
+                    "manage:organization",
+                    "manage:team",
+                ],
+            ),
+        ],
+        key=lambda x: x[1],
+    ) + sorted(
+        [
+            (
+                Allow,
+                f"user:{billing_mgr1.user.id}",
+                ["view:organization", "view:team", "manage:billing"],
+            ),
+            (
+                Allow,
+                f"user:{billing_mgr2.user.id}",
+                ["view:organization", "view:team", "manage:billing"],
+            ),
+        ],
+        key=lambda x: x[1],
+    ) + sorted(
+        [
+            (
+                Allow,
+                f"user:{account_mgr1.user.id}",
+                ["view:organization", "view:team", "manage:team"],
+            ),
+            (
+                Allow,
+                f"user:{account_mgr2.user.id}",
+                ["view:organization", "view:team", "manage:team"],
+            ),
+        ],
+        key=lambda x: x[1],
+    ) + sorted(
+        [
+            (Allow, f"user:{member1.user.id}", ["view:organization", "view:team"]),
+            (Allow, f"user:{member2.user.id}", ["view:organization", "view:team"]),
+        ],
+        key=lambda x: x[1],
+    )
+
+
+class TestTeamFactory:
+    def test_traversal_finds(self, db_request):
+        organization = DBOrganizationFactory.create(name="foo")
+        team = DBTeamFactory.create(organization=organization, name="Bar")
+
+        root = TeamFactory(db_request)
+
+        assert root["foo"]["bar"] == team
+
+    def test_traversal_cant_find(self, db_request):
+        organization = DBOrganizationFactory.create(name="foo")
+        DBTeamFactory.create(organization=organization, name="Bar")
+
+        root = TeamFactory(db_request)
+
+        with pytest.raises(KeyError):
+            root["foo"]["invalid"]
+
+
+class TestTeam:
+    def test_acl(self, db_session):
+        organization = DBOrganizationFactory.create()
+        team = DBTeamFactory.create(organization=organization)
+        owner1 = DBOrganizationRoleFactory.create(organization=organization)
+        owner2 = DBOrganizationRoleFactory.create(organization=organization)
+        billing_mgr1 = DBOrganizationRoleFactory.create(
+            organization=organization, role_name=OrganizationRoleType.BillingManager
+        )
+        billing_mgr2 = DBOrganizationRoleFactory.create(
+            organization=organization, role_name=OrganizationRoleType.BillingManager
+        )
+        account_mgr1 = DBOrganizationRoleFactory.create(
+            organization=organization, role_name=OrganizationRoleType.Manager
+        )
+        account_mgr2 = DBOrganizationRoleFactory.create(
+            organization=organization, role_name=OrganizationRoleType.Manager
+        )
+        member1 = DBOrganizationRoleFactory.create(
+            organization=organization, role_name=OrganizationRoleType.Member
+        )
+        member2 = DBOrganizationRoleFactory.create(
+            organization=organization, role_name=OrganizationRoleType.Member
+        )
+
+        acls = []
+        for location in lineage(team):
+            try:
+                acl = location.__acl__
+            except AttributeError:
+                continue
+
+            if acl and callable(acl):
+                acl = acl()
+
+            acls.extend(acl)
+
+        assert acls == [
+            (Allow, "group:admins", "admin"),
+            (Allow, "group:moderators", "moderator"),
+        ] + sorted(
+            [
+                (
+                    Allow,
+                    f"user:{owner1.user.id}",
+                    [
+                        "view:organization",
+                        "view:team",
+                        "manage:organization",
+                        "manage:team",
+                    ],
                 ),
             (
                 Allow,
                 f"user:{owner2.user.id}",
-                ["view:organization", "manage:organization"],
+                [
+                    "view:organization",
+                    "view:team",
+                    "manage:organization",
+                    "manage:team",
+                ],
             ),
         ],
         key=lambda x: x[1],
@@ -112,12 +247,12 @@ def test_acl(self, db_session):
             (
                 Allow,
                 f"user:{billing_mgr1.user.id}",
-                ["view:organization", "manage:billing"],
+                ["view:organization", "view:team", "manage:billing"],
             ),
             (
                 Allow,
                 f"user:{billing_mgr2.user.id}",
-                ["view:organization", "manage:billing"],
+                ["view:organization", "view:team", "manage:billing"],
             ),
         ],
         key=lambda x: x[1],
@@ -126,19 +261,19 @@ def test_acl(self, db_session):
             (
                 Allow,
                 f"user:{account_mgr1.user.id}",
-                ["view:organization", "manage:team"],
+                ["view:organization", "view:team", "manage:team"],
             ),
             (
                 Allow,
                 f"user:{account_mgr2.user.id}",
-                ["view:organization", "manage:team"],
+                ["view:organization", "view:team", "manage:team"],
             ),
         ],
         key=lambda x: x[1],
     ) + sorted(
         [
-            (Allow, f"user:{member1.user.id}", ["view:organization"]),
-            (Allow, f"user:{member2.user.id}", ["view:organization"]),
+            (Allow, f"user:{member1.user.id}", ["view:organization", "view:team"]),
+            (Allow, f"user:{member2.user.id}", ["view:organization", "view:team"]),
         ],
         key=lambda x: x[1],
     )
diff --git a/tests/unit/organizations/test_services.py b/tests/unit/organizations/test_services.py
--- a/tests/unit/organizations/test_services.py
+++ b/tests/unit/organizations/test_services.py
@@ -23,6 +23,9 @@
     OrganizationRole,
     OrganizationRoleType,
     OrganizationType,
+    Team,
+    TeamProjectRole,
+    TeamRole,
 )
 
 from ...common.db.organizations import (
@@ -30,6 +33,9 @@
     OrganizationInvitationFactory,
     OrganizationProjectFactory,
     OrganizationRoleFactory,
+    TeamFactory,
+    TeamProjectRoleFactory,
+    TeamRoleFactory,
     UserFactory,
 )
 from ...common.db.packaging import ProjectFactory
@@ -314,6 +320,8 @@ def test_decline_organization(self, organization_service):
 
     def test_delete_organization(self, organization_service, db_request):
         organization = OrganizationFactory.create()
+        TeamFactory.create(organization=organization)
+        TeamFactory.create(organization=organization)
 
         organization_service.delete_organization(organization.id)
 
@@ -345,6 +353,9 @@ def test_delete_organization(self, organization_service, db_request):
                 .count()
             )
         )
+        assert not (
+            (db_request.db.query(Team).filter_by(organization=organization).count())
+        )
         assert organization_service.get_organization(organization.id) is None
 
     def test_rename_organization(self, organization_service, db_request):
@@ -431,3 +442,174 @@ def test_delete_organization_project(self, organization_service, db_request):
             )
             .count()
         )
+
+    def test_get_teams_by_organization(self, organization_service):
+        organization = OrganizationFactory.create()
+
+        team = TeamFactory.create(organization=organization)
+        teams = organization_service.get_teams_by_organization(organization.id)
+        assert len(teams) == 1
+        assert team in teams
+
+        team2 = TeamFactory.create(organization=organization)
+        teams = organization_service.get_teams_by_organization(organization.id)
+
+        assert len(teams) == 2
+        assert team in teams
+        assert team2 in teams
+
+    def test_get_team(self, organization_service):
+        team = TeamFactory.create()
+        assert organization_service.get_team(team.id) == team
+
+    def test_find_teamid(self, organization_service):
+        organization = OrganizationFactory.create()
+        team = TeamFactory.create(organization=organization)
+        assert organization_service.find_teamid(organization.id, team.name) == team.id
+
+    def test_find_teamid_nonexistent_org(self, organization_service):
+        organization = OrganizationFactory.create()
+        assert (
+            organization_service.find_teamid(organization.id, "a_spoon_in_the_matrix")
+            is None
+        )
+
+    def test_get_teams_by_user(self, organization_service):
+        team = TeamFactory.create()
+        user = UserFactory.create()
+        TeamRoleFactory.create(team=team, user=user)
+
+        teams = organization_service.get_teams_by_user(user.id)
+        assert team in teams
+
+        team2 = TeamFactory.create()
+        TeamRoleFactory.create(team=team2, user=user)
+
+        teams = organization_service.get_teams_by_user(user.id)
+        assert team in teams
+        assert team2 in teams
+
+    def test_add_team(self, organization_service):
+        team = TeamFactory.create()
+        new_team = organization_service.add_team(
+            name=team.name,
+            organization_id=team.organization.id,
+        )
+        organization_service.db.flush()
+        team_from_db = organization_service.get_team(new_team.id)
+
+        assert team_from_db.name == team.name
+        assert team_from_db.organization_id == team.organization_id
+
+    def test_rename_team(self, organization_service):
+        team = TeamFactory.create()
+
+        organization_service.rename_team(team.id,
"some_new_name") + assert team.name == "some_new_name" + + db_team = organization_service.get_team(team.id) + assert db_team.name == "some_new_name" + + def test_delete_team(self, organization_service): + team = TeamFactory.create() + user = UserFactory.create() + project = ProjectFactory.create() + team_role = TeamRoleFactory.create(team=team, user=user) + team_project_role = TeamProjectRoleFactory.create(team=team, project=project) + + assert organization_service.get_team_role(team_role.id) is not None + assert ( + organization_service.get_team_project_role(team_project_role.id) is not None + ) + + team_role_id = team_role.id + team_project_role_id = team_project_role.id + + organization_service.delete_team(team.id) + + assert organization_service.get_team_role(team_role_id) is None + assert organization_service.get_team_project_role(team_project_role_id) is None + assert organization_service.get_team(team.id) is None + + def test_delete_teams_by_organization(self, organization_service): + organization = OrganizationFactory.create() + + team = TeamFactory.create(organization=organization) + team2 = TeamFactory.create(organization=organization) + + teams = organization_service.get_teams_by_organization(organization.id) + assert len(teams) == 2 + assert team in teams + assert team2 in teams + + organization_service.delete_teams_by_organization(organization.id) + + teams = organization_service.get_teams_by_organization(organization.id) + assert len(teams) == 0 + assert team not in teams + assert team2 not in teams + + def test_get_team_role(self, organization_service): + team = TeamFactory.create() + user = UserFactory.create() + team_role = TeamRoleFactory.create(team=team, user=user) + + assert organization_service.get_team_role(team_role.id) == team_role + + def test_add_team_role(self, organization_service, db_request): + team = TeamFactory.create() + user = UserFactory.create() + + organization_service.add_team_role(team.id, user.id, "Member") + assert ( + db_request.db.query(TeamRole) + .filter( + TeamRole.team_id == team.id, + TeamRole.user_id == user.id, + TeamRole.role_name == "Member", + ) + .count() + ) + + def test_delete_team_role(self, organization_service): + team = TeamFactory.create() + user = UserFactory.create() + team_role = TeamRoleFactory.create(team=team, user=user) + team_role_id = team_role.id + + organization_service.delete_team_role(team_role.id) + assert organization_service.get_team_role(team_role_id) is None + + def test_get_team_project_role(self, organization_service): + team = TeamFactory.create() + project = ProjectFactory.create() + team_project_role = TeamProjectRoleFactory.create(team=team, project=project) + + assert ( + organization_service.get_team_project_role(team_project_role.id) + == team_project_role + ) + + def test_add_team_project_role(self, organization_service, db_request): + team = TeamFactory.create() + project = ProjectFactory.create() + + organization_service.add_team_project_role(team.id, project.id, "Owner") + assert ( + db_request.db.query(TeamProjectRole) + .filter( + TeamProjectRole.team_id == team.id, + TeamProjectRole.project_id == project.id, + TeamProjectRole.role_name == "Owner", + ) + .count() + ) + + def test_delete_team_project_role(self, organization_service): + team = TeamFactory.create() + project = ProjectFactory.create() + team_project_role = TeamProjectRoleFactory.create(team=team, project=project) + team_project_role_id = team_project_role.id + + organization_service.delete_team_project_role(team_project_role.id) + 
assert organization_service.get_team_role(team_project_role_id) is None diff --git a/tests/unit/packaging/test_models.py b/tests/unit/packaging/test_models.py --- a/tests/unit/packaging/test_models.py +++ b/tests/unit/packaging/test_models.py @@ -18,12 +18,16 @@ from pyramid.authorization import Allow from pyramid.location import lineage +from warehouse.organizations.models import TeamProjectRoleType from warehouse.packaging.models import File, ProjectFactory, ReleaseURL from ...common.db.organizations import ( OrganizationFactory as DBOrganizationFactory, OrganizationProjectFactory as DBOrganizationProjectFactory, OrganizationRoleFactory as DBOrganizationRoleFactory, + TeamFactory as DBTeamFactory, + TeamProjectRoleFactory as DBTeamProjectRoleFactory, + TeamRoleFactory as DBTeamRoleFactory, ) from ...common.db.packaging import ( FileFactory as DBFileFactory, @@ -117,6 +121,12 @@ def test_acl(self, db_session): owner3 = DBOrganizationRoleFactory.create(organization=organization) DBOrganizationProjectFactory.create(organization=organization, project=project) + team = DBTeamFactory.create() + owner4 = DBTeamRoleFactory.create(team=team) + DBTeamProjectRoleFactory.create( + team=team, project=project, role_name=TeamProjectRoleType.Administer + ) + acls = [] for location in lineage(project): try: @@ -137,6 +147,7 @@ def test_acl(self, db_session): (Allow, f"user:{owner1.user.id}", ["manage:project", "upload"]), (Allow, f"user:{owner2.user.id}", ["manage:project", "upload"]), (Allow, f"user:{owner3.user.id}", ["manage:project", "upload"]), + (Allow, f"user:{owner4.user.id}", ["manage:project", "upload"]), ], key=lambda x: x[1], ) + sorted( diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -262,6 +262,13 @@ def add_policy(name, filename): traverse="/{organization_name}", domain=warehouse, ), + pretend.call( + "manage.organization.teams", + "/manage/organization/{organization_name}/teams/", + factory="warehouse.organizations.models:OrganizationFactory", + traverse="/{organization_name}", + domain=warehouse, + ), pretend.call( "manage.organization.roles", "/manage/organization/{organization_name}/people/", @@ -290,6 +297,34 @@ def add_policy(name, filename): traverse="/{organization_name}", domain=warehouse, ), + pretend.call( + "manage.team.settings", + "/manage/organization/{organization_name}/team/{team_name}/settings/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ), + pretend.call( + "manage.team.projects", + "/manage/organization/{organization_name}/team/{team_name}/projects/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ), + pretend.call( + "manage.team.roles", + "/manage/organization/{organization_name}/team/{team_name}/members/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ), + pretend.call( + "manage.team.delete_role", + "/manage/organization/{organization_name}/team/{team_name}/members/delete/", + factory="warehouse.organizations.models:TeamFactory", + traverse="/{organization_name}/{team_name}", + domain=warehouse, + ), pretend.call("manage.projects", "/manage/projects/", domain=warehouse), pretend.call( "manage.project.settings", @@ -375,6 +410,20 @@ def add_policy(name, filename): traverse="/{project_name}", domain=warehouse, ), + pretend.call( + 
"manage.project.change_team_project_role", + "/manage/project/{project_name}/collaboration/change_team/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), + pretend.call( + "manage.project.delete_team_project_role", + "/manage/project/{project_name}/collaboration/delete_team/", + factory="warehouse.packaging.models:ProjectFactory", + traverse="/{project_name}", + domain=warehouse, + ), pretend.call( "manage.project.documentation", "/manage/project/{project_name}/documentation/",
Dashboard to see Members, Teams, permission status, and projects

Feature request for organization account project in PyPI.

Description | The Owner/Manager can see a list of projects, teams associated with the project, and Members in each Team
-- | --
User value | Easy to view each project, Team, and Members and permission status
Part of this will be updating the **Collaborators** page for projects to indicate permissions granted to organization owners and teams.
2022-06-23T17:31:27Z
[]
[]
pypi/warehouse
11724
pypi__warehouse-11724
[ "11701" ]
0ea63b9bbe19d6b720a9561360eb4ccdd3c3e24e
diff --git a/warehouse/accounts/models.py b/warehouse/accounts/models.py --- a/warehouse/accounts/models.py +++ b/warehouse/accounts/models.py @@ -27,8 +27,6 @@ String, Text, UniqueConstraint, - and_, - or_, orm, select, sql, @@ -136,23 +134,10 @@ def email(self): .scalar_subquery() ) - @hybrid_property + @property def has_two_factor(self): return self.totp_secret is not None or len(self.webauthn) > 0 - @has_two_factor.expression # type: ignore - def has_two_factor(self): - return ( - select([True]) - .where( - or_( - WebAuthn.user_id == self.id, - and_(User.id == self.id, User.totp_secret.is_not(None)), - ) - ) - .limit(1) - ) - @property def has_recovery_codes(self): return any(not code.burned for code in self.recovery_codes) diff --git a/warehouse/packaging/__init__.py b/warehouse/packaging/__init__.py --- a/warehouse/packaging/__init__.py +++ b/warehouse/packaging/__init__.py @@ -21,6 +21,7 @@ from warehouse.packaging.models import File, Project, Release, Role from warehouse.packaging.tasks import ( compute_2fa_mandate, + compute_2fa_metrics, compute_trending, update_description_html, ) @@ -107,7 +108,7 @@ def includeme(config): config.add_periodic_task(crontab(minute=0, hour=3), compute_2fa_mandate) # Add a periodic task to generate 2FA metrics - # config.add_periodic_task(crontab(minute="*/5"), compute_2fa_metrics) + config.add_periodic_task(crontab(minute="*/5"), compute_2fa_metrics) # Add a periodic task to compute trending once a day, assuming we have # been configured to be able to access BigQuery. diff --git a/warehouse/packaging/tasks.py b/warehouse/packaging/tasks.py --- a/warehouse/packaging/tasks.py +++ b/warehouse/packaging/tasks.py @@ -23,7 +23,7 @@ from warehouse.cache.origin import IOriginCache from warehouse.email import send_two_factor_mandate_email from warehouse.metrics import IMetricsService -from warehouse.packaging.models import Description, File, Project, Release +from warehouse.packaging.models import Description, File, Project, Release, Role from warehouse.utils import readme @@ -105,10 +105,45 @@ def compute_2fa_metrics(request): critical_maintainers.count(), ) + # Number of critical project maintainers with TOTP enabled + total_critical_project_maintainers_with_totp_enabled = ( + request.db.query(User.id) + .distinct() + .join(Role, Role.user_id == User.id) + .join(Project, Project.id == Role.project_id) + .where(Project.pypi_mandates_2fa) + .where(User.totp_secret.is_not(None)) + .count() + ) + metrics.gauge( + "warehouse.2fa.total_critical_maintainers_with_totp_enabled", + total_critical_project_maintainers_with_totp_enabled, + ) + + # Number of critical project maintainers with WebAuthn enabled + metrics.gauge( + "warehouse.2fa.total_critical_maintainers_with_webauthn_enabled", + request.db.query(User.id) + .distinct() + .join(Role.user) + .join(Role.project) + .join(WebAuthn, WebAuthn.user_id == User.id) + .where(Project.pypi_mandates_2fa) + .count(), + ) + # Number of critical project maintainers with 2FA enabled metrics.gauge( "warehouse.2fa.total_critical_maintainers_with_2fa_enabled", - critical_maintainers.where(User.has_two_factor).count(), + total_critical_project_maintainers_with_totp_enabled + + request.db.query(User.id) + .distinct() + .join(Role.user) + .join(Role.project) + .join(WebAuthn, WebAuthn.user_id == User.id) + .where(Project.pypi_mandates_2fa) + .where(User.totp_secret.is_(None)) + .count(), ) # Number of projects manually requiring 2FA @@ -124,21 +159,32 @@ def compute_2fa_metrics(request): ) # Total number of users with TOTP 
enabled + total_users_with_totp_enabled = ( + request.db.query(User).where(User.totp_secret.is_not(None)).count() + ) metrics.gauge( "warehouse.2fa.total_users_with_totp_enabled", - request.db.query(User).where(User.totp_secret.is_not(None)).count(), + total_users_with_totp_enabled, ) # Total number of users with WebAuthn enabled metrics.gauge( "warehouse.2fa.total_users_with_webauthn_enabled", - request.db.query(WebAuthn.user_id).group_by(WebAuthn.user_id).count(), + request.db.query(User.id) + .distinct() + .join(WebAuthn, WebAuthn.user_id == User.id) + .count(), ) # Total number of users with 2FA enabled metrics.gauge( "warehouse.2fa.total_users_with_two_factor_enabled", - request.db.query(User).where(User.has_two_factor).count(), + total_users_with_totp_enabled + + request.db.query(User.id) + .distinct() + .join(WebAuthn, WebAuthn.user_id == User.id) + .where(User.totp_secret.is_(None)) + .count(), )
diff --git a/tests/unit/packaging/test_tasks.py b/tests/unit/packaging/test_tasks.py --- a/tests/unit/packaging/test_tasks.py +++ b/tests/unit/packaging/test_tasks.py @@ -727,6 +727,10 @@ def test_compute_2fa_metrics(db_request, monkeypatch): assert gauge.calls == [ pretend.call("warehouse.2fa.total_critical_projects", 1), pretend.call("warehouse.2fa.total_critical_maintainers", 3), + pretend.call("warehouse.2fa.total_critical_maintainers_with_totp_enabled", 1), + pretend.call( + "warehouse.2fa.total_critical_maintainers_with_webauthn_enabled", 1 + ), pretend.call("warehouse.2fa.total_critical_maintainers_with_2fa_enabled", 2), pretend.call("warehouse.2fa.total_projects_with_2fa_opt_in", 1), pretend.call("warehouse.2fa.total_projects_with_two_factor_required", 2),
Re-enable 2FA metrics, speed up queries by denormalizing counts

In https://github.com/pypa/warehouse/pull/11699 we disabled the 2FA metrics temporarily due to DB performance issues. We should improve the count performance with something similar to https://github.com/pypa/warehouse/pull/745/ and re-enable the metrics.
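A rough sketch of the denormalization idea, assuming a hypothetical `two_factor_counts` table (not part of Warehouse's actual schema) that is refreshed on a slow cadence and read cheaply by the five-minute metrics task:

```python
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import insert

metadata = sa.MetaData()

# Hypothetical table of precomputed counts; not part of Warehouse's schema.
two_factor_counts = sa.Table(
    "two_factor_counts",
    metadata,
    sa.Column("metric", sa.String, primary_key=True),
    sa.Column("value", sa.Integer, nullable=False),
)


def refresh_count(conn, metric, value):
    # Upsert the precomputed value so the metrics task can read a single
    # row instead of re-counting users on every tick.
    stmt = insert(two_factor_counts).values(metric=metric, value=value)
    stmt = stmt.on_conflict_do_update(
        index_elements=["metric"], set_={"value": stmt.excluded.value}
    )
    conn.execute(stmt)
```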
2022-06-29T16:11:20Z
[]
[]
pypi/warehouse
11858
pypi__warehouse-11858
[ "11734" ]
8e378dd19b6a6b83a95eb358c50693d1255c573a
diff --git a/warehouse/integrations/vulnerabilities/__init__.py b/warehouse/integrations/vulnerabilities/__init__.py --- a/warehouse/integrations/vulnerabilities/__init__.py +++ b/warehouse/integrations/vulnerabilities/__init__.py @@ -31,6 +31,7 @@ def __init__( advisory_link: str, aliases: List[str], details: str, + summary: str, fixed_in: List[str], ): self.project = project @@ -39,6 +40,7 @@ def __init__( self.advisory_link = advisory_link self.aliases = aliases self.details = details + self.summary = summary self.fixed_in = fixed_in @classmethod @@ -64,6 +66,7 @@ def from_api_request(cls, request): advisory_link=request["link"], aliases=request["aliases"], details=request.get("details"), + summary=request.get("summary"), fixed_in=[ version for event in request.get("events", []) diff --git a/warehouse/integrations/vulnerabilities/models.py b/warehouse/integrations/vulnerabilities/models.py --- a/warehouse/integrations/vulnerabilities/models.py +++ b/warehouse/integrations/vulnerabilities/models.py @@ -62,6 +62,9 @@ class VulnerabilityRecord(db.Model): # Details about the vulnerability details = Column(String) + # A short, plaintext summary of the vulnerability + summary = Column(String) + # Events of introduced/fixed versions fixed_in = Column(ARRAY(String)) diff --git a/warehouse/integrations/vulnerabilities/utils.py b/warehouse/integrations/vulnerabilities/utils.py --- a/warehouse/integrations/vulnerabilities/utils.py +++ b/warehouse/integrations/vulnerabilities/utils.py @@ -90,6 +90,7 @@ def _analyze_vulnerability(request, vulnerability_report, origin, metrics): link=report.advisory_link, aliases=report.aliases, details=report.details, + summary=report.summary, fixed_in=report.fixed_in, ) _add_vuln_record(request, vulnerability_record) diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -115,6 +115,7 @@ def _json_data(request, project, release, *, all_releases): "link": vulnerability_record.link, "aliases": vulnerability_record.aliases, "details": vulnerability_record.details, + "summary": vulnerability_record.summary, "fixed_in": vulnerability_record.fixed_in, } for vulnerability_record in release.vulnerabilities diff --git a/warehouse/migrations/versions/1e61006a47c2_add_detail_to_vulnerabilityrecord.py b/warehouse/migrations/versions/1e61006a47c2_add_detail_to_vulnerabilityrecord.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/1e61006a47c2_add_detail_to_vulnerabilityrecord.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +add detail to VulnerabilityRecord + +Revision ID: 1e61006a47c2 +Revises: 8bee9c119e41 +Create Date: 2022-07-15 16:57:03.322702 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "1e61006a47c2" +down_revision = "8bee9c119e41" + + +def upgrade(): + op.add_column("vulnerabilities", sa.Column("summary", sa.String(), nullable=True)) + + +def downgrade(): + op.drop_column("vulnerabilities", "summary")
diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -672,6 +672,7 @@ def test_vulnerabilities_renders(self, pyramid_config, db_request): link="the link", aliases=["alias1", "alias2"], details="some details", + summary="some summary", fixed_in=["3.3.2"], releases=[release], ) @@ -688,6 +689,7 @@ def test_vulnerabilities_renders(self, pyramid_config, db_request): "link": "the link", "aliases": ["alias1", "alias2"], "details": "some details", + "summary": "some summary", "fixed_in": ["3.3.2"], }, ]
Add "summary" to `VulnerabilityDetail` model, when present **What's the problem this feature will solve?** PyPI gets its information from OSV, which (optionally) includes both the `summary` and `details` fields. `summary` is intended to be a short, plaintext-formatted summary of the vulnerability, while `details` is intended to be a longer-form CommonMark-formatted description of the vulnerability. At the moment, PyPI preserves only the `details` field and not the `summary` in its internal copies of each vulnerability. Preserving and exposing both would improve the UX of tools like `pip-audit`, which prefer a short summary and fall back on details. **Describe the solution you'd like** All of the necessary code is in place; the only changes needed are an additional `summary` column on `VulnerabilityRecord`, plus an accompanying migration. **Additional context** Ref: https://github.com/trailofbits/pip-audit/issues/314, which this will not solve. However, it will make the underlying problem less common.
2022-07-15T18:06:34Z
[]
[]
pypi/warehouse
11862
pypi__warehouse-11862
[ "11859" ]
1f1006622c8244b0009d3a508425c8d6825381a6
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -304,7 +304,7 @@ def send_token_compromised_email_leak(request, user, *, public_url, origin): allow_unverified=True, repeat_window=datetime.timedelta(days=1), ) -def send_basic_auth_with_two_factor_email(request, user): +def send_basic_auth_with_two_factor_email(request, user, *, project_name): return {} diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -947,7 +947,9 @@ def file_upload(request): and request.user.has_two_factor ): # Eventually, raise here to disable basic auth with 2FA enabled - send_basic_auth_with_two_factor_email(request, request.user) + send_basic_auth_with_two_factor_email( + request, request.user, project_name=project.name + ) # Update name if it differs but is still equivalent. We don't need to check if # they are equivalent when normalized because that's already been done when we
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -1294,8 +1294,10 @@ def test_basic_auth_with_2fa_email( ) pyramid_request.user = stub_user pyramid_request.registry.settings = {"mail.sender": "[email protected]"} - - result = email.send_basic_auth_with_two_factor_email(pyramid_request, stub_user) + project_name = "exampleproject" + result = email.send_basic_auth_with_two_factor_email( + pyramid_request, stub_user, project_name=project_name + ) assert result == {} assert pyramid_request.task.calls == [pretend.call(send_email)] diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -2649,7 +2649,9 @@ def test_upload_succeeds_with_2fa_enabled( legacy.file_upload(db_request) - assert send_email.calls == [pretend.call(db_request, user)] + assert send_email.calls == [ + pretend.call(db_request, user, project_name=project.name) + ] @pytest.mark.parametrize( "plat",
Provide the project name in the email for migrating to API tokens for uploading to PyPI

**What's the problem this feature will solve?**

In the email from PyPI with subject **[PyPI] Migrate to API tokens for uploading to PyPI**, it tells us that a package was uploaded to PyPI using basic authentication instead of an API token. It does not tell us which package was uploaded. Packages in our org are uploaded via various in-house tooling, so we're still tracking down which tool/package this was. I'd imagine there might be other maintainers who also use some form of automation in uploading their packages. In this case, adding the name of the package to the email would be useful in helping to track down the issue so we can improve the automation process.

**Describe the solution you'd like**

Include the name of the package in the email, to help maintainers figure out which package this happened for. Suggested wording change:

> During your recent upload or upload attempt to PyPI **for the package {{ package_name }}**, we noticed you used basic authentication (username & password). However, your account has two-factor authentication (2FA) enabled.

**Additional context**

We received an email from PyPI, saying that there was a recent upload to PyPI using basic authentication (username & password). Here's a copy of the email:

<details>

# What?

During your recent upload or upload attempt to PyPI, we noticed you used basic authentication (username & password). However, your account has two-factor authentication (2FA) enabled. In the near future, PyPI will begin prohibiting uploads using basic authentication for accounts with two-factor authentication enabled. Instead, we will require API tokens to be used.

# What should I do?

First, generate an API token for your account or project at https://pypi.org/manage/account/token/. Then, use this token when publishing instead of your username and password. See https://pypi.org/help/#apitoken for help using API tokens to publish.

</details>

It would be great if the package name was included in the email. We're also wondering if there was a reason that the package name was not included, in case we're missing some context here.
2022-07-16T03:26:22Z
[]
[]
pypi/warehouse
11885
pypi__warehouse-11885
[ "9018" ]
12ba1bb38de9546c2a4d8a944a6a6bc0fdd4637d
diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py --- a/warehouse/macaroons/caveats.py +++ b/warehouse/macaroons/caveats.py @@ -15,6 +15,9 @@ import pymacaroons +from pyramid.security import Allowed + +from warehouse.errors import WarehouseDenied from warehouse.packaging.models import Project @@ -58,22 +61,23 @@ def verify_projects(self, projects) -> bool: def verify(self, predicate) -> bool: try: data = json.loads(predicate) - except ValueError: - self.failure_reason = "malformatted predicate" + version = data["version"] + permissions = data["permissions"] + except (KeyError, ValueError, TypeError): return False - if data.get("version") != 1: - self.failure_reason = "invalid version in predicate" + if version != 1: return False - permissions = data.get("permissions") if permissions is None: - self.failure_reason = "invalid permissions in predicate" return False if permissions == "user": # User-scoped tokens behave exactly like a user's normal credentials. return True + elif not isinstance(permissions, dict): + self.failure_reason = "invalid permissions format" + return False projects = permissions.get("projects") if projects is None: @@ -90,7 +94,6 @@ def verify(self, predicate): expiry = data["exp"] not_before = data["nbf"] except (KeyError, ValueError, TypeError): - self.failure_reason = "malformatted predicate" return False if not expiry or not not_before: @@ -111,7 +114,6 @@ def verify(self, predicate): data = json.loads(predicate) project_ids = data["project_ids"] except (KeyError, ValueError, TypeError): - self.failure_reason = "malformatted predicate" return False if not project_ids: @@ -125,7 +127,6 @@ def verify(self, predicate): return False if str(self.verifier.context.id) not in project_ids: - self.failure_reason = "current project does not matched scoped project IDs" return False return True @@ -145,9 +146,35 @@ def verify(self, key): self.verifier.satisfy_general(ProjectIDsCaveat(self)) try: - return self.verifier.verify(self.macaroon, key) - except ( - pymacaroons.exceptions.MacaroonInvalidSignatureException, - Exception, # https://github.com/ecordell/pymacaroons/issues/50 - ): - return False + result = self.verifier.verify(self.macaroon, key) + except pymacaroons.exceptions.MacaroonInvalidSignatureException as exc: + failure_reasons = [] + for cb in self.verifier.callbacks: + failure_reason = getattr(cb, "failure_reason", None) + if failure_reason is not None: + failure_reasons.append(failure_reason) + + # If we have a more detailed set of failure reasons, use them. + # Otherwise, use whatever the exception gives us. + if len(failure_reasons) > 0: + return WarehouseDenied( + ", ".join(failure_reasons), reason="invalid_api_token" + ) + else: + return WarehouseDenied(str(exc), reason="invalid_api_token") + except Exception: + # The pymacaroons `verify` API with leak exceptions raised during caveat + # verification, which *normally* indicate a deserialization error + # (i.e., a malformed caveat body). + # When this happens, we don't want to display a random stringified + # Python exception to the user, so instead we emit a generic error. + # See https://github.com/ecordell/pymacaroons/issues/50 + return WarehouseDenied("malformed macaroon", reason="invalid_api_token") + + # NOTE: We should never hit this case, since pymacaroons *should* always either + # raise on failure *or* return true. But there's nothing stopping that from + # silently breaking in the future, so we check the result defensively here. 
+ if not result: + return WarehouseDenied("unknown error", reason="invalid_api_token") + else: + return Allowed("signature and caveats OK") diff --git a/warehouse/macaroons/security_policy.py b/warehouse/macaroons/security_policy.py --- a/warehouse/macaroons/security_policy.py +++ b/warehouse/macaroons/security_policy.py @@ -153,7 +153,7 @@ def permits(self, context, principals, permission): macaroon_service.verify(macaroon, context, principals, permission) except InvalidMacaroonError as exc: return WarehouseDenied( - f"Invalid API Token: {exc!r}", reason="invalid_api_token" + f"Invalid API Token: {exc}", reason="invalid_api_token" ) # If our Macaroon is verified, and for a valid permission then we'll pass diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -121,11 +121,12 @@ def verify(self, raw_macaroon, context, principals, permission): raise InvalidMacaroonError("deleted or nonexistent macaroon") verifier = Verifier(m, context, principals, permission) - if verifier.verify(dm.key): + verified = verifier.verify(dm.key) + if verified: dm.last_used = datetime.datetime.now() return True - raise InvalidMacaroonError("invalid macaroon") + raise InvalidMacaroonError(verified.msg) def create_macaroon(self, location, user_id, description, caveats): """
diff --git a/tests/unit/macaroons/test_caveats.py b/tests/unit/macaroons/test_caveats.py --- a/tests/unit/macaroons/test_caveats.py +++ b/tests/unit/macaroons/test_caveats.py @@ -19,7 +19,9 @@ import pytest from pymacaroons.exceptions import MacaroonInvalidSignatureException +from pyramid.security import Allowed +from warehouse.errors import WarehouseDenied from warehouse.macaroons.caveats import ( Caveat, ExpiryCaveat, @@ -48,6 +50,9 @@ class TestV1Caveat: ("invalid json", False), ('{"version": 2}', False), ('{"permissions": null, "version": 1}', False), + ('{"permissions": null, "version": 2}', False), + ('{"permissions": "user", "version": 2}', False), + ('{"permissions": "", "version": 2}', False), ], ) def test_verify_invalid_predicates(self, predicate, result): @@ -217,7 +222,7 @@ def test_creation(self): def test_verify_invalid_signature(self, monkeypatch): verify = pretend.call_recorder( - pretend.raiser(MacaroonInvalidSignatureException) + pretend.raiser(MacaroonInvalidSignatureException("Signatures do not match")) ) macaroon = pretend.stub() context = pretend.stub() @@ -227,11 +232,43 @@ def test_verify_invalid_signature(self, monkeypatch): verifier = Verifier(macaroon, context, principals, permission) monkeypatch.setattr(verifier.verifier, "verify", verify) - assert verifier.verify(key) is False + status = verifier.verify(key) + assert not status + assert status.msg == "Signatures do not match" + assert verify.calls == [pretend.call(macaroon, key)] + + def test_verify_generic_exception(self, monkeypatch): + verify = pretend.call_recorder(pretend.raiser(ValueError)) + macaroon = pretend.stub() + context = pretend.stub() + principals = pretend.stub() + permission = pretend.stub() + key = pretend.stub() + verifier = Verifier(macaroon, context, principals, permission) + + monkeypatch.setattr(verifier.verifier, "verify", verify) + status = verifier.verify(key) + assert not status + assert status.msg == "malformed macaroon" + assert verify.calls == [pretend.call(macaroon, key)] + + def test_verify_inner_verifier_returns_false(self, monkeypatch): + verify = pretend.call_recorder(lambda macaroon, key: False) + macaroon = pretend.stub() + context = pretend.stub() + principals = pretend.stub() + permission = pretend.stub() + key = pretend.stub() + verifier = Verifier(macaroon, context, principals, permission) + + monkeypatch.setattr(verifier.verifier, "verify", verify) + status = verifier.verify(key) + assert not status + assert status.msg == "unknown error" assert verify.calls == [pretend.call(macaroon, key)] @pytest.mark.parametrize( - ["caveats", "valid"], + ["caveats", "expected_status"], [ # Both V1 and expiry present and valid. ( @@ -239,23 +276,31 @@ def test_verify_invalid_signature(self, monkeypatch): {"permissions": "user", "version": 1}, {"exp": int(time.time()) + 3600, "nbf": int(time.time()) - 1}, ], - True, + Allowed("signature and caveats OK"), ), # V1 only present and valid. - ([{"permissions": "user", "version": 1}], True), + ( + [{"permissions": "user", "version": 1}], + Allowed("signature and caveats OK"), + ), # V1 and expiry present but V1 invalid. - ([{"permissions": "bad", "version": 1}], False), + ( + [{"permissions": "bad", "version": 1}], + WarehouseDenied( + "invalid permissions format", reason="invalid_api_token" + ), + ), # V1 and expiry present but expiry invalid. 
( [ {"permissions": "user", "version": 1}, {"exp": int(time.time()) + 1, "nbf": int(time.time()) + 3600}, ], - False, + WarehouseDenied("token is expired", reason="invalid_api_token"), ), ], ) - def test_verify(self, monkeypatch, caveats, valid): + def test_verify(self, monkeypatch, caveats, expected_status): key = os.urandom(32) m = pymacaroons.Macaroon( location="fakelocation", @@ -276,4 +321,6 @@ def test_verify(self, monkeypatch, caveats, valid): permission = pretend.stub() verifier = Verifier(deserialized_macaroon, context, principals, permission) - assert verifier.verify(key) is valid + status = verifier.verify(key) + assert bool(status) is bool(expected_status) + assert status.msg == expected_status.msg diff --git a/tests/unit/macaroons/test_security_policy.py b/tests/unit/macaroons/test_security_policy.py --- a/tests/unit/macaroons/test_security_policy.py +++ b/tests/unit/macaroons/test_security_policy.py @@ -235,7 +235,7 @@ def test_permits_invalid_macaroon(self, monkeypatch): result = policy.permits(pretend.stub(), pretend.stub(), pretend.stub()) assert result == Denied("") - assert result.s == "Invalid API Token: InvalidMacaroonError('foo')" + assert result.s == "Invalid API Token: foo" def test_permits_valid_macaroon(self, monkeypatch): macaroon_service = pretend.stub( diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -22,6 +22,7 @@ from pymacaroons.exceptions import MacaroonDeserializationException +from warehouse.errors import WarehouseDenied from warehouse.macaroons import services from warehouse.macaroons.models import Macaroon @@ -137,7 +138,9 @@ def test_verify_unprefixed_macaroon(self, macaroon_service): version=pymacaroons.MACAROON_V2, ).serialize() - with pytest.raises(services.InvalidMacaroonError): + with pytest.raises( + services.InvalidMacaroonError, match="malformed or nonexistent macaroon" + ): macaroon_service.verify( raw_macaroon, pretend.stub(), pretend.stub(), pretend.stub() ) @@ -151,7 +154,9 @@ def test_verify_no_macaroon(self, macaroon_service): ).serialize() raw_macaroon = f"pypi-{raw_macaroon}" - with pytest.raises(services.InvalidMacaroonError): + with pytest.raises( + services.InvalidMacaroonError, match="deleted or nonexistent macaroon" + ): macaroon_service.verify( raw_macaroon, pretend.stub(), pretend.stub(), pretend.stub() ) @@ -162,7 +167,9 @@ def test_verify_invalid_macaroon(self, monkeypatch, user_service, macaroon_servi "fake location", user.id, "fake description", [{"permissions": "user"}] ) - verifier_obj = pretend.stub(verify=pretend.call_recorder(lambda k: False)) + verifier_obj = pretend.stub( + verify=pretend.call_recorder(lambda k: WarehouseDenied("foo")) + ) verifier_cls = pretend.call_recorder(lambda *a: verifier_obj) monkeypatch.setattr(services, "Verifier", verifier_cls) @@ -170,7 +177,7 @@ def test_verify_invalid_macaroon(self, monkeypatch, user_service, macaroon_servi principals = pretend.stub() permissions = pretend.stub() - with pytest.raises(services.InvalidMacaroonError): + with pytest.raises(services.InvalidMacaroonError, match="foo"): macaroon_service.verify(raw_macaroon, context, principals, permissions) assert verifier_cls.calls == [ pretend.call(mock.ANY, context, principals, permissions) @@ -180,7 +187,9 @@ def test_deserialize_raw_macaroon_when_none(self, macaroon_service): raw_macaroon = pretend.stub() macaroon_service._extract_raw_macaroon = pretend.call_recorder(lambda a: None) - with 
pytest.raises(services.InvalidMacaroonError): + with pytest.raises( + services.InvalidMacaroonError, match="malformed or nonexistent macaroon" + ): macaroon_service._deserialize_raw_macaroon(raw_macaroon) assert macaroon_service._extract_raw_macaroon.calls == [
Improve "invalid macaroon signature" error Right now, an API token can fail for a number of reasons, most of which produce an "invalid macaroon signature" exception. https://github.com/pypa/warehouse/blob/aafc5185e57e67d43487ce4faa95913dd4573e14/warehouse/macaroons/caveats.py#L93 Common issues that could use more detailed error messages: * the API token is scoped for a different project than it's being used for * the API token is malformed and cannot be deserialized * the API token doesn't begin with `pypi-` * (probably more)
If we're touching this part, do you think we could try and merge #8598 first?

Yes, agreed, just wanted to capture the issue.

This also may happen when the target repository does not exist yet when using an "all projects" API token. In such cases I had to first **create** the project by uploading using the _human_ account credential. But still, I'm currently debugging one of my repos facing this error, with correctly configured org-level secrets and environment variables on GitHub workflows, while many other repositories work well with the same token; that particular project was created using the human credential after failing with the org-configured "all projects" token, and the repository name was renamed. I tried upgrading `twine` from 1.x to 3.4 but had no luck yet. I'd like to see more detailed information related to this error.

#9264 will bring much more detailed error messages. Nevertheless, what you experience with the first upload is not the expected behavior, but it belongs to another issue. Can you open a new issue? (Feel free to ping me)

#11122 will refactor some of the code responsible for the error handling here, hopefully making it easier to resolve this.
2022-07-19T15:39:48Z
[]
[]
pypi/warehouse
11897
pypi__warehouse-11897
[ "11862" ]
41231889fdbf1928c104865c8f3570c9e8eb5542
diff --git a/warehouse/email/__init__.py b/warehouse/email/__init__.py --- a/warehouse/email/__init__.py +++ b/warehouse/email/__init__.py @@ -305,7 +305,7 @@ def send_token_compromised_email_leak(request, user, *, public_url, origin): repeat_window=datetime.timedelta(days=1), ) def send_basic_auth_with_two_factor_email(request, user, *, project_name): - return {} + return {"project_name": project_name} @_email("account-deleted")
diff --git a/tests/unit/email/test_init.py b/tests/unit/email/test_init.py --- a/tests/unit/email/test_init.py +++ b/tests/unit/email/test_init.py @@ -1299,7 +1299,7 @@ def test_basic_auth_with_2fa_email( pyramid_request, stub_user, project_name=project_name ) - assert result == {} + assert result == {"project_name": project_name} assert pyramid_request.task.calls == [pretend.call(send_email)] assert send_email.delay.calls == [ pretend.call(
Pass the project name to the basic-auth-with-2fa email. Closes #11859
This will include the project name in the jinja2 context, but won't actually include the project name in the email itself as far as I can tell -- I assume you want it in the email's body.

Oh, yes! I'd forgotten to commit the email templates. I've included them now: body.html and body.txt. Thank you.
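For illustration only (the committed body.html/body.txt are not shown here), the dict returned by the email function becomes the Jinja2 template context, so a hypothetical body.txt fragment would render like this:

```python
from jinja2 import Template

# Hypothetical fragment; not the actual committed body.txt template.
body = Template(
    "During your recent upload or upload attempt to PyPI for the project "
    "{{ project_name }}, we noticed you used basic authentication."
)
print(body.render(project_name="exampleproject"))
```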
2022-07-20T15:37:36Z
[]
[]
pypi/warehouse
11903
pypi__warehouse-11903
[ "11873" ]
dbc71597e757e0ad933bbc04096fcf45e4f2fd53
diff --git a/warehouse/integrations/github/utils.py b/warehouse/integrations/github/utils.py --- a/warehouse/integrations/github/utils.py +++ b/warehouse/integrations/github/utils.py @@ -21,7 +21,7 @@ from warehouse import integrations from warehouse.accounts.interfaces import IUserService from warehouse.email import send_token_compromised_email_leak -from warehouse.macaroons.caveats import InvalidMacaroonError +from warehouse.macaroons import InvalidMacaroonError from warehouse.macaroons.interfaces import IMacaroonService from warehouse.metrics import IMetricsService diff --git a/warehouse/macaroons/__init__.py b/warehouse/macaroons/__init__.py --- a/warehouse/macaroons/__init__.py +++ b/warehouse/macaroons/__init__.py @@ -10,9 +10,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from warehouse.macaroons.errors import InvalidMacaroonError from warehouse.macaroons.interfaces import IMacaroonService from warehouse.macaroons.services import database_macaroon_factory +__all__ = ["InvalidMacaroonError", "includeme"] + def includeme(config): config.register_service_factory(database_macaroon_factory, IMacaroonService) diff --git a/warehouse/macaroons/caveats.py b/warehouse/macaroons/caveats.py deleted file mode 100644 --- a/warehouse/macaroons/caveats.py +++ /dev/null @@ -1,180 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import time - -import pymacaroons - -from pyramid.security import Allowed - -from warehouse.errors import WarehouseDenied -from warehouse.packaging.models import Project - - -class InvalidMacaroonError(Exception): - ... - - -class Caveat: - def __init__(self, verifier): - self.verifier = verifier - # TODO: Surface this failure reason to the user. - # See: https://github.com/pypi/warehouse/issues/9018 - self.failure_reason = None - - def verify(self, predicate) -> bool: - return False - - def __call__(self, predicate): - return self.verify(predicate) - - -class V1Caveat(Caveat): - def verify_projects(self, projects) -> bool: - # First, ensure that we're actually operating in - # the context of a package. - if not isinstance(self.verifier.context, Project): - self.failure_reason = ( - "project-scoped token used outside of a project context" - ) - return False - - project = self.verifier.context - if project.normalized_name in projects: - return True - - self.failure_reason = ( - f"project-scoped token is not valid for project '{project.name}'" - ) - return False - - def verify(self, predicate) -> bool: - try: - data = json.loads(predicate) - version = data["version"] - permissions = data["permissions"] - except (KeyError, ValueError, TypeError): - return False - - if version != 1: - return False - - if permissions is None: - return False - - if permissions == "user": - # User-scoped tokens behave exactly like a user's normal credentials. 
- return True - elif not isinstance(permissions, dict): - self.failure_reason = "invalid permissions format" - return False - - projects = permissions.get("projects") - if projects is None: - self.failure_reason = "invalid projects in predicate" - return False - - return self.verify_projects(projects) - - -class ExpiryCaveat(Caveat): - def verify(self, predicate): - try: - data = json.loads(predicate) - expiry = data["exp"] - not_before = data["nbf"] - except (KeyError, ValueError, TypeError): - return False - - if not expiry or not not_before: - self.failure_reason = "missing fields" - return False - - now = int(time.time()) - if now < not_before or now >= expiry: - self.failure_reason = "token is expired" - return False - - return True - - -class ProjectIDsCaveat(Caveat): - def verify(self, predicate): - try: - data = json.loads(predicate) - project_ids = data["project_ids"] - except (KeyError, ValueError, TypeError): - return False - - if not project_ids: - self.failure_reason = "missing fields" - return False - - if not isinstance(self.verifier.context, Project): - self.failure_reason = ( - "project-scoped token used outside of a project context" - ) - return False - - if str(self.verifier.context.id) not in project_ids: - return False - - return True - - -class Verifier: - def __init__(self, macaroon, context, principals, permission): - self.macaroon = macaroon - self.context = context - self.principals = principals - self.permission = permission - self.verifier = pymacaroons.Verifier() - - def verify(self, key): - self.verifier.satisfy_general(V1Caveat(self)) - self.verifier.satisfy_general(ExpiryCaveat(self)) - self.verifier.satisfy_general(ProjectIDsCaveat(self)) - - try: - result = self.verifier.verify(self.macaroon, key) - except pymacaroons.exceptions.MacaroonInvalidSignatureException as exc: - failure_reasons = [] - for cb in self.verifier.callbacks: - failure_reason = getattr(cb, "failure_reason", None) - if failure_reason is not None: - failure_reasons.append(failure_reason) - - # If we have a more detailed set of failure reasons, use them. - # Otherwise, use whatever the exception gives us. - if len(failure_reasons) > 0: - return WarehouseDenied( - ", ".join(failure_reasons), reason="invalid_api_token" - ) - else: - return WarehouseDenied(str(exc), reason="invalid_api_token") - except Exception: - # The pymacaroons `verify` API with leak exceptions raised during caveat - # verification, which *normally* indicate a deserialization error - # (i.e., a malformed caveat body). - # When this happens, we don't want to display a random stringified - # Python exception to the user, so instead we emit a generic error. - # See https://github.com/ecordell/pymacaroons/issues/50 - return WarehouseDenied("malformed macaroon", reason="invalid_api_token") - - # NOTE: We should never hit this case, since pymacaroons *should* always either - # raise on failure *or* return true. But there's nothing stopping that from - # silently breaking in the future, so we check the result defensively here. - if not result: - return WarehouseDenied("unknown error", reason="invalid_api_token") - else: - return Allowed("signature and caveats OK") diff --git a/warehouse/macaroons/caveats/__init__.py b/warehouse/macaroons/caveats/__init__.py new file mode 100644 --- /dev/null +++ b/warehouse/macaroons/caveats/__init__.py @@ -0,0 +1,143 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +from typing import Any + +from pydantic import StrictInt, StrictStr +from pydantic.dataclasses import dataclass +from pymacaroons import Macaroon, Verifier +from pymacaroons.exceptions import MacaroonInvalidSignatureException +from pyramid.request import Request +from pyramid.security import Allowed + +from warehouse.accounts.models import User +from warehouse.errors import WarehouseDenied +from warehouse.macaroons.caveats._core import ( + Caveat, + CaveatError, + Failure, + Result, + Success, + as_caveat, + deserialize, + serialize, +) +from warehouse.packaging.models import Project + +__all__ = ["deserialize", "serialize", "verify"] + + +@as_caveat(tag=0) +@dataclass(frozen=True) +class Expiration(Caveat): + expires_at: StrictInt + not_before: StrictInt + + def verify(self, request: Request, context: Any, permission: str) -> Result: + now = int(time.time()) + if now < self.not_before or now >= self.expires_at: + return Failure("token is expired") + return Success() + + +@as_caveat(tag=1) +@dataclass(frozen=True) +class ProjectName(Caveat): + normalized_names: list[StrictStr] + + def verify(self, request: Request, context: Any, permission: str) -> Result: + if not isinstance(context, Project): + return Failure("project-scoped token used outside of a project context") + + if context.normalized_name not in self.normalized_names: + return Failure( + f"project-scoped token is not valid for project: {context.name!r}" + ) + + return Success() + + +@as_caveat(tag=2) +@dataclass(frozen=True) +class ProjectID(Caveat): + project_ids: list[StrictStr] + + def verify(self, request: Request, context: Any, permission: str) -> Result: + if not isinstance(context, Project): + return Failure("project-scoped token used outside of a project context") + + if str(context.id) not in self.project_ids: + return Failure( + f"project-scoped token is not valid for project: {context.name!r}" + ) + + return Success() + + +@as_caveat(tag=3) +@dataclass(frozen=True) +class RequestUser(Caveat): + user_id: StrictStr + + def verify(self, request: Request, context: Any, permission: str) -> Result: + if not isinstance(request.identity, User): + return Failure("token with user restriction without a user") + + if str(request.identity.id) != self.user_id: + return Failure("current user does not match user restriction in token") + + return Success() + + +def verify( + macaroon: Macaroon, key: bytes, request: Request, context: Any, permission: str +) -> Allowed | WarehouseDenied: + errors: list[str] = [] + + def _verify_caveat(predicate: bytes): + try: + caveat = deserialize(predicate) + except CaveatError as exc: + errors.append(str(exc)) + return False + + result = caveat.verify(request, context, permission) + assert isinstance(result, (Success, Failure)) + + if isinstance(result, Failure): + errors.append(result.reason) + return False + + return True + + verifier = Verifier() + verifier.satisfy_general(_verify_caveat) + + result = False + try: + result = verifier.verify(macaroon, key) + except ( + MacaroonInvalidSignatureException, + Exception, # https://github.com/ecordell/pymacaroons/issues/50 + ) 
as exc: + if errors: + return WarehouseDenied(", ".join(errors), reason="invalid_api_token") + elif isinstance(exc, MacaroonInvalidSignatureException): + return WarehouseDenied( + "signatures do not match", reason="invalid_api_token" + ) + + if not result: + return WarehouseDenied("unknown error", reason="invalid_api_token") + return Allowed("signature and caveats OK") diff --git a/warehouse/macaroons/caveats/_core.py b/warehouse/macaroons/caveats/_core.py new file mode 100644 --- /dev/null +++ b/warehouse/macaroons/caveats/_core.py @@ -0,0 +1,151 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import dataclasses +import json +import typing + +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Callable, ClassVar, Type, TypeVar + +from pydantic import ValidationError +from pydantic.dataclasses import dataclass as pydantic_dataclass +from pyramid.request import Request + +from warehouse.macaroons.caveats import _legacy + +T = TypeVar("T") +S = TypeVar("S") + + +class CaveatError(Exception): + pass + + +class CaveatDeserializationError(CaveatError): + pass + + +@dataclass(frozen=True, slots=True) +class Success: + def __bool__(self): + return True + + +@dataclass(frozen=True, slots=True) +class Failure: + reason: str + + def __bool__(self): + return False + + +Result = Success | Failure + + +@pydantic_dataclass(frozen=True) +class Caveat: + tag: ClassVar[int] + + def verify(self, request: Request, context: Any, permission: str) -> Result: + raise NotImplementedError + + def __serialize__(self) -> Sequence: + return (self.tag,) + dataclasses.astuple(self) + + @classmethod + def __deserialize__(cls: Type[S], data: Sequence) -> S: + kwargs = {} + for i, field in enumerate(dataclasses.fields(cls)): + if len(data) > i: + value = data[i] + elif field.default is not dataclasses.MISSING: + value = field.default + elif field.default_factory is not dataclasses.MISSING: + value = field.default_factory() + else: + raise CaveatDeserializationError("Not enough values") + + kwargs[field.name] = value + + try: + obj = cls(**kwargs) + except ValidationError: + raise CaveatDeserializationError("invalid values for fields") + + return obj + + +class _CaveatRegistry: + + _tags: dict[int, Type[Caveat]] + + def __init__(self, *args: Any, **kwargs: Any): + super().__init__(*args, **kwargs) + self._tags = {} + + def add(self, tag: int, cls: Type[Caveat]): + if tag in self._tags: + raise TypeError( + f"Cannot re-use tag: {tag}, already used by {self._tags[tag]}" + ) + + self._tags[tag] = cls + cls.tag = tag + + def lookup(self, /, tag: int) -> Type[Caveat] | None: + return self._tags.get(tag) + + +_caveat_registry = _CaveatRegistry() + + +def as_caveat(*, tag: int) -> Callable[[Type[T]], Type[T]]: + def deco(cls: Type[T]) -> Type[T]: + _caveat_registry.add(tag, typing.cast(Type[Caveat], cls)) + return cls + + return deco + + +def serialize(caveat: Caveat) -> bytes: + return json.dumps( + caveat.__serialize__(), sort_keys=True, 
separators=(",", ":") + ).encode("utf8") + + +def deserialize(data: bytes) -> Caveat: + loaded = json.loads(data) + + # Our original caveats were implemented as a mapping with arbitrary keys, + # so if we've gotten one of our those, we'll attempt to adapt it to our + # new format. + if isinstance(loaded, Mapping): + loaded = _legacy.adapt(loaded) + if loaded is None: + raise CaveatDeserializationError("caveat must be an array") + + if not isinstance(loaded, Sequence) or isinstance(loaded, str): + raise CaveatDeserializationError("caveat must be an array") + + if not len(loaded): + raise CaveatDeserializationError("caveat array cannot be empty") + + tag, *fields = loaded + cls = _caveat_registry.lookup(tag) + + if cls is None: + raise CaveatDeserializationError(f"caveat has unknown tag: {tag}") + + return cls.__deserialize__(fields) diff --git a/warehouse/macaroons/caveats/_legacy.py b/warehouse/macaroons/caveats/_legacy.py new file mode 100644 --- /dev/null +++ b/warehouse/macaroons/caveats/_legacy.py @@ -0,0 +1,79 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections.abc import Mapping, Sequence + +from pyramid.threadlocal import get_current_request + + +def _adapt_v1(data: Mapping) -> Sequence | None: + permissions = data.get("permissions") + + # This would be a malformed token, so we'll just refuse + # to adapt it. + if permissions is None: + return None + + # Our V1 token didn't have a way to specify that a token should be + # restricted to a specific user, just that it was scoped to "the user", + # which the user was whoever the token was linked to in the database. + # Our new tokens strengthens that to validate that the linked user + # matches who it is expected to be, but since we don't have that + # data for V1 tokens, we'll just use the current user. + if permissions == "user": + request = get_current_request() + + # If we don't have a current request, then we can't validate this + # token. + if request is None: + return None + + # If we don't have a user associated with this request, then we + # can't validate this token. + if request.user is None: + return None + + return [3, str(request.user.id)] + # Our project level permissions for V1 caveats had a dictionary, with + # the key "projects", and that was a list of normalized project names. + elif isinstance(permissions, Mapping) and "projects" in permissions: + return [1, permissions["projects"]] + + # If we get to here, then we don't know how to adapt this token, so + # we'll just return None. + return None + + +def _adapt_expiry(data: Mapping) -> Sequence | None: + return [0, data["exp"], data["nbf"]] + + +def _adapt_project_ids(data: Mapping) -> Sequence | None: + return [2, data["project_ids"]] + + +def adapt(data: Mapping) -> Sequence | None: + # Check for our previous `V1Caveat` type. + if data.get("version") == 1: + return _adapt_v1(data) + + # Check for our previous `ExpiryCaveat` type. + if "exp" in data and "nbf" in data: + return _adapt_expiry(data) + + # Check for our previous `ProjectIDsCaveat` type. 
+ if "project_ids" in data: + return _adapt_project_ids(data) + + # We don't have any other caveat types, so we don't know how to adapt + # this payload. + return None diff --git a/warehouse/macaroons/errors.py b/warehouse/macaroons/errors.py new file mode 100644 --- /dev/null +++ b/warehouse/macaroons/errors.py @@ -0,0 +1,15 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class InvalidMacaroonError(Exception): + ... diff --git a/warehouse/macaroons/interfaces.py b/warehouse/macaroons/interfaces.py --- a/warehouse/macaroons/interfaces.py +++ b/warehouse/macaroons/interfaces.py @@ -41,15 +41,15 @@ def find_userid(raw_macaroon): macaroon. """ - def verify(raw_macaroon, context, principals, permission): + def verify(raw_macaroon, request, context, permission): """ Returns True if the given raw (serialized) macaroon is - valid for the context, principals, and requested permission. + valid for the request, context, and requested permission. Raises InvalidMacaroon if the macaroon is not valid. """ - def create_macaroon(location, user_id, description, caveats): + def create_macaroon(location, user_id, description, scopes): """ Returns a new raw (serialized) macaroon. The description provided is not embedded into the macaroon, only stored in the DB model. diff --git a/warehouse/macaroons/security_policy.py b/warehouse/macaroons/security_policy.py --- a/warehouse/macaroons/security_policy.py +++ b/warehouse/macaroons/security_policy.py @@ -19,8 +19,8 @@ from warehouse.accounts.interfaces import IUserService from warehouse.cache.http import add_vary_callback from warehouse.errors import WarehouseDenied +from warehouse.macaroons import InvalidMacaroonError from warehouse.macaroons.interfaces import IMacaroonService -from warehouse.macaroons.services import InvalidMacaroonError from warehouse.utils.security_policy import AuthenticationMethod @@ -150,7 +150,7 @@ def permits(self, context, principals, permission): macaroon_service = request.find_service(IMacaroonService, context=None) try: - macaroon_service.verify(macaroon, context, principals, permission) + macaroon_service.verify(macaroon, request, context, permission) except InvalidMacaroonError as exc: return WarehouseDenied( f"Invalid API Token: {exc}", reason="invalid_api_token" diff --git a/warehouse/macaroons/services.py b/warehouse/macaroons/services.py --- a/warehouse/macaroons/services.py +++ b/warehouse/macaroons/services.py @@ -11,7 +11,6 @@ # limitations under the License. 
import datetime -import json import uuid import pymacaroons @@ -22,7 +21,8 @@ from zope.interface import implementer from warehouse.accounts.models import User -from warehouse.macaroons.caveats import InvalidMacaroonError, Verifier +from warehouse.macaroons import caveats +from warehouse.macaroons.errors import InvalidMacaroonError from warehouse.macaroons.interfaces import IMacaroonService from warehouse.macaroons.models import Macaroon @@ -107,10 +107,10 @@ def find_from_raw(self, raw_macaroon): raise InvalidMacaroonError("Macaroon not found") return dm - def verify(self, raw_macaroon, context, principals, permission): + def verify(self, raw_macaroon, request, context, permission): """ Returns True if the given raw (serialized) macaroon is - valid for the context, principals, and requested permission. + valid for the request, context, and requested permission. Raises InvalidMacaroonError if the macaroon is not valid. """ @@ -120,15 +120,14 @@ def verify(self, raw_macaroon, context, principals, permission): if dm is None: raise InvalidMacaroonError("deleted or nonexistent macaroon") - verifier = Verifier(m, context, principals, permission) - verified = verifier.verify(dm.key) + verified = caveats.verify(m, dm.key, request, context, permission) if verified: dm.last_used = datetime.datetime.now() return True raise InvalidMacaroonError(verified.msg) - def create_macaroon(self, location, user_id, description, caveats): + def create_macaroon(self, location, user_id, description, scopes): """ Returns a tuple of a new raw (serialized) macaroon and its DB model. The description provided is not embedded into the macaroon, only stored @@ -136,12 +135,25 @@ def create_macaroon(self, location, user_id, description, caveats): """ user = self.db.query(User).filter(User.id == user_id).one() + if not all(isinstance(c, caveats.Caveat) for c in scopes): + raise TypeError("scopes must be a list of Caveat instances") + # NOTE: This is a bit of a hack: we keep a separate copy of the # permissions caveat in the DB, so that we can display scope information # in the UI. 
- permissions = next(c for c in caveats if "permissions" in c) # pragma: no cover + permissions = {} + for caveat in scopes: + if isinstance(caveat, caveats.ProjectName): + projects = permissions.setdefault("projects", []) + projects.extend(caveat.normalized_names) + elif isinstance(caveat, caveats.RequestUser): + permissions = "user" + break + dm = Macaroon( - user=user, description=description, permissions_caveat=permissions + user=user, + description=description, + permissions_caveat={"permissions": permissions}, ) self.db.add(dm) self.db.flush() @@ -152,8 +164,8 @@ def create_macaroon(self, location, user_id, description, caveats): key=dm.key, version=pymacaroons.MACAROON_V2, ) - for caveat in caveats: - m.add_first_party_caveat(json.dumps(caveat)) + for caveat in scopes: + m.add_first_party_caveat(caveats.serialize(caveat)) serialized_macaroon = f"pypi-{m.serialize()}" return serialized_macaroon, dm diff --git a/warehouse/manage/views.py b/warehouse/manage/views.py --- a/warehouse/manage/views.py +++ b/warehouse/manage/views.py @@ -94,6 +94,7 @@ send_yanked_project_release_email, ) from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE +from warehouse.macaroons import caveats from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage.forms import ( AddEmailForm, @@ -999,30 +1000,37 @@ def create_macaroon(self): response = {**self.default_response} if form.validate(): if form.validated_scope == "user": - macaroon_caveats = [{"permissions": form.validated_scope, "version": 1}] + recorded_caveats = [{"permissions": form.validated_scope, "version": 1}] + macaroon_caveats = [caveats.RequestUser(user_id=self.request.user.id)] else: project_ids = [ str(project.id) for project in self.request.user.projects if project.normalized_name in form.validated_scope["projects"] ] - macaroon_caveats = [ + recorded_caveats = [ {"permissions": form.validated_scope, "version": 1}, {"project_ids": project_ids}, ] + macaroon_caveats = [ + caveats.ProjectName( + normalized_names=form.validated_scope["projects"] + ), + caveats.ProjectID(project_ids=project_ids), + ] serialized_macaroon, macaroon = self.macaroon_service.create_macaroon( location=self.request.domain, user_id=self.request.user.id, description=form.description.data, - caveats=macaroon_caveats, + scopes=macaroon_caveats, ) self.user_service.record_event( self.request.user.id, tag="account:api_token:added", additional={ "description": form.description.data, - "caveats": macaroon_caveats, + "caveats": recorded_caveats, }, ) if "projects" in form.validated_scope:
diff --git a/tests/unit/macaroons/test_caveats.py b/tests/unit/macaroons/test_caveats.py --- a/tests/unit/macaroons/test_caveats.py +++ b/tests/unit/macaroons/test_caveats.py @@ -10,317 +10,324 @@ # See the License for the specific language governing permissions and # limitations under the License. -import json -import os +import dataclasses import time import pretend -import pymacaroons import pytest -from pymacaroons.exceptions import MacaroonInvalidSignatureException -from pyramid.security import Allowed +from pydantic.dataclasses import dataclass +from pymacaroons import Macaroon -from warehouse.errors import WarehouseDenied +from warehouse.macaroons import caveats from warehouse.macaroons.caveats import ( Caveat, - ExpiryCaveat, - ProjectIDsCaveat, - V1Caveat, - Verifier, + CaveatError, + Expiration, + Failure, + ProjectID, + ProjectName, + RequestUser, + Success, + deserialize, + serialize, + verify, ) +from warehouse.macaroons.caveats._core import _CaveatRegistry +from ...common.db.accounts import UserFactory from ...common.db.packaging import ProjectFactory -class TestCaveat: - def test_creation(self): - verifier = pretend.stub() - caveat = Caveat(verifier) +@dataclass(frozen=True) +class TestCaveat(Caveat): + first: int + second: int = 2 + third: int = dataclasses.field(default_factory=lambda: 3) - assert caveat.verifier is verifier - assert caveat.verify(pretend.stub()) is False - assert caveat(pretend.stub()) is False +def test_bools(): + assert bool(Success()) is True + assert bool(Failure("anything")) is False -class TestV1Caveat: + +def test_caveat_verify_fails(): + caveat = Caveat() + with pytest.raises(NotImplementedError): + caveat.verify(pretend.stub(), pretend.stub(), pretend.stub()) + + [email protected]( + "caveat,expected", + [ + (Expiration(expires_at=50, not_before=10), b"[0,50,10]"), + (ProjectName(normalized_names=["foo", "bar"]), b'[1,["foo","bar"]]'), + ( + ProjectID(project_ids=["123uuid", "456uuid"]), + b'[2,["123uuid","456uuid"]]', + ), + (RequestUser(user_id="a uuid"), b'[3,"a uuid"]'), + ], +) +def test_serialization(caveat, expected): + assert serialize(caveat) == expected + + +class TestDeserialization: @pytest.mark.parametrize( - ["predicate", "result"], + "data,expected", [ - ("invalid json", False), - ('{"version": 2}', False), - ('{"permissions": null, "version": 1}', False), - ('{"permissions": null, "version": 2}', False), - ('{"permissions": "user", "version": 2}', False), - ('{"permissions": "", "version": 2}', False), + # Current Caveat Style + (b"[0,50,10]", Expiration(expires_at=50, not_before=10)), + (b'[1,["foo","bar"]]', ProjectName(normalized_names=["foo", "bar"])), + ( + b'[2,["123uuid","456uuid"]]', + ProjectID(project_ids=["123uuid", "456uuid"]), + ), + (b'[3,"a uuid"]', RequestUser(user_id="a uuid")), + # Legacy Caveat Style + (b'{"exp": 50, "nbf": 10}', Expiration(expires_at=50, not_before=10)), + ( + b'{"version": 1, "permissions": {"projects": ["foo", "bar"]}}', + ProjectName(normalized_names=["foo", "bar"]), + ), + ( + b'{"project_ids": ["123uuid", "456uuid"]}', + ProjectID(project_ids=["123uuid", "456uuid"]), + ), ], ) - def test_verify_invalid_predicates(self, predicate, result): - verifier = pretend.stub() - caveat = V1Caveat(verifier) + def test_valid_deserialization(self, data, expected): + assert deserialize(data) == expected - assert caveat(predicate) is False + @pytest.mark.parametrize( + "data", + [ + b'{"version": 1}', + b'{"version": 1, "permissions": "user"}', + b'{"version": 1, "permissions": []}', + b'{"version": 1, 
"permissions": {"otherkey": "foo"}}', + b'{"exp": 1}', + b'{"nbf": 1}', + b'[0,"50",10]', + b"[0,5]", + b'"foo"', + b"null", + b"[]", + b"[9999999]", + ], + ) + def test_invalid_deserialization(self, data): + with pytest.raises(CaveatError): + deserialize(data) + + def test_valid_test_valid_deserialization_request_user( + self, pyramid_request, pyramid_config + ): + pyramid_request.user = pretend.stub(id="a uuid") + assert deserialize(b'{"version": 1, "permissions": "user"}') == RequestUser( + user_id="a uuid" + ) - def test_verify_valid_predicate(self): - verifier = pretend.stub() - caveat = V1Caveat(verifier) - predicate = '{"permissions": "user", "version": 1}' + def test_invalid_deserialization_request_user( + self, pyramid_request, pyramid_config + ): + pyramid_request.user = None + with pytest.raises(CaveatError): + deserialize(b'{"version": 1, "permissions": "user"}') + + def test_deserialize_with_defaults(self): + assert TestCaveat.__deserialize__([1]) == TestCaveat(first=1, second=2, third=3) + assert TestCaveat.__deserialize__([1, 5]) == TestCaveat( + first=1, second=5, third=3 + ) + assert TestCaveat.__deserialize__([1, 5, 7]) == TestCaveat( + first=1, second=5, third=7 + ) - assert caveat(predicate) is True - def test_verify_project_invalid_context(self): - verifier = pretend.stub(context=pretend.stub()) - caveat = V1Caveat(verifier) +class TestExpirationCaveat: + def test_verify_not_before(self): + not_before = int(time.time()) + 60 + expiry = not_before + 60 - predicate = {"version": 1, "permissions": {"projects": ["notfoobar"]}} + caveat = Expiration(expires_at=expiry, not_before=not_before) + result = caveat.verify(pretend.stub(), pretend.stub(), pretend.stub()) - assert caveat(json.dumps(predicate)) is False + assert result == Failure("token is expired") - def test_verify_project_invalid_project_name(self, db_request): - project = ProjectFactory.create(name="foobar") - verifier = pretend.stub(context=project) - caveat = V1Caveat(verifier) + def test_verify_already_expired(self): + not_before = int(time.time()) - 10 + expiry = not_before - 5 + + caveat = Expiration(expires_at=expiry, not_before=not_before) + result = caveat.verify(pretend.stub(), pretend.stub(), pretend.stub()) + + assert result == Failure("token is expired") + + def test_verify_ok(self): + not_before = int(time.time()) - 10 + expiry = int(time.time()) + 60 + + caveat = Expiration(expires_at=expiry, not_before=not_before) + result = caveat.verify(pretend.stub(), pretend.stub(), pretend.stub()) + + assert result == Success() - predicate = {"version": 1, "permissions": {"projects": ["notfoobar"]}} - assert caveat(json.dumps(predicate)) is False +class TestProjectNameCaveat: + def test_verify_invalid_context(self): + caveat = ProjectName(normalized_names=[]) + result = caveat.verify(pretend.stub(), pretend.stub(), pretend.stub()) + + assert result == Failure( + "project-scoped token used outside of a project context" + ) - def test_verify_project_no_projects_object(self, db_request): + def test_verify_invalid_project_id(self, db_request): project = ProjectFactory.create(name="foobar") - verifier = pretend.stub(context=project) - caveat = V1Caveat(verifier) - predicate = { - "version": 1, - "permissions": {"somethingthatisntprojects": ["blah"]}, - } + caveat = ProjectName(normalized_names=["not_foobar"]) + result = caveat.verify(db_request, project, pretend.stub()) - assert caveat(json.dumps(predicate)) is False + assert result == Failure( + f"project-scoped token is not valid for project: {project.name!r}" + ) 
- def test_verify_project(self, db_request): + def test_verify_ok(self, db_request): project = ProjectFactory.create(name="foobar") - verifier = pretend.stub(context=project) - caveat = V1Caveat(verifier) - predicate = {"version": 1, "permissions": {"projects": ["foobar"]}} - assert caveat(json.dumps(predicate)) is True + caveat = ProjectName(normalized_names=["foobar"]) + result = caveat.verify(db_request, project, pretend.stub()) + assert result == Success() -class TestExpiryCaveat: - @pytest.mark.parametrize( - "predicate", - [ - # invalid JSON - "invalid json", - # missing nbf and exp - '{"missing": "values"}', - # nbf and exp present, but null - '{"nbf": null, "exp": null}', - # nbf and exp present, but empty - '{"nbf": "", "exp": ""}', - # valid JSON, but wrong type - "[]", - ], - ) - def test_verify_invalid_predicates(self, predicate): - verifier = pretend.stub() - caveat = ExpiryCaveat(verifier) - assert caveat(predicate) is False +class TestProjectIDsCaveat: + def test_verify_invalid_context(self): + caveat = ProjectID(project_ids=[]) + result = caveat.verify(pretend.stub(), pretend.stub(), pretend.stub()) - def test_verify_not_before(self): - verifier = pretend.stub() - caveat = ExpiryCaveat(verifier) + assert result == Failure( + "project-scoped token used outside of a project context" + ) - not_before = int(time.time()) + 60 - expiry = not_before + 60 - predicate = json.dumps({"exp": expiry, "nbf": not_before}) - assert caveat(predicate) is False + def test_verify_invalid_project_id(self, db_request): + project = ProjectFactory.create(name="foobar") - def test_verify_already_expired(self): - verifier = pretend.stub() - caveat = ExpiryCaveat(verifier) + caveat = ProjectID(project_ids=["not-foobars-uuid"]) + result = caveat.verify(db_request, project, pretend.stub()) - not_before = int(time.time()) - 10 - expiry = not_before - 5 - predicate = json.dumps({"exp": expiry, "nbf": not_before}) - assert caveat(predicate) is False + assert result == Failure( + f"project-scoped token is not valid for project: {project.name!r}" + ) - def test_verify_ok(self): - verifier = pretend.stub() - caveat = ExpiryCaveat(verifier) + def test_verify_ok(self, db_request): + project = ProjectFactory.create(name="foobar") - not_before = int(time.time()) - 10 - expiry = int(time.time()) + 60 - predicate = json.dumps({"exp": expiry, "nbf": not_before}) - assert caveat(predicate) + caveat = ProjectID(project_ids=[str(project.id)]) + result = caveat.verify(db_request, project, pretend.stub()) + assert result == Success() -class TestProjectIDsCaveat: - @pytest.mark.parametrize( - "predicate", - [ - # invalid JSON - "invalid json", - # missing project_ids - '{"missing": "values"}', - # project_ids present, but null - '{"project_ids": null}', - # nbf and exp present, but empty - '{"project_ids": ""}', - '{"project_ids": []}', - # valid JSON, but wrong type - "[]", - '""', - ], - ) - def test_verify_invalid_predicates(self, predicate): - verifier = pretend.stub() - caveat = ProjectIDsCaveat(verifier) - assert caveat(predicate) is False +class TestRequestUserCaveat: + def test_verify_no_identity(self): + caveat = RequestUser(user_id="invalid") + result = caveat.verify( + pretend.stub(identity=None), pretend.stub(), pretend.stub() + ) - def test_verify_invalid_context(self): - verifier = pretend.stub(context=pretend.stub()) - caveat = ProjectIDsCaveat(verifier) + assert result == Failure("token with user restriction without a user") - predicate = {"project_ids": ["foo"]} + def test_verify_invalid_identity(self): 
+ caveat = RequestUser(user_id="invalid") + result = caveat.verify( + pretend.stub(identity=pretend.stub()), pretend.stub(), pretend.stub() + ) - assert caveat(json.dumps(predicate)) is False + assert result == Failure("token with user restriction without a user") - def test_verify_invalid_project_id(self, db_request): - project = ProjectFactory.create(name="foobar") - verifier = pretend.stub(context=project) - caveat = ProjectIDsCaveat(verifier) + def test_verify_invalid_user_id(self, db_request): + user = UserFactory.create() - predicate = {"project_ids": ["not-foobars-uuid"]} + caveat = RequestUser(user_id="invalid") + result = caveat.verify( + pretend.stub(identity=user), pretend.stub(), pretend.stub() + ) - assert caveat(json.dumps(predicate)) is False + assert result == Failure( + "current user does not match user restriction in token" + ) def test_verify_ok(self, db_request): - project = ProjectFactory.create(name="foobar") - verifier = pretend.stub(context=project) - caveat = ProjectIDsCaveat(verifier) + user = UserFactory.create() - predicate = {"project_ids": [str(project.id)]} + caveat = RequestUser(user_id=str(user.id)) + result = caveat.verify( + pretend.stub(identity=user), pretend.stub(), pretend.stub() + ) - assert caveat(json.dumps(predicate)) is True + assert result == Success() -class TestVerifier: - def test_creation(self): - macaroon = pretend.stub() - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() - verifier = Verifier(macaroon, context, principals, permission) +class TestCaveatRegistry: + def test_cannot_reuse_tag(self): + registry = _CaveatRegistry() + registry.add(0, Expiration) + with pytest.raises(TypeError): + registry.add(0, ProjectName) - assert verifier.macaroon is macaroon - assert verifier.context is context - assert verifier.principals is principals - assert verifier.permission is permission - def test_verify_invalid_signature(self, monkeypatch): - verify = pretend.call_recorder( - pretend.raiser(MacaroonInvalidSignatureException("Signatures do not match")) +class TestVerification: + def test_verify_invalid_signature(self): + m = Macaroon(location="somewhere", identifier="something", key=b"a secure key") + status = verify( + m, b"a different key", pretend.stub(), pretend.stub(), pretend.stub() ) - macaroon = pretend.stub() - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() - key = pretend.stub() - verifier = Verifier(macaroon, context, principals, permission) - - monkeypatch.setattr(verifier.verifier, "verify", verify) - status = verifier.verify(key) - assert not status - assert status.msg == "Signatures do not match" - assert verify.calls == [pretend.call(macaroon, key)] - - def test_verify_generic_exception(self, monkeypatch): - verify = pretend.call_recorder(pretend.raiser(ValueError)) - macaroon = pretend.stub() - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() - key = pretend.stub() - verifier = Verifier(macaroon, context, principals, permission) - - monkeypatch.setattr(verifier.verifier, "verify", verify) - status = verifier.verify(key) assert not status - assert status.msg == "malformed macaroon" - assert verify.calls == [pretend.call(macaroon, key)] - - def test_verify_inner_verifier_returns_false(self, monkeypatch): - verify = pretend.call_recorder(lambda macaroon, key: False) - macaroon = pretend.stub() - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() - key = pretend.stub() - verifier = 
Verifier(macaroon, context, principals, permission) - - monkeypatch.setattr(verifier.verifier, "verify", verify) - status = verifier.verify(key) + assert status.msg == "signatures do not match" + + def test_caveat_returns_false(self): + m = Macaroon(location="somewhere", identifier="something", key=b"a secure key") + m.add_first_party_caveat(serialize(Expiration(expires_at=10, not_before=0))) + status = verify( + m, b"a secure key", pretend.stub(), pretend.stub(), pretend.stub() + ) assert not status - assert status.msg == "unknown error" - assert verify.calls == [pretend.call(macaroon, key)] + assert status.msg == "token is expired" - @pytest.mark.parametrize( - ["caveats", "expected_status"], - [ - # Both V1 and expiry present and valid. - ( - [ - {"permissions": "user", "version": 1}, - {"exp": int(time.time()) + 3600, "nbf": int(time.time()) - 1}, - ], - Allowed("signature and caveats OK"), - ), - # V1 only present and valid. - ( - [{"permissions": "user", "version": 1}], - Allowed("signature and caveats OK"), - ), - # V1 and expiry present but V1 invalid. - ( - [{"permissions": "bad", "version": 1}], - WarehouseDenied( - "invalid permissions format", reason="invalid_api_token" - ), - ), - # V1 and expiry present but expiry invalid. - ( - [ - {"permissions": "user", "version": 1}, - {"exp": int(time.time()) + 1, "nbf": int(time.time()) + 3600}, - ], - WarehouseDenied("token is expired", reason="invalid_api_token"), - ), - ], - ) - def test_verify(self, monkeypatch, caveats, expected_status): - key = os.urandom(32) - m = pymacaroons.Macaroon( - location="fakelocation", - identifier="fakeid", - key=key, - version=pymacaroons.MACAROON_V2, + def test_caveat_errors_on_deserialize(self): + m = Macaroon(location="somewhere", identifier="something", key=b"a secure key") + m.add_first_party_caveat(b"[]") + status = verify( + m, b"a secure key", pretend.stub(), pretend.stub(), pretend.stub() ) + assert not status + assert status.msg == "caveat array cannot be empty" - for caveat in caveats: - m.add_first_party_caveat(json.dumps(caveat)) + def test_valid_caveat(self): + now = int(time.time()) + m = Macaroon(location="somewhere", identifier="something", key=b"a secure key") + m.add_first_party_caveat( + serialize(Expiration(expires_at=now + 1000, not_before=now - 1000)) + ) + status = verify( + m, b"a secure key", pretend.stub(), pretend.stub(), pretend.stub() + ) + assert status + assert status.msg == "signature and caveats OK" - # Round-trip through serialization to ensure we're not clinging to any state. 
- serialized_macaroon = m.serialize() - deserialized_macaroon = pymacaroons.Macaroon.deserialize(serialized_macaroon) + def test_generic_exception(self, monkeypatch): + def _raiser(*args, **kwargs): + raise Exception("my generic exception") - context = pretend.stub() - principals = pretend.stub() - permission = pretend.stub() + monkeypatch.setattr(caveats, "deserialize", _raiser) - verifier = Verifier(deserialized_macaroon, context, principals, permission) - status = verifier.verify(key) - assert bool(status) is bool(expected_status) - assert status.msg == expected_status.msg + m = Macaroon(location="somewhere", identifier="something", key=b"a secure key") + m.add_first_party_caveat(serialize(Expiration(expires_at=1, not_before=1))) + status = verify( + m, b"a secure key", pretend.stub(), pretend.stub(), pretend.stub() + ) + assert not status + assert status.msg == "unknown error" diff --git a/tests/unit/macaroons/test_services.py b/tests/unit/macaroons/test_services.py --- a/tests/unit/macaroons/test_services.py +++ b/tests/unit/macaroons/test_services.py @@ -23,7 +23,7 @@ from pymacaroons.exceptions import MacaroonDeserializationException from warehouse.errors import WarehouseDenied -from warehouse.macaroons import services +from warehouse.macaroons import caveats, services from warehouse.macaroons.models import Macaroon from ...common.db.accounts import UserFactory @@ -62,7 +62,10 @@ def test_find_macaroon_invalid_macaroon(self, macaroon_service): def test_find_macaroon(self, user_service, macaroon_service): user = UserFactory.create() _, macaroon = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.RequestUser(user_id=str(user.id))], ) dm = macaroon_service.find_macaroon(str(macaroon.id)) @@ -73,7 +76,10 @@ def test_find_macaroon(self, user_service, macaroon_service): def test_find_from_raw(self, user_service, macaroon_service): user = UserFactory.create() serialized, macaroon = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.RequestUser(user_id=str(user.id))], ) dm = macaroon_service.find_from_raw(serialized) @@ -117,14 +123,20 @@ def test_find_userid_malformed_macaroon(self, macaroon_service): def test_find_userid_valid_macaroon_trailinglinebreak(self, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.ProjectName(normalized_names=["foo"])], ) assert macaroon_service.find_userid(f"{raw_macaroon}\n") is None def test_find_userid(self, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.RequestUser(user_id=str(user.id))], ) user_id = macaroon_service.find_userid(raw_macaroon) @@ -164,23 +176,23 @@ def test_verify_no_macaroon(self, macaroon_service): def test_verify_invalid_macaroon(self, monkeypatch, user_service, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.RequestUser(user_id=str(user.id))], ) - 
verifier_obj = pretend.stub( - verify=pretend.call_recorder(lambda k: WarehouseDenied("foo")) - ) - verifier_cls = pretend.call_recorder(lambda *a: verifier_obj) - monkeypatch.setattr(services, "Verifier", verifier_cls) + verify = pretend.call_recorder(lambda m, k, r, c, p: WarehouseDenied("foo")) + monkeypatch.setattr(caveats, "verify", verify) + request = pretend.stub() context = pretend.stub() - principals = pretend.stub() permissions = pretend.stub() with pytest.raises(services.InvalidMacaroonError, match="foo"): - macaroon_service.verify(raw_macaroon, context, principals, permissions) - assert verifier_cls.calls == [ - pretend.call(mock.ANY, context, principals, permissions) + macaroon_service.verify(raw_macaroon, request, context, permissions) + assert verify.calls == [ + pretend.call(mock.ANY, mock.ANY, request, context, permissions) ] def test_deserialize_raw_macaroon_when_none(self, macaroon_service): @@ -228,26 +240,31 @@ def test_verify_malformed_macaroon(self, macaroon_service): def test_verify_valid_macaroon(self, monkeypatch, macaroon_service): user = UserFactory.create() raw_macaroon, _ = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.RequestUser(user_id=str(user.id))], ) - verifier_obj = pretend.stub(verify=pretend.call_recorder(lambda k: True)) - verifier_cls = pretend.call_recorder(lambda *a: verifier_obj) - monkeypatch.setattr(services, "Verifier", verifier_cls) + verify = pretend.call_recorder(lambda m, k, r, c, p: True) + monkeypatch.setattr(caveats, "verify", verify) + request = pretend.stub() context = pretend.stub() - principals = pretend.stub() permissions = pretend.stub() - assert macaroon_service.verify(raw_macaroon, context, principals, permissions) - assert verifier_cls.calls == [ - pretend.call(mock.ANY, context, principals, permissions) + assert macaroon_service.verify(raw_macaroon, request, context, permissions) + assert verify.calls == [ + pretend.call(mock.ANY, mock.ANY, request, context, permissions) ] def test_delete_macaroon(self, user_service, macaroon_service): user = UserFactory.create() _, macaroon = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [caveats.RequestUser(user_id=str(user.id))], ) macaroon_id = str(macaroon.id) @@ -265,7 +282,13 @@ def test_get_macaroon_by_description_no_macaroon(self, macaroon_service): def test_get_macaroon_by_description(self, macaroon_service): user = UserFactory.create() _, macaroon = macaroon_service.create_macaroon( - "fake location", user.id, "fake description", [{"permissions": "user"}] + "fake location", + user.id, + "fake description", + [ + caveats.ProjectName(normalized_names=["foo", "bar"]), + caveats.Expiration(expires_at=10, not_before=5), + ], ) dm = macaroon_service.find_macaroon(str(macaroon.id)) @@ -274,3 +297,14 @@ def test_get_macaroon_by_description(self, macaroon_service): macaroon_service.get_macaroon_by_description(user.id, macaroon.description) == dm ) + + def test_errors_with_wrong_caveats(self, macaroon_service): + user = UserFactory.create() + + with pytest.raises(TypeError): + macaroon_service.create_macaroon( + "fake location", + user.id, + "fake description", + [{"version": 1, "permissions": "user"}], + ) diff --git a/tests/unit/manage/test_views.py b/tests/unit/manage/test_views.py --- a/tests/unit/manage/test_views.py +++ 
b/tests/unit/manage/test_views.py @@ -41,6 +41,7 @@ ) from warehouse.admin.flags import AdminFlagValue from warehouse.forklift.legacy import MAX_FILESIZE, MAX_PROJECT_SIZE +from warehouse.macaroons import caveats from warehouse.macaroons.interfaces import IMacaroonService from warehouse.manage import views from warehouse.metrics.interfaces import IMetricsService @@ -1944,7 +1945,7 @@ def test_create_macaroon(self, monkeypatch): request = pretend.stub( POST={}, domain=pretend.stub(), - user=pretend.stub(id=pretend.stub(), has_primary_verified_email=True), + user=pretend.stub(id="a user id", has_primary_verified_email=True), find_service=lambda interface, **kw: { IMacaroonService: macaroon_service, IUserService: user_service, @@ -1980,11 +1981,8 @@ def test_create_macaroon(self, monkeypatch): location=request.domain, user_id=request.user.id, description=create_macaroon_obj.description.data, - caveats=[ - { - "permissions": create_macaroon_obj.validated_scope, - "version": 1, - } + scopes=[ + caveats.RequestUser(user_id="a user id"), ], ) ] @@ -2075,12 +2073,11 @@ def test_create_macaroon_records_events_for_each_project(self, monkeypatch): location=request.domain, user_id=request.user.id, description=create_macaroon_obj.description.data, - caveats=[ - { - "permissions": create_macaroon_obj.validated_scope, - "version": 1, - }, - {"project_ids": [str(p.id) for p in request.user.projects]}, + scopes=[ + caveats.ProjectName(normalized_names=["foo", "bar"]), + caveats.ProjectID( + project_ids=[str(p.id) for p in request.user.projects] + ), ], ) ]
Caveats v2
I've been chatting with @woodruffw lately about our Macaroons, and it got me thinking that maybe it would be worthwhile to rethink our Caveat structures. Some background since this is all very subtle!

Currently we have 3 Caveat types:

- ``V1Caveat``: This is the original caveat; it kind of mixes together a lot of concepts all at once, since it can either have ``permissions = "user"`` or ``permissions = [.. list of project names]``. It also has a "version" field, which is always 1.
- ``ExpiryCaveat``: This lets a token be restricted temporally, to expire at N and not be valid before Z.
- ``ProjectIDsCaveat``: A list of project ids that the token is valid for. This isn't currently being used, but its goal is to prevent a project-scoped token from still being good after the project has been deleted and then recreated.

All of these caveats are implemented as JSON dicts, and the way that the verification works is that for every caveat in the macaroon (which to the macaroon is just raw bytes; it has no idea what they are) it basically does:

```python
def try_caveats(data: bytes):
    for c in ALL_CAVEATS:
        if c(data):
            return True
    return False

valid = all([try_caveats(d) for d in caveats])
```

Or to put it into English: every individual ``bytes`` that a Macaroon has added as a caveat must validate as ``True`` by at least one of our caveat types, and to implement that it just throws the data into all of them and sees if any of them returns True.

Internally each caveat has to take an arbitrary payload (remember, users can add whatever they want here as well) and attempt to deserialize and interpret it. Since these are all currently implemented as untyped JSON dicts, that means we're extremely permissive in what we allow in those fields *and* trying to determine whether this is a caveat that we understand at all is intermixed all throughout the actual verification code, which makes the caveats harder to read for the people implementing/maintaining them. It also means that new caveats beyond the initial ones have been added as wholly distinct caveat types instead of falling under the versioned caveat, because trying to shoehorn them into it would have made it even harder to understand.

I don't believe many people, if anyone, are adding caveats besides us currently, but one major problem with this scheme is that if you're not very careful to keep the keys of our 3 caveat types separate (say, if you added a single caveat that had the keys for both v1 and expiry), then only one of the "types" of caveats would have its validations matter; the others would just be silently ignored, which is a subtle, non-obvious security bug (a concrete sketch of this failure mode follows below).

On top of that, the size of our caveats when serialized matters somewhat, as that data has to be embedded into the macaroon itself, so the larger the serialized result of the caveat is, the larger our macaroons are. Large macaroons are not the end of the world, but it would also be nice if we could make them smaller.

Ultimately it would be nice to make our Caveat language more expressive, able to represent additional attenuations that we don't currently have available to us, but that ends up getting hampered by how every caveat we add further exacerbates these problems.
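To make that silent-ignore pitfall concrete, here is a minimal, self-contained sketch of the v1-style "try every caveat type" verification. The validator bodies are simplified stand-ins, not warehouse's actual classes, but the control flow matches the pseudocode above:

```python
import json
import time


def verify_v1(data: bytes) -> bool:
    # Only inspects "version"/"permissions"; any other keys are ignored.
    try:
        payload = json.loads(data)
    except ValueError:
        return False
    if not isinstance(payload, dict) or payload.get("version") != 1:
        return False
    return payload.get("permissions") == "user"


def verify_expiry(data: bytes) -> bool:
    # Only inspects "exp"/"nbf"; any other keys are ignored.
    try:
        payload = json.loads(data)
    except ValueError:
        return False
    if not isinstance(payload, dict):
        return False
    try:
        return payload["nbf"] <= time.time() < payload["exp"]
    except (KeyError, TypeError):
        return False


ALL_CAVEATS = [verify_v1, verify_expiry]


def try_caveats(data: bytes) -> bool:
    return any(c(data) for c in ALL_CAVEATS)


# A single caveat mixing the keys of both types: the expiry validator
# returns True without ever looking at the (bogus) permissions value, so
# the whole caveat passes and the permissions restriction is silently lost.
mixed = json.dumps(
    {"version": 1, "permissions": "bogus", "exp": time.time() + 60, "nbf": 0}
).encode()
assert try_caveats(mixed)
```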
Thinking through it all, I think if we were to write a "caveats v2", it would be best to implement it as a union of types with a wrapper container that holds our versioning, so it would look something like:

```python
from typing import Literal

class Projects:
    names: list[str]

class Expiry:
    expires_at: int
    not_valid_before: int

class Caveat:
    version: Literal[2] = 2
    caveat: Projects | Expiry
```

Conceptually then, for this new format, we know ahead of time what all of our caveat values are, and we can use the same overall structure for every caveat instead of having multiple ad hoc structures. Our verifier function would just deserialize the data, see that it is version 2, check what type the caveat is, and, if it's a supported one, dispatch to the correct validation function for that caveat.

What translating the above to a serialization scheme looks like depends a lot on the capabilities of the specific serialization scheme in question. In something like JSON, which doesn't have support for defining types, you'd likely implement it as a tagged union, so something like:

```json
{
    "version": 2,
    "caveat": {
        "type": "Projects",
        "names": ["..."]
    }
}
```

Here the "type" key becomes a special key (that we can't use in the definition of the individual caveat types) that lets us know definitively what kind of caveat it is, without attempting to guess by seeing what the structure looks like.

We're currently using JSON, which is nice in general for content that humans might attempt to read: because it's self-describing and human readable, it doesn't require any special tools, software, or libraries to crack it open and visually see what it has in it. Unfortunately, it has the downside that its serialized contents are larger in practice than other available options [^1].

Personally, I think that for macaroons, trying to be human readable and self-describing is the wrong trade-off to make. Whatever we serialize and embed inside of the macaroon will require tooling or a library to extract either way, since the macaroon itself requires that, and we can just include those field definitions in whatever tooling/library we ultimately end up providing for that as well.

I've been playing with a number of different options, and one of the smallest options we have available to us is Protobufs, using a protobuf file that looks something like this [^2]:

```proto
message UserCaveat {
  string id = 1;
}

message ProjectCaveat {
  string normalized_name = 1;
  string id = 2;
}

message Caveat {
  uint32 version = 1;

  oneof caveat {
    UserCaveat user = 2;
    ProjectCaveat project = 3;
  }
}
```

Creating a project-scoped token that is valid for a project named ``example-project`` with a uuid4 ID goes from producing a macaroon that is 269 bytes long to one that is 201 bytes long, so we end up saving 68 bytes. The above scheme actually produces a slightly bigger token for user-scoped tokens (current = 172, protobuf = 179), but the protobuf one is embedding the uuid ID of the user, whereas the current one is just embedding ``"permissions":"user"``, so the current one is fundamentally storing less data too.

Those savings aren't anything to sneeze at, but Protobufs do have their own problems. They require an explicit compilation step to turn the ``.proto`` file into a Python file that can be imported, which means that we have to have something to build that proto file. Fortunately these things will change so infrequently that we can probably just have a make task that does the compilation, and commit the generated code, so that's not the end of the world.
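For reference, round-tripping one of these with the generated protobuf code would look roughly like the sketch below. ``caveats_pb2`` is the hypothetical module ``protoc`` would emit for the ``.proto`` file above, so this is illustrative rather than runnable as-is:

```python
# Assumes `protoc --python_out=. caveats.proto` has produced caveats_pb2.
import caveats_pb2


def encode_project_caveat(normalized_name: str, project_id: str) -> bytes:
    caveat = caveats_pb2.Caveat(
        version=2,
        project=caveats_pb2.ProjectCaveat(
            normalized_name=normalized_name, id=project_id
        ),
    )
    return caveat.SerializeToString()


def decode_caveat(data: bytes) -> "caveats_pb2.Caveat":
    caveat = caveats_pb2.Caveat()
    caveat.ParseFromString(data)
    # WhichOneof reports which branch of `oneof caveat` is set ("user",
    # "project", or None), which is where the dispatch decision would live.
    if caveat.WhichOneof("caveat") is None:
        raise ValueError("caveat payload has no recognized caveat type")
    return caveat
```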
A possibly bigger problem is that protobufs don't really have the concept of an unset field or a required field: fields have a "zero" value, and if a value isn't set then it is implicitly that zero/default value. This isn't the end of the world, but it is a bit of a weird constraint, and a non-obvious one that could make them less interesting to work with.

Another option here is a library called [msgspec](https://jcristharif.com/msgspec/); it's basically a library that lets you write a Python class with type hints, and it will know how to serialize/deserialize it, and it already supports the things we would need to implement tagged unions in a pretty simple way. Using msgspec[^3] with definitions that look something like:

```python
class BaseCaveat(msgspec.Struct):
    pass

class UserCaveat(BaseCaveat, tag="user"):
    id: str

class ProjectNameCaveat(BaseCaveat, tag="project.name"):
    name: str

class ProjectIdCaveat(BaseCaveat, tag="project.id"):
    id: str

class Caveat(msgspec.Struct):
    caveat: UserCaveat | ProjectNameCaveat | ProjectIdCaveat
    version: typing.Literal[2] = 2
```

A caveat can then be serialized to JSON by msgspec with something like:

```pycon
>>> msgspec.json.encode(Caveat(caveat=ProjectNameCaveat(name="example-project")))
b'{"caveat":{"type":"project.name","name":"example-project"},"version":2}'
```

The above is actually quite a bit larger than our current macaroons: if you add ``ProjectNameCaveat`` and ``ProjectIdCaveat`` we're up to 339 bytes (versus 269 for a relatively similar structure). We have options for getting this size down, though. The first thing is that the tag communicating what our type is currently consumes 21 bytes; we could get that down to 7-8 bytes by choosing a much more compressed name for it, using something like:

```
{"caveat":{"t":"pn","name":"example-project"},"version":2}
```

This saves 13 bytes and reduces the readability of the format some, but that's OK because readability isn't super important here. We can save another 6 bytes by shortening "version" to "v" and another 5 bytes by shortening "caveat" to "c", bringing us to:

```
{"c":{"t":"pn","name":"example-project"},"v":2}
```

At this point, we've shortened most of the "structural" fields down to as small as they can get, but we still have all of the data fields like "name" in there. We could try to keep shortening these fields down, playing whack-a-mole, but msgspec introduces another trick [^4], where it stops emitting the field names altogether. This saves a minimum of 4 bytes for every field, assuming you have a 1-character key, or more if your keys are longer. You can control each dict in our payload independently, but just for kicks let's change both of them, which gives us a result like:

```
[["pn","example-project"],2]
```

This essentially just turns an object into an array, and uses each value's position in that array to indicate what key it is for instead of using a dict, essentially moving the "key" out of band (a short msgspec sketch of this appears after the list of downsides below). This has a few downsides besides losing the self-descriptiveness:

- You can never remove a field entirely from being serialized (though you can stop emitting anything but a default value for it).
- You can never re-order your fields.
- New fields must be appended.
- You cannot omit default values (with key-based encoding, you can reduce the size of the serialized object by choosing not to emit the default values, but we can't do that in this case).

For our caveats, I don't think any of those things are particularly troublesome.
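As promised, here is the field-name trick spelled out. The knob msgspec exposes for this is (as far as I can tell from its docs) the ``array_like`` struct option; a minimal sketch, where the expected output bytes are my reading and worth double-checking against the msgspec version in use:

```python
import typing

import msgspec


class BaseCaveat(msgspec.Struct, array_like=True):
    pass


class ProjectNameCaveat(BaseCaveat, tag="pn"):
    name: str


class Caveat(msgspec.Struct, array_like=True):
    caveat: ProjectNameCaveat
    version: typing.Literal[2] = 2


# With array_like=True the tag becomes the first array element and the
# field names are dropped entirely.
encoded = msgspec.json.encode(Caveat(caveat=ProjectNameCaveat(name="example-project")))
print(encoded)  # expected: b'[["pn","example-project"],2]'
```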
I don't think our caveats are ever going to have a bunch of optional fields with a default, nor are we ever going to be able to remove fields anyway (we need to continue supporting old messages forever), so it really just comes down to ordering concerns, which don't seem particularly bad to me.

We could possibly save a couple more bytes by changing things around slightly too, using something like:

```
["pn",2,"example-project"]
```

Where those fields are the tagged union tag, the version, and the name field. This ends up going from 269 bytes for the equivalent token in our current scheme to 224 bytes, saving 45 bytes. Not quite the 68 bytes that protobuf saved us, but pretty good, I think.

If we wanted to manually construct those, or use cattrs or something rather than msgspec, we could even change things slightly and do:

```
[2,"pn","example-project"]
```

That puts the version first, then the tag, then the fields of the object, which would let us version everything, including the tag. msgspec doesn't allow it because it sees ``version`` as a field on the type itself, and it doesn't know what the type is until it's read the tag. We could only do it because we could say that we're never going to serialize something without a version field, so it can always come first. Or just use 2 extra bytes and do (a tiny hand-rolled sketch of this version-first framing follows the footnotes):

```
[2,["pn","example-project"]]
```

[^1]: This increase in size comes both from the fact that it is attempting to be human readable instead of binary and from the fact that it's attempting to be self-describing, so that the keys and types and such are embedded into the document itself instead of relying on out-of-band information. Neither option is right or wrong; they just represent different trade-offs.
[^2]: I'm not proposing this exact structure; I just threw something together to test things out.
[^3]: There's of course nothing super special about msgspec here, other than that it makes this pretty easy to do. We could implement the same thing manually, or using cattrs, or whatever.
[^4]: Again, nothing special about msgspec here except that it makes this easy.
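As promised above, a tiny hand-rolled codec for the version-first framing; the tag names and the choice of JSON are just illustrative:

```python
import json


def encode_caveat(tag: str, *fields) -> bytes:
    # [version, tag, *fields]: the version always comes first, so even
    # the tag itself is covered by the versioning.
    return json.dumps([2, tag, *fields], separators=(",", ":")).encode()


def decode_caveat(data: bytes) -> tuple[str, list]:
    version, tag, *fields = json.loads(data)
    if version != 2:
        raise ValueError(f"unsupported caveat version: {version}")
    return tag, fields


raw = encode_caveat("pn", "example-project")
assert raw == b'[2,"pn","example-project"]'
assert decode_caveat(raw) == ("pn", ["example-project"])
```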
2022-07-21T04:50:25Z
[]
[]
pypi/warehouse
11920
pypi__warehouse-11920
[ "11702" ]
c30a60be0817901c1880a24e1304560678d9d31a
diff --git a/warehouse/classifiers/models.py b/warehouse/classifiers/models.py --- a/warehouse/classifiers/models.py +++ b/warehouse/classifiers/models.py @@ -28,3 +28,4 @@ class Classifier(db.ModelBase): id = Column(Integer, primary_key=True, nullable=False) classifier = Column(Text, unique=True) + ordering = Column(Integer, nullable=True) diff --git a/warehouse/cli/classifiers.py b/warehouse/cli/classifiers.py new file mode 100644 --- /dev/null +++ b/warehouse/cli/classifiers.py @@ -0,0 +1,61 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click + +from trove_classifiers import all_classifiers as sorted_classifiers + +from warehouse.cli import warehouse + + [email protected]() # pragma: no branch +def classifiers(): + """ + Manage the Warehouse Malware Checks. + """ + + [email protected]() [email protected]_obj +def sync(config): + """ + Sync the Warehouse database with the classifiers. + """ + # Imported here because we don't want to trigger an import from anything + # but warehouse.cli at the module scope. + from warehouse.classifiers.models import Classifier + from warehouse.db import Session + + session = Session(bind=config.registry["sqlalchemy.engine"]) + + # Look up all of the valid classifiers + all_classifiers = session.query(Classifier).all() + + # Determine if we need to add any new classifiers to the database + missing_classifiers = set(sorted_classifiers) - set( + c.classifier for c in all_classifiers + ) + + # Add any new classifiers to the database + if missing_classifiers: + for name in missing_classifiers: + missing_classifier = Classifier( + classifier=name, ordering=sorted_classifiers.index(name) + ) + session.add(missing_classifier) + + # Check to see if any of our existing classifiers need their ordering + # changed + for classifier in all_classifiers: + classifier.ordering = sorted_classifiers.index(classifier.classifier) + + session.commit() diff --git a/warehouse/forklift/legacy.py b/warehouse/forklift/legacy.py --- a/warehouse/forklift/legacy.py +++ b/warehouse/forklift/legacy.py @@ -1011,26 +1011,13 @@ def file_upload(request): .one() ) except NoResultFound: - # Look up all of the valid classifiers - all_classifiers = request.db.query(Classifier).all() - # Get all the classifiers for this release - release_classifiers = [ - c for c in all_classifiers if c.classifier in form.classifiers.data - ] - - # Determine if we need to add any new classifiers to the database - missing_classifiers = set(form.classifiers.data or []) - set( - c.classifier for c in release_classifiers + release_classifiers = ( + request.db.query(Classifier) + .filter(Classifier.classifier.in_(form.classifiers.data)) + .all() ) - # Add any new classifiers to the database - if missing_classifiers: - for missing_classifier_name in missing_classifiers: - missing_classifier = Classifier(classifier=missing_classifier_name) - request.db.add(missing_classifier) - release_classifiers.append(missing_classifier) - # Parse the Project URLs structure into a key/value dict project_urls = { 
name.strip(): url.strip() diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -10,9 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +from packaging.utils import canonicalize_name, canonicalize_version from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound from pyramid.view import view_config -from sqlalchemy.orm import Load +from sqlalchemy.orm import Load, joinedload from sqlalchemy.orm.exc import NoResultFound from warehouse.cache.http import cache_control @@ -167,23 +168,24 @@ def _json_data(request, project, release, *, all_releases): @view_config( route_name="legacy.api.json.project", - context=Project, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_project(project, request): - if project.normalized_name != request.matchdict.get( - "name", project.normalized_name - ): - return HTTPMovedPermanently( - request.current_route_path(name=project.normalized_name), - headers=_CORS_HEADERS, - ) +def json_project(request): + normalized_name = canonicalize_name(request.matchdict["name"]) try: release = ( request.db.query(Release) - .filter(Release.project == project) + .join(Project) + .options( + joinedload(Release.project), + joinedload(Release.description), + joinedload(Release._project_urls), + joinedload(Release._requires_dist), + joinedload(Release.vulnerabilities), + ) + .filter(Project.normalized_name == normalized_name) .order_by( Release.yanked.asc(), Release.is_prerelease.nullslast(), @@ -192,9 +194,16 @@ def json_project(project, request): .limit(1) .one() ) + project = release.project except NoResultFound: return HTTPNotFound(headers=_CORS_HEADERS) + if project.normalized_name != request.matchdict["name"]: + return HTTPMovedPermanently( + request.current_route_path(name=project.normalized_name), + headers=_CORS_HEADERS, + ) + # Apply CORS headers. 
request.response.headers.update(_CORS_HEADERS) @@ -209,26 +218,44 @@ def json_project(project, request): @view_config( route_name="legacy.api.json.project_slash", - context=Project, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_project_slash(project, request): - return json_project(project, request) +def json_project_slash(request): + return json_project(request) @view_config( route_name="legacy.api.json.release", - context=Release, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_release(release, request): - project = release.project +def json_release(request): + normalized_name = canonicalize_name(request.matchdict["name"]) + canonical_version = canonicalize_version(request.matchdict["version"]) + + try: + release = ( + request.db.query(Release) + .join(Project) + .options( + joinedload(Release.project), + joinedload(Release.description), + joinedload(Release._project_urls), + joinedload(Release._requires_dist), + joinedload(Release.vulnerabilities), + ) + .filter(Project.normalized_name == normalized_name) + .filter( + Release.canonical_version == canonical_version, + ) + .one() + ) + project = release.project + except NoResultFound: + return HTTPNotFound(headers=_CORS_HEADERS) - if project.normalized_name != request.matchdict.get( - "name", project.normalized_name - ): + if project.normalized_name != request.matchdict["name"]: return HTTPMovedPermanently( request.current_route_path(name=project.normalized_name), headers=_CORS_HEADERS, @@ -246,9 +273,8 @@ def json_release(release, request): @view_config( route_name="legacy.api.json.release_slash", - context=Release, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_release_slash(release, request): - return json_release(release, request) +def json_release_slash(request): + return json_release(request) diff --git a/warehouse/migrations/env.py b/warehouse/migrations/env.py --- a/warehouse/migrations/env.py +++ b/warehouse/migrations/env.py @@ -57,6 +57,7 @@ def run_migrations_online(): connection=connection, target_metadata=db.metadata, compare_server_default=True, + transaction_per_migration=True, ) with context.begin_transaction(): context.run_migrations() diff --git a/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/4490777c984f_migrate_existing_data_for_release_is_.py @@ -0,0 +1,75 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +Migrate Existing Data for Release.is_prerelease + +Revision ID: 4490777c984f +Revises: b0dbcd2f5c77 +Create Date: 2022-06-27 17:49:09.835384 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "4490777c984f" +down_revision = "b0dbcd2f5c77" + + +def _get_num_rows(conn): + return list( + conn.execute( + sa.text("SELECT COUNT(id) FROM releases WHERE is_prerelease IS NULL") + ) + )[0][0] + + +def upgrade(): + conn = op.get_bind() + total_rows = _get_num_rows(conn) + max_loops = total_rows / 100000 * 2 + loops = 0 + while _get_num_rows(conn) > 0 and loops < max_loops: + loops += 1 + conn.execute( + sa.text( + """ + UPDATE releases + SET is_prerelease = pep440_is_prerelease(version) + WHERE id IN ( + SELECT id + FROM releases + WHERE is_prerelease IS NULL + LIMIT 100000 + ) + """ + ) + ) + conn.execute("COMMIT") + + op.alter_column( + "releases", + "is_prerelease", + existing_type=sa.BOOLEAN(), + server_default=sa.text("false"), + nullable=False, + ) + + +def downgrade(): + op.alter_column( + "releases", + "is_prerelease", + existing_type=sa.BOOLEAN(), + server_default=None, + nullable=True, + ) diff --git a/warehouse/migrations/versions/8a335305fd39_add_a_column_for_ordering_classifiers.py b/warehouse/migrations/versions/8a335305fd39_add_a_column_for_ordering_classifiers.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/8a335305fd39_add_a_column_for_ordering_classifiers.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Add a column for ordering classifiers + +Revision ID: 8a335305fd39 +Revises: 4490777c984f +Create Date: 2022-07-22 00:06:40.868910 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "8a335305fd39" +down_revision = "4490777c984f" + + +def upgrade(): + op.add_column( + "trove_classifiers", sa.Column("ordering", sa.Integer(), nullable=True) + ) + + +def downgrade(): + op.drop_column("trove_classifiers", "ordering") diff --git a/warehouse/migrations/versions/b0dbcd2f5c77_add_a_column_for_denormalizing_release_.py b/warehouse/migrations/versions/b0dbcd2f5c77_add_a_column_for_denormalizing_release_.py new file mode 100644 --- /dev/null +++ b/warehouse/migrations/versions/b0dbcd2f5c77_add_a_column_for_denormalizing_release_.py @@ -0,0 +1,53 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" +Add a column for denormalizing Release.is_prerelease + +Revision ID: b0dbcd2f5c77 +Revises: 8bee9c119e41 +Create Date: 2022-06-27 17:19:00.117464 +""" + +import sqlalchemy as sa + +from alembic import op + +revision = "b0dbcd2f5c77" +down_revision = "1e61006a47c2" + + +def upgrade(): + op.add_column("releases", sa.Column("is_prerelease", sa.Boolean(), nullable=True)) + + op.execute( + """ CREATE OR REPLACE FUNCTION maintain_releases_is_prerelease() + RETURNS TRIGGER AS $$ + BEGIN + NEW.is_prerelease := pep440_is_prerelease(NEW.version); + RETURN NEW; + END; + $$ + LANGUAGE plpgsql + """ + ) + + op.execute( + """ CREATE TRIGGER releases_update_is_prerelease + BEFORE INSERT OR UPDATE OF version ON releases + FOR EACH ROW + EXECUTE PROCEDURE maintain_releases_is_prerelease() + """ + ) + + +def downgrade(): + op.drop_column("releases", "is_prerelease") diff --git a/warehouse/packaging/models.py b/warehouse/packaging/models.py --- a/warehouse/packaging/models.py +++ b/warehouse/packaging/models.py @@ -47,8 +47,6 @@ from sqlalchemy.orm import validates from sqlalchemy.orm.collections import attribute_mapped_collection from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound -from sqlalchemy.sql import expression -from trove_classifiers import sorted_classifiers from warehouse import db from warehouse.accounts.models import User @@ -419,7 +417,7 @@ def __table_args__(cls): # noqa ) version = Column(Text, nullable=False) canonical_version = Column(Text, nullable=False) - is_prerelease = orm.column_property(func.pep440_is_prerelease(version)) + is_prerelease = Column(Boolean, nullable=False, server_default=sql.false()) author = Column(Text) author_email = Column(Text) maintainer = Column(Text) @@ -461,10 +459,7 @@ def __table_args__(cls): # noqa Classifier, backref="project_releases", secondary=lambda: release_classifiers, # type: ignore - order_by=expression.case( - {c: i for i, c in enumerate(sorted_classifiers)}, - value=Classifier.classifier, - ), + order_by=Classifier.ordering, passive_deletes=True, ) classifiers = association_proxy("_classifiers", "classifier") diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -442,16 +442,12 @@ def includeme(config): config.add_route( "legacy.api.json.project", "/pypi/{name}/json", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}", read_only=True, domain=warehouse, ) config.add_route( "legacy.api.json.project_slash", "/pypi/{name}/json/", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}", read_only=True, domain=warehouse, ) @@ -459,16 +455,12 @@ def includeme(config): config.add_route( "legacy.api.json.release", "/pypi/{name}/{version}/json", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}/{version}", read_only=True, domain=warehouse, ) config.add_route( "legacy.api.json.release_slash", "/pypi/{name}/{version}/json/", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}/{version}", read_only=True, domain=warehouse, )
diff --git a/tests/common/db/packaging.py b/tests/common/db/packaging.py --- a/tests/common/db/packaging.py +++ b/tests/common/db/packaging.py @@ -43,6 +43,9 @@ class Meta: id = factory.Faker("uuid4", cast_to=None) name = factory.Faker("pystr", max_chars=12) + normalized_name = factory.LazyAttribute( + lambda o: packaging.utils.canonicalize_name(o.name) + ) class ProjectEventFactory(WarehouseFactory): diff --git a/tests/unit/cli/test_classifiers.py b/tests/unit/cli/test_classifiers.py new file mode 100644 --- /dev/null +++ b/tests/unit/cli/test_classifiers.py @@ -0,0 +1,64 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pretend + +from warehouse import db +from warehouse.classifiers.models import Classifier +from warehouse.cli import classifiers + + +def test_classifiers_update(db_request, monkeypatch, cli): + engine = pretend.stub() + config = pretend.stub(registry={"sqlalchemy.engine": engine}) + session_cls = pretend.call_recorder(lambda bind: db_request.db) + monkeypatch.setattr(db, "Session", session_cls) + + cs = [ + c.classifier + for c in db_request.db.query(Classifier).order_by(Classifier.ordering).all() + ] + + monkeypatch.setattr(classifiers, "sorted_classifiers", ["C :: D", "A :: B"] + cs) + + db_request.db.add(Classifier(classifier="A :: B", ordering=0)) + assert db_request.db.query(Classifier).filter_by(classifier="C :: D").count() == 0 + cli.invoke(classifiers.sync, obj=config) + + c = db_request.db.query(Classifier).filter_by(classifier="C :: D").one() + + assert c.classifier == "C :: D" + assert c.ordering == 0 + + c = db_request.db.query(Classifier).filter_by(classifier="A :: B").one() + + assert c.classifier == "A :: B" + assert c.ordering == 1 + + +def test_classifiers_no_update(db_request, monkeypatch, cli): + engine = pretend.stub() + config = pretend.stub(registry={"sqlalchemy.engine": engine}) + session_cls = pretend.call_recorder(lambda bind: db_request.db) + monkeypatch.setattr(db, "Session", session_cls) + + original = db_request.db.query(Classifier).order_by(Classifier.ordering).all() + + monkeypatch.setattr( + classifiers, "sorted_classifiers", [c.classifier for c in original] + ) + + cli.invoke(classifiers.sync, obj=config) + + after = db_request.db.query(Classifier).order_by(Classifier.ordering).all() + + assert original == after diff --git a/tests/unit/forklift/test_legacy.py b/tests/unit/forklift/test_legacy.py --- a/tests/unit/forklift/test_legacy.py +++ b/tests/unit/forklift/test_legacy.py @@ -3134,77 +3134,6 @@ def test_upload_succeeds_creates_release( ), ] - def test_upload_succeeds_creates_classifier( - self, pyramid_config, db_request, metrics, monkeypatch - ): - pyramid_config.testing_securitypolicy(userid=1) - - user = UserFactory.create() - EmailFactory.create(user=user) - project = ProjectFactory.create() - RoleFactory.create(user=user, project=project) - - monkeypatch.setattr(legacy, "classifiers", {"AA :: BB", "CC :: DD"}) - - db_request.db.add(Classifier(classifier="AA :: BB")) - - filename = "{}-{}.tar.gz".format(project.name, "1.0") - - 
db_request.user = user - db_request.user_agent = "warehouse-tests/6.6.6" - db_request.POST = MultiDict( - { - "metadata_version": "1.2", - "name": project.name, - "version": "1.0", - "summary": "This is my summary!", - "filetype": "sdist", - "md5_digest": _TAR_GZ_PKG_MD5, - "content": pretend.stub( - filename=filename, - file=io.BytesIO(_TAR_GZ_PKG_TESTDATA), - type="application/tar", - ), - } - ) - db_request.POST.extend( - [ - ("classifiers", "AA :: BB"), - ("classifiers", "CC :: DD"), - ("requires_dist", "foo"), - ("requires_dist", "bar (>1.0)"), - ("project_urls", "Test, https://example.com/"), - ("requires_external", "Cheese (>1.0)"), - ("provides", "testing"), - ] - ) - - storage_service = pretend.stub(store=lambda path, filepath, meta: None) - db_request.find_service = lambda svc, name=None, context=None: { - IFileStorage: storage_service, - IMetricsService: metrics, - }.get(svc) - - resp = legacy.file_upload(db_request) - - assert resp.status_code == 200 - - # Ensure that a new Classifier has been created - classifier = ( - db_request.db.query(Classifier) - .filter(Classifier.classifier == "CC :: DD") - .one() - ) - assert classifier.classifier == "CC :: DD" - - # Ensure that the Release has the new classifier - release = ( - db_request.db.query(Release) - .filter((Release.project == project) & (Release.version == "1.0")) - .one() - ) - assert release.classifiers == ["AA :: BB", "CC :: DD"] - def test_all_valid_classifiers_can_be_created(self, db_request): for classifier in classifiers: db_request.db.add(Classifier(classifier=classifier)) diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -42,6 +42,7 @@ def _assert_has_cors_headers(headers): class TestJSONProject: def test_normalizing_redirects(self, db_request): project = ProjectFactory.create() + ReleaseFactory.create(project=project, version="1.0") name = project.name.lower() if name == project.normalized_name: @@ -52,7 +53,7 @@ def test_normalizing_redirects(self, db_request): lambda name: "/project/the-redirect/" ) - resp = json.json_project(project, db_request) + resp = json.json_project(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/" @@ -63,7 +64,8 @@ def test_normalizing_redirects(self, db_request): def test_missing_release(self, db_request): project = ProjectFactory.create() - resp = json.json_project(project, db_request) + db_request.matchdict = {"name": project.normalized_name} + resp = json.json_project(db_request) assert isinstance(resp, HTTPNotFound) _assert_has_cors_headers(resp.headers) @@ -81,8 +83,9 @@ def test_with_prereleases(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -102,8 +105,9 @@ def test_only_prereleases(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -129,8 +133,9 @@ def test_all_releases_yanked(self, monkeypatch, db_request): lambda 
request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -156,8 +161,9 @@ def test_latest_release_yanked(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -184,8 +190,9 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): lambda request, project, release, *, all_releases: data ) monkeypatch.setattr(json, "_json_data", json_data) + db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(project, db_request) + rvalue = json.json_project(db_request) assert rvalue is data assert json_data.calls == [ @@ -254,8 +261,9 @@ def test_renders(self, pyramid_config, db_request, db_session): je = JournalEntryFactory.create(name=project.name, submitted_by=user) db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = {"name": project.normalized_name} - result = json.json_project(project, db_request) + result = json.json_project(db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=files[0].path), @@ -405,6 +413,7 @@ def test_renders(self, pyramid_config, db_request, db_session): class TestJSONProjectSlash: def test_normalizing_redirects(self, db_request): project = ProjectFactory.create() + ReleaseFactory.create(project=project, version="1.0") name = project.name.lower() if name == project.normalized_name: @@ -415,7 +424,7 @@ def test_normalizing_redirects(self, db_request): lambda name: "/project/the-redirect/" ) - resp = json.json_project_slash(project, db_request) + resp = json.json_project_slash(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/" @@ -434,12 +443,12 @@ def test_normalizing_redirects(self, db_request): if name == release.project.normalized_name: name = release.project.name.upper() - db_request.matchdict = {"name": name} + db_request.matchdict = {"name": name, "version": "3.0"} db_request.current_route_path = pretend.call_recorder( lambda name: "/project/the-redirect/3.0/" ) - resp = json.json_release(release, db_request) + resp = json.json_release(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/3.0/" @@ -448,6 +457,13 @@ def test_normalizing_redirects(self, db_request): pretend.call(name=release.project.normalized_name) ] + def test_missing_release(self, db_request): + project = ProjectFactory.create() + db_request.matchdict = {"name": project.normalized_name, "version": "3.0"} + resp = json.json_release(db_request) + assert isinstance(resp, HTTPNotFound) + _assert_has_cors_headers(resp.headers) + def test_detail_renders(self, pyramid_config, db_request, db_session): project = ProjectFactory.create(has_docs=True) description_content_type = "text/x-rst" @@ -510,8 +526,12 @@ def test_detail_renders(self, pyramid_config, db_request, db_session): je = JournalEntryFactory.create(name=project.name, submitted_by=user) db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + 
db_request.matchdict = { + "name": project.normalized_name, + "version": releases[3].canonical_version, + } - result = json.json_release(releases[3], db_request) + result = json.json_release(db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=files[2].path), @@ -597,8 +617,12 @@ def test_minimal_renders(self, pyramid_config, db_request): url = "/the/fake/url/" db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = { + "name": project.normalized_name, + "version": release.canonical_version, + } - result = json.json_release(release, db_request) + result = json.json_release(db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=file.path), @@ -679,8 +703,12 @@ def test_vulnerabilities_renders(self, pyramid_config, db_request): url = "/the/fake/url/" db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) + db_request.matchdict = { + "name": project.normalized_name, + "version": release.canonical_version, + } - result = json.json_release(release, db_request) + result = json.json_release(db_request) assert result["vulnerabilities"] == [ { @@ -704,12 +732,12 @@ def test_normalizing_redirects(self, db_request): if name == release.project.normalized_name: name = release.project.name.upper() - db_request.matchdict = {"name": name} + db_request.matchdict = {"name": name, "version": "3.0"} db_request.current_route_path = pretend.call_recorder( lambda name: "/project/the-redirect/3.0/" ) - resp = json.json_release_slash(release, db_request) + resp = json.json_release_slash(db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/3.0/" diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -444,32 +444,24 @@ def add_policy(name, filename): pretend.call( "legacy.api.json.project", "/pypi/{name}/json", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}", read_only=True, domain=warehouse, ), pretend.call( "legacy.api.json.project_slash", "/pypi/{name}/json/", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}", read_only=True, domain=warehouse, ), pretend.call( "legacy.api.json.release", "/pypi/{name}/{version}/json", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}/{version}", read_only=True, domain=warehouse, ), pretend.call( "legacy.api.json.release_slash", "/pypi/{name}/{version}/json/", - factory="warehouse.packaging.models:ProjectFactory", - traverse="/{name}/{version}", read_only=True, domain=warehouse, ),
Denormalize Release.is_prerelease Note: this is two commits, and it is meant to be split into two PRs: the first adds the column, and the second backfills the data and switches us over to using the new column. I've left both commits in this PR for now to make it easier to review. Between the two PRs we can, if needed, manually backfill the data in chunks before landing the second PR.
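To make the chunked-backfill plan described above concrete, here is a minimal sketch of the pattern, assuming a SQLAlchemy engine and the same table, column, and pep440_is_prerelease SQL function used in the migration in the patch; the helper name and batch size are illustrative and not part of the PR itself.

import sqlalchemy as sa

BATCH = 100_000  # illustrative chunk size; small enough to keep each transaction short


def backfill_is_prerelease(engine):
    """Backfill releases.is_prerelease in bounded chunks (hypothetical helper)."""
    while True:
        # One short transaction per chunk, so row locks are released between batches.
        with engine.begin() as conn:
            updated = conn.execute(
                sa.text(
                    """
                    UPDATE releases
                    SET is_prerelease = pep440_is_prerelease(version)
                    WHERE id IN (
                        SELECT id FROM releases
                        WHERE is_prerelease IS NULL
                        LIMIT :batch
                    )
                    """
                ),
                {"batch": BATCH},
            ).rowcount
        if updated == 0:  # no NULL rows left; the backfill is complete
            break

Committing per chunk is the point of the design: a single UPDATE over the whole table would hold locks on every touched row for the duration of the migration, while bounded batches keep each transaction short and let the backfill be resumed safely.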
2022-07-22T01:15:07Z
[]
[]
pypi/warehouse
11,950
pypi__warehouse-11950
[ "11949" ]
6e7fd3a87a4a99f200e1fb56fa44e68d30c5a989
diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py --- a/warehouse/legacy/api/json.py +++ b/warehouse/legacy/api/json.py @@ -166,12 +166,7 @@ def _json_data(request, project, release, *, all_releases): return data -@view_config( - route_name="legacy.api.json.project", - renderer="json", - decorator=_CACHE_DECORATOR, -) -def json_project(request): +def latest_release_factory(request): normalized_name = canonicalize_name(request.matchdict["name"]) try: @@ -202,6 +197,17 @@ def json_project(request): .filter(Release.id == latest.id) .one() ) + + return release + + +@view_config( + route_name="legacy.api.json.project", + context=Release, + renderer="json", + decorator=_CACHE_DECORATOR, +) +def json_project(release, request): project = release.project if project.normalized_name != request.matchdict["name"]: @@ -224,19 +230,15 @@ def json_project(request): @view_config( route_name="legacy.api.json.project_slash", + context=Release, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_project_slash(request): - return json_project(request) +def json_project_slash(release, request): + return json_project(release, request) -@view_config( - route_name="legacy.api.json.release", - renderer="json", - decorator=_CACHE_DECORATOR, -) -def json_release(request): +def release_factory(request): normalized_name = canonicalize_name(request.matchdict["name"]) version = request.matchdict["version"] canonical_version = canonicalize_version(version) @@ -269,6 +271,16 @@ def json_release(request): except NoResultFound: return HTTPNotFound(headers=_CORS_HEADERS) + return release + + +@view_config( + route_name="legacy.api.json.release", + context=Release, + renderer="json", + decorator=_CACHE_DECORATOR, +) +def json_release(release, request): project = release.project if project.normalized_name != request.matchdict["name"]: @@ -289,8 +301,9 @@ def json_release(request): @view_config( route_name="legacy.api.json.release_slash", + context=Release, renderer="json", decorator=_CACHE_DECORATOR, ) -def json_release_slash(request): - return json_release(request) +def json_release_slash(release, request): + return json_release(release, request) diff --git a/warehouse/routes.py b/warehouse/routes.py --- a/warehouse/routes.py +++ b/warehouse/routes.py @@ -442,12 +442,14 @@ def includeme(config): config.add_route( "legacy.api.json.project", "/pypi/{name}/json", + factory="warehouse.legacy.api.json.latest_release_factory", read_only=True, domain=warehouse, ) config.add_route( "legacy.api.json.project_slash", "/pypi/{name}/json/", + factory="warehouse.legacy.api.json.latest_release_factory", read_only=True, domain=warehouse, ) @@ -455,12 +457,14 @@ def includeme(config): config.add_route( "legacy.api.json.release", "/pypi/{name}/{version}/json", + factory="warehouse.legacy.api.json.release_factory", read_only=True, domain=warehouse, ) config.add_route( "legacy.api.json.release_slash", "/pypi/{name}/{version}/json/", + factory="warehouse.legacy.api.json.release_factory", read_only=True, domain=warehouse, )
diff --git a/tests/unit/legacy/api/test_json.py b/tests/unit/legacy/api/test_json.py --- a/tests/unit/legacy/api/test_json.py +++ b/tests/unit/legacy/api/test_json.py @@ -40,33 +40,11 @@ def _assert_has_cors_headers(headers): assert headers["Access-Control-Expose-Headers"] == "X-PyPI-Last-Serial" -class TestJSONProject: - def test_normalizing_redirects(self, db_request): - project = ProjectFactory.create() - ReleaseFactory.create(project=project, version="1.0") - - name = project.name.lower() - if name == project.normalized_name: - name = project.name.upper() - - db_request.matchdict = {"name": name} - db_request.current_route_path = pretend.call_recorder( - lambda name: "/project/the-redirect/" - ) - - resp = json.json_project(db_request) - - assert isinstance(resp, HTTPMovedPermanently) - assert resp.headers["Location"] == "/project/the-redirect/" - _assert_has_cors_headers(resp.headers) - assert db_request.current_route_path.calls == [ - pretend.call(name=project.normalized_name) - ] - +class TestLatestReleaseFactory: def test_missing_release(self, db_request): project = ProjectFactory.create() db_request.matchdict = {"name": project.normalized_name} - resp = json.json_project(db_request) + resp = json.latest_release_factory(db_request) assert isinstance(resp, HTTPNotFound) _assert_has_cors_headers(resp.headers) @@ -78,20 +56,8 @@ def test_with_prereleases(self, monkeypatch, db_request): ReleaseFactory.create(project=project, version="4.0.dev0") release = ReleaseFactory.create(project=project, version="3.0") - - data = pretend.stub() - json_data = pretend.call_recorder( - lambda request, project, release, *, all_releases: data - ) - monkeypatch.setattr(json, "_json_data", json_data) db_request.matchdict = {"name": project.normalized_name} - - rvalue = json.json_project(db_request) - - assert rvalue is data - assert json_data.calls == [ - pretend.call(db_request, project, release, all_releases=True) - ] + assert json.latest_release_factory(db_request) == release def test_only_prereleases(self, monkeypatch, db_request): project = ProjectFactory.create() @@ -100,20 +66,8 @@ def test_only_prereleases(self, monkeypatch, db_request): ReleaseFactory.create(project=project, version="2.0.dev0") release = ReleaseFactory.create(project=project, version="3.0.dev0") - - data = pretend.stub() - json_data = pretend.call_recorder( - lambda request, project, release, *, all_releases: data - ) - monkeypatch.setattr(json, "_json_data", json_data) db_request.matchdict = {"name": project.normalized_name} - - rvalue = json.json_project(db_request) - - assert rvalue is data - assert json_data.calls == [ - pretend.call(db_request, project, release, all_releases=True) - ] + assert json.latest_release_factory(db_request) == release def test_all_releases_yanked(self, monkeypatch, db_request): """ @@ -128,20 +82,8 @@ def test_all_releases_yanked(self, monkeypatch, db_request): ReleaseFactory.create(project=project, version="4.0.dev0", yanked=True) release = ReleaseFactory.create(project=project, version="3.0", yanked=True) - - data = pretend.stub() - json_data = pretend.call_recorder( - lambda request, project, release, *, all_releases: data - ) - monkeypatch.setattr(json, "_json_data", json_data) db_request.matchdict = {"name": project.normalized_name} - - rvalue = json.json_project(db_request) - - assert rvalue is data - assert json_data.calls == [ - pretend.call(db_request, project, release, all_releases=True) - ] + assert json.latest_release_factory(db_request) == release def test_latest_release_yanked(self, 
monkeypatch, db_request): """ @@ -156,20 +98,8 @@ def test_latest_release_yanked(self, monkeypatch, db_request): ReleaseFactory.create(project=project, version="3.0.dev0") release = ReleaseFactory.create(project=project, version="2.0") - - data = pretend.stub() - json_data = pretend.call_recorder( - lambda request, project, release, *, all_releases: data - ) - monkeypatch.setattr(json, "_json_data", json_data) db_request.matchdict = {"name": project.normalized_name} - - rvalue = json.json_project(db_request) - - assert rvalue is data - assert json_data.calls == [ - pretend.call(db_request, project, release, all_releases=True) - ] + assert json.latest_release_factory(db_request) == release def test_all_non_prereleases_yanked(self, monkeypatch, db_request): """ @@ -185,19 +115,31 @@ def test_all_non_prereleases_yanked(self, monkeypatch, db_request): ReleaseFactory.create(project=project, version="3.0.dev0", yanked=True) release = ReleaseFactory.create(project=project, version="2.0.dev0") + db_request.matchdict = {"name": project.normalized_name} + assert json.latest_release_factory(db_request) == release + + +class TestJSONProject: + def test_normalizing_redirects(self, db_request): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="1.0") - data = pretend.stub() - json_data = pretend.call_recorder( - lambda request, project, release, *, all_releases: data + name = project.name.lower() + if name == project.normalized_name: + name = project.name.upper() + + db_request.matchdict = {"name": name} + db_request.current_route_path = pretend.call_recorder( + lambda name: "/project/the-redirect/" ) - monkeypatch.setattr(json, "_json_data", json_data) - db_request.matchdict = {"name": project.normalized_name} - rvalue = json.json_project(db_request) + resp = json.json_project(release, db_request) - assert rvalue is data - assert json_data.calls == [ - pretend.call(db_request, project, release, all_releases=True) + assert isinstance(resp, HTTPMovedPermanently) + assert resp.headers["Location"] == "/project/the-redirect/" + _assert_has_cors_headers(resp.headers) + assert db_request.current_route_path.calls == [ + pretend.call(name=project.normalized_name) ] def test_renders(self, pyramid_config, db_request, db_session): @@ -264,7 +206,7 @@ def test_renders(self, pyramid_config, db_request, db_session): db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) db_request.matchdict = {"name": project.normalized_name} - result = json.json_project(db_request) + result = json.json_project(releases[-1], db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=files[0].path), @@ -414,7 +356,7 @@ def test_renders(self, pyramid_config, db_request, db_session): class TestJSONProjectSlash: def test_normalizing_redirects(self, db_request): project = ProjectFactory.create() - ReleaseFactory.create(project=project, version="1.0") + release = ReleaseFactory.create(project=project, version="1.0") name = project.name.lower() if name == project.normalized_name: @@ -425,7 +367,7 @@ def test_normalizing_redirects(self, db_request): lambda name: "/project/the-redirect/" ) - resp = json.json_project_slash(db_request) + resp = json.json_project_slash(release, db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/" @@ -435,33 +377,11 @@ def test_normalizing_redirects(self, db_request): ] -class TestJSONRelease: - def test_normalizing_redirects(self, db_request): - project = 
ProjectFactory.create() - release = ReleaseFactory.create(project=project, version="3.0") - - name = release.project.name.lower() - if name == release.project.normalized_name: - name = release.project.name.upper() - - db_request.matchdict = {"name": name, "version": "3.0"} - db_request.current_route_path = pretend.call_recorder( - lambda name: "/project/the-redirect/3.0/" - ) - - resp = json.json_release(db_request) - - assert isinstance(resp, HTTPMovedPermanently) - assert resp.headers["Location"] == "/project/the-redirect/3.0/" - _assert_has_cors_headers(resp.headers) - assert db_request.current_route_path.calls == [ - pretend.call(name=release.project.normalized_name) - ] - +class TestReleaseFactory: def test_missing_release(self, db_request): project = ProjectFactory.create() db_request.matchdict = {"name": project.normalized_name, "version": "3.0"} - resp = json.json_release(db_request) + resp = json.release_factory(db_request) assert isinstance(resp, HTTPNotFound) _assert_has_cors_headers(resp.headers) @@ -470,7 +390,7 @@ def test_missing_release_with_multiple_canonical(self, db_request): ReleaseFactory.create(project=project, version="3.0.0") ReleaseFactory.create(project=project, version="3.0.0.0") db_request.matchdict = {"name": project.normalized_name, "version": "3.0"} - resp = json.json_release(db_request) + resp = json.release_factory(db_request) assert isinstance(resp, HTTPNotFound) _assert_has_cors_headers(resp.headers) @@ -482,15 +402,51 @@ def test_missing_release_with_multiple_canonical(self, db_request): (["0.1", "1.0", "2.0", "3.0.0"], "3.0.0.0.0", "3.0.0.0.0"), ], ) - def test_detail_renders( - self, - pyramid_config, - db_request, - db_session, - other_versions, - the_version, - lookup_version, + def test_lookup_release( + self, db_request, other_versions, the_version, lookup_version ): + project = ProjectFactory.create() + releases = [ + ReleaseFactory.create(project=project, version=v) for v in other_versions + ] + releases += [ReleaseFactory.create(project=project, version=the_version)] + + user = UserFactory.create() + JournalEntryFactory.reset_sequence() + JournalEntryFactory.create(name=project.name, submitted_by=user) + + db_request.matchdict = { + "name": project.normalized_name, + "version": lookup_version, + } + + assert json.release_factory(db_request) == releases[-1] + + +class TestJSONRelease: + def test_normalizing_redirects(self, db_request): + project = ProjectFactory.create() + release = ReleaseFactory.create(project=project, version="3.0") + + name = release.project.name.lower() + if name == release.project.normalized_name: + name = release.project.name.upper() + + db_request.matchdict = {"name": name, "version": "3.0"} + db_request.current_route_path = pretend.call_recorder( + lambda name: "/project/the-redirect/3.0/" + ) + + resp = json.json_release(release, db_request) + + assert isinstance(resp, HTTPMovedPermanently) + assert resp.headers["Location"] == "/project/the-redirect/3.0/" + _assert_has_cors_headers(resp.headers) + assert db_request.current_route_path.calls == [ + pretend.call(name=release.project.normalized_name) + ] + + def test_detail_renders(self, pyramid_config, db_request, db_session): project = ProjectFactory.create(has_docs=True) description_content_type = "text/x-rst" url = "/the/fake/url/" @@ -514,12 +470,13 @@ def test_detail_renders( expected_urls = dict(tuple(expected_urls)) releases = [ - ReleaseFactory.create(project=project, version=v) for v in other_versions + ReleaseFactory.create(project=project, version=v) + for v in 
["0.1", "1.0", "2.0"] ] releases += [ ReleaseFactory.create( project=project, - version=the_version, + version="3.0", description=DescriptionFactory.create( content_type=description_content_type ), @@ -553,10 +510,10 @@ def test_detail_renders( db_request.route_url = pretend.call_recorder(lambda *args, **kw: url) db_request.matchdict = { "name": project.normalized_name, - "version": lookup_version, + "version": "3.0", } - result = json.json_release(db_request) + result = json.json_release(releases[-1], db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=files[-1].path), @@ -597,7 +554,7 @@ def test_detail_renders( "summary": None, "yanked": False, "yanked_reason": None, - "version": the_version, + "version": "3.0", }, "urls": [ { @@ -647,7 +604,7 @@ def test_minimal_renders(self, pyramid_config, db_request): "version": release.canonical_version, } - result = json.json_release(db_request) + result = json.json_release(release, db_request) assert set(db_request.route_url.calls) == { pretend.call("packaging.file", path=file.path), @@ -733,7 +690,7 @@ def test_vulnerabilities_renders(self, pyramid_config, db_request): "version": release.canonical_version, } - result = json.json_release(db_request) + result = json.json_release(release, db_request) assert result["vulnerabilities"] == [ { @@ -762,7 +719,7 @@ def test_normalizing_redirects(self, db_request): lambda name: "/project/the-redirect/3.0/" ) - resp = json.json_release_slash(db_request) + resp = json.json_release_slash(release, db_request) assert isinstance(resp, HTTPMovedPermanently) assert resp.headers["Location"] == "/project/the-redirect/3.0/" diff --git a/tests/unit/test_routes.py b/tests/unit/test_routes.py --- a/tests/unit/test_routes.py +++ b/tests/unit/test_routes.py @@ -444,24 +444,28 @@ def add_policy(name, filename): pretend.call( "legacy.api.json.project", "/pypi/{name}/json", + factory="warehouse.legacy.api.json.latest_release_factory", read_only=True, domain=warehouse, ), pretend.call( "legacy.api.json.project_slash", "/pypi/{name}/json/", + factory="warehouse.legacy.api.json.latest_release_factory", read_only=True, domain=warehouse, ), pretend.call( "legacy.api.json.release", "/pypi/{name}/{version}/json", + factory="warehouse.legacy.api.json.release_factory", read_only=True, domain=warehouse, ), pretend.call( "legacy.api.json.release_slash", "/pypi/{name}/{version}/json/", + factory="warehouse.legacy.api.json.release_factory", read_only=True, domain=warehouse, ),
CDN Purging is broken, stale JSON responses are served instead In https://github.com/pypi/warehouse/pull/11920 we removed traversal for our legacy JSON views; however, our purging mechanisms depend on traversal to determine when to issue purges. As a result, PyPI is currently serving stale data for every JSON endpoint whose response was cached before the merge of pypi/warehouse#11920, because no purges are issued for those responses when something changes that would update them (e.g., publishing a new release). (ref: https://github.com/pypi/infra/issues/104, https://github.com/pypi/warehouse/issues/11936, https://github.com/pypi/warehouse/issues/11937, https://github.com/pypi/warehouse/issues/11939, https://github.com/pypi/warehouse/issues/11940)
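For context on why the route-factory fix in the patch above restores purging, here is a minimal, illustrative sketch of context-keyed origin-cache purging; the key_factory name and the key strings are assumptions for illustration only and do not reproduce warehouse's real surrogate keys.

def key_factory(context):
    """Derive CDN surrogate/purge keys from the traversal context (illustrative)."""
    # When a route has no factory, the view is matched by URL pattern alone and
    # there is no model context to key on, so nothing is ever purged -- the bug
    # described above. Returning the Release from a route factory restores this.
    if context is None or not hasattr(context, "project"):
        return set()
    return {
        # Purged whenever anything about the project changes (new release, etc.).
        f"project/{context.project.normalized_name}",
        # Purged when this specific release changes (e.g., it is yanked).
        f"release/{context.project.normalized_name}/{context.version}",
    }

A cache layer would attach such keys to the response (e.g., as a Fastly Surrogate-Key header) and issue purges against the same keys when the underlying rows change, which is why views whose requests carry no context can never be invalidated.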
2022-07-25T15:22:27Z
[]
[]