repo
stringclasses 358
values | pull_number
int64 6
67.9k
| instance_id
stringlengths 12
49
| issue_numbers
sequencelengths 1
7
| base_commit
stringlengths 40
40
| patch
stringlengths 87
101M
| test_patch
stringlengths 72
22.3M
| problem_statement
stringlengths 3
256k
| hints_text
stringlengths 0
545k
| created_at
stringlengths 20
20
| PASS_TO_PASS
sequencelengths 0
0
| FAIL_TO_PASS
sequencelengths 0
0
|
---|---|---|---|---|---|---|---|---|---|---|---|
freedomofpress/securedrop | 1,619 | freedomofpress__securedrop-1619 | [
"1597"
] | c8748a3c3b3910f8eeacdd8ea4cf219ea9fe58e0 | diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -270,6 +270,9 @@ def _scrypt_hash(self, password, salt, params=None):
MAX_PASSWORD_LEN = 128
def set_password(self, password):
+ # Don't do anything if user's password hasn't changed.
+ if self.pw_hash and self.valid_password(password):
+ return
# Enforce a reasonable maximum length for passwords to avoid DoS
if len(password) > self.MAX_PASSWORD_LEN:
raise InvalidPasswordLength(password)
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -247,40 +247,67 @@ def admin_reset_two_factor_hotp():
return render_template('admin_edit_hotp_secret.html', uid=uid)
+class PasswordMismatchError(Exception):
+ pass
+
+
+def edit_account_password(user, password, password_again):
+ if password:
+ if password != password_again:
+ flash("Passwords didn't match!", "error")
+ raise PasswordMismatchError
+ try:
+ user.set_password(password)
+ except InvalidPasswordLength:
+ flash("Password must be less than {} characters!".format(
+ Journalist.MAX_PASSWORD_LEN), 'error')
+ raise
+
+
+def commit_account_changes(user):
+ if db_session.is_modified(user):
+ try:
+ db_session.add(user)
+ db_session.commit()
+ except Exception as e:
+ flash("An unexpected error occurred! Please check the application "
+ "logs or inform your adminstrator.", "error")
+ app.logger.error("Account changes for '{}' failed: {}".format(user,
+ e))
+ db_session.rollback()
+ else:
+ flash("Account successfully updated!", "success")
+
+
@app.route('/admin/edit/<int:user_id>', methods=('GET', 'POST'))
@admin_required
def admin_edit_user(user_id):
user = Journalist.query.get(user_id)
if request.method == 'POST':
- if request.form['username'] != "":
+ if request.form['username']:
new_username = request.form['username']
- if Journalist.query.filter_by(username=new_username).one_or_none():
- flash("Username {} is already taken".format(new_username),
+ if new_username == user.username:
+ pass
+ elif Journalist.query.filter_by(
+ username=new_username).one_or_none():
+ flash('Username "{}" is already taken!'.format(new_username),
"error")
+ return redirect(url_for("admin_edit_user", user_id=user_id))
else:
user.username = new_username
- if request.form['password'] != "":
- if request.form['password'] != request.form['password_again']:
- flash("Passwords didn't match", "error")
- return redirect(url_for("admin_edit_user", user_id=user_id))
- try:
- user.set_password(request.form['password'])
- flash("Password successfully changed for user {} ".format(
- user.username), "notification")
- except InvalidPasswordLength:
- flash("Your password is too long "
- "(maximum length {} characters)".format(
- Journalist.MAX_PASSWORD_LEN), "error")
- return redirect(url_for("admin_edit_user", user_id=user_id))
+ try:
+ edit_account_password(user, request.form['password'],
+ request.form['password_again'])
+ except (PasswordMismatchError, InvalidPasswordLength):
+ return redirect(url_for("admin_edit_user", user_id=user_id))
user.is_admin = bool(request.form.get('is_admin'))
- db_session.add(user)
- db_session.commit()
+ commit_account_changes(user)
- return render_template("admin_edit_user.html", user=user)
+ return render_template("edit_account.html", user=user)
@app.route('/admin/delete/<int:user_id>', methods=('POST',))
@@ -306,31 +333,14 @@ def edit_account():
user = g.user
if request.method == 'POST':
- if request.form['password'] != "":
- if request.form['password'] != request.form['password_again']:
- flash("Passwords didn't match", "error")
- return redirect(url_for("edit_account"))
- try:
- user.set_password(request.form['password'])
- except InvalidPasswordLength:
- flash("Your password is too long "
- "(maximum length {} characters)".format(
- Journalist.MAX_PASSWORD_LEN), "error")
- return redirect(url_for("edit_account"))
-
try:
- db_session.add(user)
- db_session.commit()
- flash(
- "Password successfully changed!",
- "notification")
- except Exception as e:
- flash(
- "An unknown error occurred, please inform your administrator",
- "error")
- app.logger.error("Password change for '{}' failed: {}".format(
- user, e))
- db_session.rollback()
+ edit_account_password(user, request.form['password'],
+ request.form['password_again'])
+ except (PasswordMismatchError, InvalidPasswordLength):
+ return redirect(url_for('edit_account'))
+
+ commit_account_changes(user)
+
return render_template('edit_account.html')
| diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -75,10 +75,6 @@ def _admin_visits_admin_interface(self):
h1s = self.driver.find_elements_by_tag_name('h1')
self.assertIn("Admin Interface", [el.text for el in h1s])
- users_table_rows = self.driver.find_elements_by_css_selector(
- 'table#users tr.user')
- self.assertEquals(len(users_table_rows), 1)
-
def _add_user(self, username, password, is_admin=False):
username_field = self.driver.find_element_by_css_selector(
'input[name="username"]')
@@ -176,12 +172,76 @@ def _new_user_can_log_in(self):
self.driver.find_element_by_link_text,
'Admin')
+ def _edit_account(self):
+ edit_account_link = self.driver.find_element_by_link_text(
+ 'Edit Account')
+ edit_account_link.click()
+
+ # The header says "Edit your account"
+ h1s = self.driver.find_elements_by_tag_name('h1')[0]
+ self.assertEqual('Edit your account', h1s.text)
+ # There's no link back to the admin interface.
+ with self.assertRaises(NoSuchElementException):
+ self.driver.find_element_by_partial_link_text('Back to admin interface')
+ # There's no field to change your username.
+ with self.assertRaises(NoSuchElementException):
+ self.driver.find_element_by_css_selector('#username')
+ # There's no checkbox to change the administrator status of your
+ # account.
+ with self.assertRaises(NoSuchElementException):
+ username_field = self.driver.find_element_by_css_selector('#is_admin')
+ # 2FA reset buttons at the bottom point to the user URLs for reset.
+ totp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-totp')[0]
+ self.assertRegexpMatches(totp_reset_button.get_attribute('action'),
+ '/account/reset-2fa-totp')
+ hotp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-hotp')[0]
+ self.assertRegexpMatches(hotp_reset_button.get_attribute('action'),
+ '/account/reset-2fa-hotp')
+
def _edit_user(self, username):
+ user = Journalist.query.filter_by(username=username).one()
+
new_user_edit_links = filter(
lambda el: el.get_attribute('data-username') == username,
self.driver.find_elements_by_tag_name('a'))
self.assertEquals(len(new_user_edit_links), 1)
new_user_edit_links[0].click()
+ # The header says "Edit user "username"".
+ h1s = self.driver.find_elements_by_tag_name('h1')[0]
+ self.assertEqual('Edit user "{}"'.format(username), h1s.text)
+ # There's a convenient link back to the admin interface.
+ admin_interface_link = self.driver.find_element_by_partial_link_text(
+ 'Back to admin interface')
+ self.assertRegexpMatches(admin_interface_link.get_attribute('href'),
+ '/admin$')
+ # There's a field to change the user's username and it's already filled
+ # out with the user's username.
+ username_field = self.driver.find_element_by_css_selector('#username')
+ self.assertEqual(username_field.get_attribute('placeholder'), username)
+ # There's a checkbox to change the administrator status of the user and
+ # it's already checked appropriately to reflect the current status of
+ # our user.
+ username_field = self.driver.find_element_by_css_selector('#is_admin')
+ self.assertEqual(bool(username_field.get_attribute('checked')),
+ user.is_admin)
+ # 2FA reset buttons at the bottom point to the admin URLs for
+ # resettting 2FA and include the correct user id in the hidden uid.
+ totp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-totp')[0]
+ self.assertRegexpMatches(totp_reset_button.get_attribute('action'),
+ '/admin/reset-2fa-totp')
+ totp_reset_uid = totp_reset_button.find_element_by_name('uid')
+ self.assertEqual(int(totp_reset_uid.get_attribute('value')), user.id)
+ self.assertFalse(totp_reset_uid.is_displayed())
+ hotp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-hotp')[0]
+ self.assertRegexpMatches(hotp_reset_button.get_attribute('action'),
+ '/admin/reset-2fa-hotp')
+ hotp_reset_uid = hotp_reset_button.find_element_by_name('uid')
+ self.assertEqual(int(hotp_reset_uid.get_attribute('value')), user.id)
+ self.assertFalse(hotp_reset_uid.is_displayed())
def _admin_can_edit_new_user(self):
# Log the new user out
@@ -250,7 +310,7 @@ def _admin_can_edit_new_user(self):
'input[name="password_again"]')
password_again_field.send_keys(new_password)
update_user_btn = self.driver.find_element_by_css_selector(
- 'button#update-user')
+ 'button#update')
update_user_btn.click()
# Wait until page refreshes to avoid causing a broken pipe error (#623)
diff --git a/securedrop/tests/functional/make_account_changes.py b/securedrop/tests/functional/make_account_changes.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/make_account_changes.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+from unittest import TestCase
+
+from functional_test import FunctionalTest
+from journalist_navigation_steps import JournalistNavigationSteps
+
+class MakeAccountChanges(FunctionalTest, JournalistNavigationSteps, TestCase):
+ def test_admin_edit_account_html_template_rendering(self):
+ """The edit_account.html template is used both when an admin is editing
+ a user's account, and when a user is editing their own account. While
+ there is no security risk in doing so, we do want to ensure the UX is
+ as expected: that only the elements that belong in a particular view
+ are exposed there."""
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ # Admin view of admin user
+ self._edit_user('admin')
+ self._admin_visits_admin_interface()
+ self._admin_adds_a_user()
+ # Admin view of non-admin user
+ self._edit_user('dellsberg')
+ # User view of self
+ self._edit_account()
+ self._logout()
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -181,14 +181,14 @@ def test_admin_edits_user_password_success_response(self):
data=dict(username=self.user.username, is_admin=False,
password='valid', password_again='valid'))
- self.assertIn('Password successfully changed', resp.data)
+ self.assertMessageFlashed("Account successfully updated!", 'success')
def test_user_edits_password_success_reponse(self):
self._login_user()
resp = self.client.post(url_for('edit_account'),
data=dict(password='valid',
password_again='valid'))
- self.assertIn("Password successfully changed", resp.data)
+ self.assertMessageFlashed("Account successfully updated!", 'success')
def test_admin_edits_user_password_mismatch_warning(self):
self._login_admin()
@@ -199,7 +199,7 @@ def test_admin_edits_user_password_mismatch_warning(self):
password='not', password_again='thesame'),
follow_redirects=True)
- self.assertIn(escape("Passwords didn't match"), resp.data)
+ self.assertMessageFlashed("Passwords didn't match!", "error")
def test_user_edits_password_mismatch_redirect(self):
self._login_user()
@@ -237,7 +237,9 @@ def test_admin_edits_user_password_too_long_warning(self):
password_again=overly_long_password),
follow_redirects=True)
- self.assertIn('Your password is too long', resp.data)
+ self.assertMessageFlashed('Password must be less than {} '
+ 'characters!'.format(
+ Journalist.MAX_PASSWORD_LEN), 'error')
def test_user_edits_password_too_long_warning(self):
self._login_user()
@@ -248,7 +250,9 @@ def test_user_edits_password_too_long_warning(self):
password_again=overly_long_password),
follow_redirects=True)
- self.assertIn('Your password is too long', resp.data)
+ self.assertMessageFlashed('Password must be less than {} '
+ 'characters!'.format(
+ Journalist.MAX_PASSWORD_LEN), 'error')
def test_admin_add_user_password_too_long_warning(self):
self._login_admin()
@@ -272,8 +276,8 @@ def test_admin_edits_user_invalid_username(self):
data=dict(username=new_username, is_admin=False,
password='', password_again=''))
- self.assertIn('Username {} is already taken'.format(new_username),
- resp.data)
+ self.assertMessageFlashed('Username "{}" is already taken!'.format(
+ new_username), 'error')
def test_admin_resets_user_hotp(self):
self._login_admin()
@@ -436,14 +440,16 @@ def test_too_long_user_password_change(self):
password_again=overly_long_password),
follow_redirects=True)
- self.assertIn('Your password is too long', res.data)
+ self.assertMessageFlashed('Password must be less than {} '
+ 'characters!'.format(
+ Journalist.MAX_PASSWORD_LEN), 'error')
def test_valid_user_password_change(self):
self._login_user()
res = self.client.post(url_for('edit_account'), data=dict(
password='valid',
password_again='valid'))
- self.assertIn("Password successfully changed", res.data)
+ self.assertMessageFlashed("Account successfully updated!", 'success')
def test_regenerate_totp(self):
self._login_user()
| Reset 2FA buttons in the `/admin/edit/<int:user_id>` view are way out of alignment
The reset TOTP button is not justified left and there is no div before the reset TOTP and HOTP buttons as there is in the `/account` view. This would be a good time to DRY out the templates here by making a single template for these two views. There is no security risk in doing so as we rely on the `@login_required` decorator, not choice of template, which is irrelevant if you know the right URL to send the right HTTP request to because you've read the SD source. Obviously, the template would not show the admin-only capabilities to the non-admin users, as a matter of design, not security.
| 2017-03-14T00:52:06Z | [] | [] |
|
freedomofpress/securedrop | 1,636 | freedomofpress__securedrop-1636 | [
"1620"
] | 22182dfe62453dff6aba72527f51103c117627e4 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.3.11'
+version = '0.3.12'
# The full version, including alpha/beta/rc tags.
-release = '0.3.11'
+release = '0.3.12'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.3.11'
+__version__ = '0.3.12'
| diff --git a/testinfra/common/test_system_hardening.py b/testinfra/common/test_system_hardening.py
--- a/testinfra/common/test_system_hardening.py
+++ b/testinfra/common/test_system_hardening.py
@@ -1,6 +1,9 @@
+import os
import pytest
import re
+hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
@pytest.mark.parametrize('sysctl_opt', [
('net.ipv4.conf.all.accept_redirects', 0),
@@ -60,9 +63,8 @@ def test_blacklisted_kernel_modules(Command, File, Sudo, kernel_module):
assert f.contains("^blacklist {}$".format(kernel_module))
-# Expecting failure here, since the Ansible config doesn't actually
-# disable swap, as intended. (It doesn't manage /etc/fstab.)
[email protected]
[email protected](hostenv.startswith('mon'),
+ reason="Monitor Server does not have swap disabled yet.")
def test_swap_disabled(Command):
"""
Ensure swap space is disabled. Prohibit writing memory to swapfiles
@@ -73,4 +75,4 @@ def test_swap_disabled(Command):
# A leading slash will indicate full path to a swapfile.
assert not re.search("^/", c.stdout, re.M)
# Expect that ONLY the headers will be present in the output.
- assert c.stdout == "Filename\t\t\t\tType\t\tSize\tUsed\tPriority\n"
+ assert c.stdout == "Filename\t\t\t\tType\t\tSize\tUsed\tPriority"
diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py
--- a/testinfra/common/test_user_config.py
+++ b/testinfra/common/test_user_config.py
@@ -1,8 +1,10 @@
-import re
import os
+import pytest
+import re
hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
def test_sudoers_config(File, Sudo):
"""
Check sudoers config for passwordless sudo via group membership,
| Swapfile not really disabled
The Ansible config tries to disable swapfile on the Application and Monitor Servers, via `swapoff -a`. This works, but only for the current boot cycle. If a machine is configured with a swapfile in `/etc/fstab`, that swapfile will be restored on a subsequent reboot. Since the machines reboot nightly, the `swapoff -a` approach is close to useless.
In order to disable swap effectively, the first-run Ansible config should ensure that no swap entries exist in fstab, removing them if found.
| This may effectively undermine the efforts of https://github.com/freedomofpress/securedrop/commit/1dbab3c26f0c6a2275a28d24c287705694ca2108 to minimize the duration of plaintext in memory, and should be resolved in the 0.4 release. Last time I checked it is the default to include a swapfile in `fstab` when using "guided partitioning" in the Ubuntu installer.
# Next steps
- [x] Verify that swap is re-enabled after reboot on our test instance.
- [ ] Announce intent to release 0.3.12 on https://securedrop.org/news ahead of release, so admins have some prior warning.
# Goals
1. Ensure swap is disabled now and forever on all instances after 0.3.12 is installed.
2. Securely erase any existing swap that may have been populated with sensitive data prior to the 0.3.12 release.
3. This update should be automatic and not require intervention from the admins. Therefore, we need to investigate how best to achieve these goals within the environment of the `securedrop-app-code` Debian package and its postinstall script.
Have a working implementation for detecting active swap, disabling it, shredding it, and removing the fstab entry. The changes are idempotent. Will place them in the `preinst` script in the `securedrop-app-code` package and begin testing in VMs and hardware. | 2017-03-29T18:31:58Z | [] | [] |
freedomofpress/securedrop | 1,649 | freedomofpress__securedrop-1649 | [
"1646"
] | 17b1df3952249f2fd67dbfab45d1055a08e7726d | diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -20,7 +20,6 @@
import crypto_util
import store
import template_filters
-import util
from db import db_session, Source, Submission, Reply, get_one_or_else
from request_that_secures_file_uploads import RequestThatSecuresFileUploads
from jinja2 import evalcontextfilter
@@ -125,7 +124,8 @@ def check_tor2web():
@app.route('/')
def index():
return render_template('index.html',
- custom_notification=config.CUSTOM_NOTIFICATION)
+ custom_notification=getattr(config,
+ 'CUSTOM_NOTIFICATION', ''))
def generate_unique_codename(num_words=7):
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -12,7 +12,6 @@
from werkzeug import secure_filename
from secure_tempfile import SecureTemporaryFile
-from util import PathException
import logging
log = logging.getLogger(__name__)
@@ -21,6 +20,14 @@
"^(?P<index>\d+)\-[a-z0-9-_]*(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match
+class PathException(Exception):
+
+ """An exception raised by `util.verify` when it encounters a bad path. A path
+ can be bad when it is not absolute or not normalized.
+ """
+ pass
+
+
def verify(p):
"""Assert that the path is absolute, normalized, inside `config.STORE_DIR`, and
matches the filename format.
diff --git a/securedrop/util.py b/securedrop/util.py
deleted file mode 100644
--- a/securedrop/util.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from os import path
-
-
-class PathException(Exception):
-
- """An exception raised by `util.verify` when it encounters a bad path. A path
- can be bad when it is not absolute or not normalized.
- """
- pass
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -14,6 +14,7 @@
import version
import utils
import json
+import config
class TestSourceApp(TestCase):
@@ -244,6 +245,28 @@ def test_submit_sanitizes_filename(self, gzipfile):
mode=ANY,
fileobj=ANY)
+ def test_custom_notification(self):
+ """Test that `CUSTOM_NOTIFICATION` string in config file
+ is rendered on the Source Interface page. We cannot assume
+ it will be present in production instances, since it is added
+ via the Ansible config, not the Debian package scripts."""
+ custom_msg = config.CUSTOM_NOTIFICATION
+
+ dev_msg = ("This is an insecure SecureDrop Development server "
+ "for testing ONLY. Do NOT submit documents here.")
+ staging_msg = "This is a SecureDrop Staging VM for testing ONLY"
+
+ self.assertTrue(custom_msg in (dev_msg, staging_msg))
+ resp = self.client.get('/')
+ self.assertEqual(resp.status_code, 200)
+ # The app-tests aren't host-aware, so we can't accurately predict
+ # which custom notification message we want. Let's check for both,
+ # and fail only if both are not found.
+ try:
+ self.assertIn(dev_msg, resp.data)
+ except AssertionError:
+ self.assertIn(staging_msg, resp.data)
+
def test_tor2web_warning_headers(self):
resp = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
self.assertEqual(resp.status_code, 200)
diff --git a/testinfra/app/test_apparmor.py b/testinfra/app/test_apparmor.py
--- a/testinfra/app/test_apparmor.py
+++ b/testinfra/app/test_apparmor.py
@@ -58,8 +58,7 @@ def test_apparmor_ensure_not_disabled(File, Sudo, profile):
with Sudo():
assert not f.exists
[email protected](os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] != 'app-staging',
- reason='only to be run on app-staging')
+
@pytest.mark.parametrize('complain_pkg', sdvars.apparmor_complain)
def test_app_apparmor_complain(Command, Sudo, complain_pkg):
""" Ensure app-armor profiles are in complain mode for staging """
@@ -68,8 +67,7 @@ def test_app_apparmor_complain(Command, Sudo, complain_pkg):
c = Command.check_output("aa-status | {}".format(awk))
assert complain_pkg in c
[email protected](os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] != 'app-staging',
- reason='only to be run on app-staging')
+
def test_app_apparmor_complain_count(Command, Sudo):
""" Ensure right number of app-armor profiles are in complain mode """
with Sudo():
diff --git a/testinfra/build/test_securedrop_deb_package.py b/testinfra/build/test_securedrop_deb_package.py
--- a/testinfra/build/test_securedrop_deb_package.py
+++ b/testinfra/build/test_securedrop_deb_package.py
@@ -141,3 +141,27 @@ def test_deb_package_contains_no_config_file(File, Command, deb):
# would be cleaner. Will defer to adding lintian tests later.
c = Command("dpkg-deb --contents {}".format(deb_package.path))
assert not re.search("^.*config\.py$", c.stdout, re.M)
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_no_generated_assets(File, Command, deb):
+ """
+ Ensures the `securedrop-app-code` package does not ship a minified
+ static assets, which are built automatically via Flask-Assets, and may be
+ present in the source directory used to build from.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+
+ # Only relevant for the securedrop-app-code package:
+ if "securedrop-app-code" in deb_package.path:
+ c = Command("dpkg-deb --contents {}".format(deb_package.path))
+ # static/gen/ directory should exist
+ assert re.search("^.*\./var/www/securedrop/static/gen/$", c.stdout, re.M)
+ # static/gen/ directory should be empty
+ assert not re.search("^.*\./var/www/securedrop/static/gen/.+$", c.stdout, re.M)
+
+ # static/.webassets-cache/ directory should exist
+ assert re.search("^.*\./var/www/securedrop/static/.webassets-cache/$", c.stdout, re.M)
+ # static/.webassets-cache/ directory should be empty
+ assert not re.search("^.*\./var/www/securedrop/static/.webassets-cache/.+$", c.stdout, re.M)
| Ensure latest app code runs cleanly in staging environment
Due to churn on the develop branch, the layout of the application docroot has changed quite a bit, and the current AppArmor profile is not permitting the application to run when profiles are set to enforce. We'll need to update the AppArmor profile for Apache iteratively, to make sure we whitelist only what's required. There may be slight changes necessary to the app-code build role, as well, such as creating directories, but we'll cross that bridge when we come to it.
There have also been changes to the application config values (in `config.py`) that aren't mindful of upgrade strategy: we'll need to make sure all these app code adjustments work well when existing 0.3.12 hosts upgrade.
Will keep a running list of required changes going here:
* [x] `util.py[c]` file not permitted for reading
* [x] jquery version has changed 2.1.1 -> 2.1.4
* [x] `config.CUSTOM_NOTIFICATION` may not be defined
* [x] `webassets` directory may need to be created (via deb package include; see also #1643)
* [x] stop setting complain mode on AppArmor profiles
Also considering dropping the `aa-complain` strategy in the Staging environment, since having AppArmor errors in staging is actually quite useful.
| Given the history on the util.py file, the most straightforward solution may be to move the custom `PathException` back into `store.py`, where it used to live. See:
* 7ae0fd9c884e4334b1b668e3af04c3ea63f5b258
* 956b2137255a39fb7091a82b669af6537ba0548d
> Also considering dropping the `aa-complain` strategy in the Staging environment, since having AppArmor errors in staging is actually quite useful.
I feel strongly we should stop setting `aa-complain` in staging. The `development` VM already provides devs with a platform for rapid prototyping without worrying about AppArmor profilesβthe staging environment, as another step toward imitating production instances, should indeed enable AppArmor. That's the rationale on the strategy front.
The rationale on the technical front is that since merging #1464, the staging environment actually has AppArmor profiles enabled via the deb package `postinst` script. We _could_ refactor the package build logic to disable that check intelligently if we're building for staging, but I don't think that's worth the effort, as it introduces complexity that may permit mistakes to slip in on the build process.
Using the staging environment to rebuild packages, install them, then validate via manual testing (or automated, via the application tests) that code changes play well with the Apache vhosts and the corresponding AppArmor rules is a sane and intuitive method for ensuring we're delivering stable code. | 2017-04-17T21:52:53Z | [] | [] |
freedomofpress/securedrop | 1,656 | freedomofpress__securedrop-1656 | [
"1074",
"1620"
] | 8ca6b1cbb751b98221069538dc34ca4c2929343c | diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,303 @@
+# -*- coding: utf-8 -*-
+#
+# SecureDrop documentation build configuration file, created by
+# sphinx-quickstart on Tue Oct 13 12:08:52 2015.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# Detect if we're being built by Read the Docs
+# https://docs.readthedocs.org/en/latest/faq.html#how-do-i-change-behavior-for-read-the-docs
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = ['sphinx.ext.todo', ]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'SecureDrop'
+copyright = u'2015, Freedom of the Press Foundation'
+author = u'SecureDrop Team and Contributors'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.3.12'
+# The full version, including alpha/beta/rc tags.
+release = '0.3.12'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+if on_rtd:
+ html_theme = 'default'
+else:
+ try:
+ # If you want to build the docs locally using the RTD theme,
+ # you may need to install it: ``pip install sphinx_rtd_theme``.
+ # https://github.com/snide/sphinx_rtd_theme#via-package
+ import sphinx_rtd_theme
+ html_theme = "sphinx_rtd_theme"
+ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+ except ImportError:
+ # This theme is included with Sphinx and is quite nice (based
+ # on the Pocoo themes), but since we're using the RTD theme
+ # for the production docs, it's best to use that to avoid
+ # issues due to discrepancies between the themes.
+ html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+#html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+#html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'SecureDropdoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+
+# Latex figure (float) alignment
+#'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'SecureDrop.tex', u'SecureDrop Documentation',
+ author, 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'securedrop', u'SecureDrop Documentation',
+ [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'SecureDrop', u'SecureDrop Documentation',
+ author, 'SecureDrop', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py
deleted file mode 100755
--- a/install_files/ansible-base/roles/backup/files/0.3_collect.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/python2.7
-"""
-
-This script should be copied to the App server and ran by the anisble
-plabook. When run (as root), it collects all of the necessary information
-to backup the 0.3 system and stores it in /tmp/sd-backup-0.3-TIME_STAMP.zip.gpg
-
-"""
-
-import sys
-import os
-import re
-import zipfile
-from datetime import datetime
-import functools
-# Import the application config.py file
-sys.path.append("/var/www/securedrop")
-import config
-import gnupg
-import subprocess
-
-TOR_SERVICES = "/var/lib/tor/services"
-TOR_CONFIG = "/etc/tor/torrc"
-
-
-def collect_config_file(zf):
- config_file_path = os.path.join(config.SECUREDROP_ROOT, "config.py")
- zf.write(config_file_path)
-
-
-def collect_securedrop_data_root(zf):
- # The store and key dirs are shared between both interfaces
- for root, dirs, files in os.walk(config.SECUREDROP_DATA_ROOT):
- for name in files:
- zf.write(os.path.join(root, name))
-
-
-def collect_custom_header_image(zf):
- # The custom header image is copied over the deafult `static/i/logo.png`.
- zf.write(os.path.join(config.SECUREDROP_ROOT, "static/i/logo.png"))
-
-
-def collect_tor_files(zf):
- # All of the tor hidden service private keys are stored in the THS specific
- # subdirectory `/var/lib/tor/services` backing up this directory will back
- # up all of the THS and ATHS required keys needed to restore all the hidden
- # services on that system.
- for root, dirs, files in os.walk(TOR_SERVICES):
- for name in files:
- zf.write(os.path.join(root, name))
-
- # The tor config file has the ATHS client names required to restore
- # the ATHS info. These names are also in the the specific client_key file
- # but backing up this file makes it easier than parsing the files during a
- # restore.
- zf.write(TOR_CONFIG)
-
-
-def encrypt_zip_file(zf_fn):
- # Encrypt the backup zip file with the application's gpg public key
- gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
- e_fn = '{}.gpg'.format(zf_fn)
-
- stream = open(zf_fn, "rb")
- gpg.encrypt_file(stream, config.JOURNALIST_KEY, always_trust='True', output=e_fn)
-
-
-def main():
- # name append a timestamp to the sd-backup zip filename
- dt = str(datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
- zf_fn = 'sd-backup-{}.zip'.format(dt)
- with zipfile.ZipFile(zf_fn, 'w') as zf:
- collect_config_file(zf)
- collect_securedrop_data_root(zf)
- collect_custom_header_image(zf)
- collect_tor_files(zf)
- encrypt_zip_file(zf_fn)
- print zf_fn
-
-if __name__ == "__main__":
- main()
diff --git a/install_files/ansible-base/roles/backup/files/0.3_restore.py b/install_files/ansible-base/roles/backup/files/0.3_restore.py
deleted file mode 100755
--- a/install_files/ansible-base/roles/backup/files/0.3_restore.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/python2.7
-"""
-
-This script and decrypted backup zip should be copied to the App server
-and run by the anisble plabook. When run (as root), it restores the 0.3
-backup file.
-
-python 0.3_restore.py sd-backup-TIMESTAMP.zip
-
-"""
-
-import sys
-import os
-import re
-import zipfile
-import subprocess
-import shutil
-from datetime import datetime
-from operator import itemgetter
-import calendar
-import traceback
-
-
-def replace_prefix(path, p1, p2):
- """
- Replace p1 in path with p2
-
- >>> replace_prefix("/tmp/files/foo.bar", "/tmp", "/home/me")
- "home/me/files/foo.bar"
- """
- common_prefix = os.path.commonprefix([path, p1])
- if common_prefix:
- assert path.find(common_prefix) == 0
- # +1 so chop off the next path separator, which otherwise becomes a
- # leading path separate and confuses os.path.join
- path = path[len(common_prefix)+1:]
- return os.path.join(p2, path)
-
-
-def extract_to_path(archive, member, path, user):
- """
- Extract from the zip archive `archive` the member `member` and write it to
- `path`, preserving file metadata and chown'ing the file using `user`
- """
- # Create all upper directories if necessary
- upperdirs = os.path.dirname(path)
- if upperdirs and not os.path.exists(upperdirs):
- os.makedirs(upperdirs)
-
- with archive.open(member) as source, file(path, "wb") as target:
- shutil.copyfileobj(source, target)
-
- # Update the timestamps as well (as best we can, thanks, conversion to
- # localtime). This only actually works if the .zip was created on a
- # machine where the timezone was set to UTC, but it might be good
- # enough since we just need the relative order of timestamps (they will
- # all be normalized anyway).
- if hasattr(member, 'date_time'):
- timestamp = calendar.timegm(member.date_time)
- os.utime(path, (timestamp, timestamp))
-
- ug = "{}:{}".format(user, user)
- subprocess.call(['chown', '-R', ug, path])
-
-
-def restore_config_file(zf):
- print "* Migrating SecureDrop config file from backup..."
-
- # Restore the original config file
- for zi in zf.infolist():
- if "var/www/securedrop/config.py" in zi.filename:
- extract_to_path(zf, "var/www/securedrop/config.py", "/var/www/securedrop/config.py", "www-data")
-
-
-def restore_securedrop_root(zf):
- print "* Migrating directories from SECUREDROP_ROOT..."
-
- # Restore the original source directories and key files
- for zi in zf.infolist():
- if "var/lib/securedrop/store" in zi.filename:
- extract_to_path(zf, zi, replace_prefix(zi.filename,
- "var/lib/securedrop/store", "/var/lib/securedrop/store"), "www-data")
- elif "var/lib/securedrop/keys" in zi.filename:
- # TODO: is it a bad idea to migrate the random_seed from the
- # previous installation?
- extract_to_path(zf, zi, replace_prefix(zi.filename,
- "var/lib/securedrop/keys", "/var/lib/securedrop/keys"), "www-data")
-
-
-def restore_database(zf):
- print "* Migrating database..."
-
- extract_to_path(zf, "var/lib/securedrop/db.sqlite", "/var/lib/securedrop/db.sqlite", "www-data")
-
-
-def restore_custom_header_image(zf):
- print "* Migrating custom header image..."
- extract_to_path(zf,
- "var/www/securedrop/static/i/logo.png",
- "/var/www/securedrop/static/i/logo.png", "www-data")
-
-
-def restore_tor_files(zf):
- tor_root_dir = "/var/lib/tor"
- ths_root_dir = os.path.join(tor_root_dir, "services")
- source_ths_dir = os.path.join(ths_root_dir, "source")
- document_ths_dir = os.path.join(ths_root_dir, "document")
-
- print "* Deleting previous source THS interface..."
-
- for fn in os.listdir(source_ths_dir):
- os.remove(os.path.join(source_ths_dir, fn))
-
- print "* Deleting previous document ATHS interface..."
-
- for fn in os.listdir(document_ths_dir):
- os.remove(os.path.join(document_ths_dir, fn))
-
- print "* Migrating source and document interface .onion..."
-
- for zi in zf.infolist():
- if "var/lib/tor/services/source" in zi.filename:
- extract_to_path(zf, zi, replace_prefix(zi.filename,
- "var/lib/tor/services/source", "/var/lib/tor/services/source"), "debian-tor")
- elif "var/lib/tor/services/document" in zi.filename:
- extract_to_path(zf, zi, replace_prefix(zi.filename,
- "var/lib/tor/services/document", "/var/lib/tor/services/document"), "debian-tor")
-
- # Reload Tor to trigger registering the old Tor Hidden Services
- # reloading Tor compared to restarting tor will not break the current tor
- # connections for SSH
- subprocess.call(['service', 'tor', 'reload'])
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.3_restore.py <filename>\n\n <filename>\tPath to a SecureDrop 0.3 backup .zip file created by 0.3_collect.py"
- sys.exit(1)
-
- try:
- zf_fn = sys.argv[1]
- with zipfile.ZipFile(zf_fn, 'r') as zf:
- restore_config_file(zf)
- restore_securedrop_root(zf)
- restore_database(zf)
- restore_custom_header_image(zf)
- restore_tor_files(zf)
- except:
- print "\n!!! Something went wrong, please file an issue.\n"
- print traceback.format_exc()
- else:
- print "Done!"
-
-if __name__ == "__main__":
- main()
diff --git a/install_files/ansible-base/roles/backup/files/backup.py b/install_files/ansible-base/roles/backup/files/backup.py
new file mode 100755
--- /dev/null
+++ b/install_files/ansible-base/roles/backup/files/backup.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python2.7
+"""
+This script is copied to the App server and run by the Ansible playbook. When
+run (as root), it collects all of the necessary information to backup the 0.3
+system and stores it in /tmp/sd-backup-0.3-TIME_STAMP.tar.gz.
+"""
+
+from datetime import datetime
+import os
+import tarfile
+
+def main():
+ backup_filename = 'sd-backup-{}.tar.gz'.format(
+ datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
+
+ # This code assumes everything is in the default locations.
+ sd_data = '/var/lib/securedrop'
+
+ sd_code = '/var/www/securedrop'
+ sd_config = os.path.join(sd_code, "config.py")
+ sd_custom_logo = os.path.join(sd_code, "static/i/logo.png")
+
+ tor_hidden_services = "/var/lib/tor/services"
+ torrc = "/etc/tor/torrc"
+
+ with tarfile.open(backup_filename, 'w:gz') as backup:
+ backup.add(sd_config)
+ backup.add(sd_custom_logo)
+ backup.add(sd_data)
+ backup.add(tor_hidden_services)
+ backup.add(torrc)
+
+ print backup_filename
+
+if __name__ == "__main__":
+ main()
diff --git a/install_files/ansible-base/roles/backup/files/restore.py b/install_files/ansible-base/roles/backup/files/restore.py
new file mode 100755
--- /dev/null
+++ b/install_files/ansible-base/roles/backup/files/restore.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python2.7
+"""
+This script and backup archive should be copied to the App server and run by
+the Ansible playbook. When run (as root), it restores the contents of the 0.3
+backup file to the machine it's run on.
+
+python restore.py sd-backup-TIMESTAMP.tar.gz
+"""
+
+import os
+import subprocess
+import sys
+import tarfile
+
+
+def verify_args():
+ usage = """
+Usage: restore.py <backup file>
+
+ <backup file> Path to a SecureDrop 0.3 backup created by backup.py"
+ """
+ if len(sys.argv) != 2:
+ print(usage)
+ sys.exit(1)
+
+ if not os.path.exists(sys.argv[1]):
+ print("<backup file> '{}' not found".format(sys.argv(1)))
+ sys.exit(1)
+
+ if os.geteuid() != 0:
+ print("This program must be run as root!")
+ sys.exit(1)
+
+
+def main():
+ verify_args()
+
+ with tarfile.open(sys.argv[1], 'r:*') as backup:
+ # This assumes that both the old installation (source of the backup)
+ # and the new installation (destination of the restore) used the
+ # default paths for various locations.
+ backup.extractall(path='/')
+
+ # Reload Tor and the web server so they pick up the new configuration
+ # If the process exits with a non-zero return code, raises an exception.
+ subprocess.check_call(['service', 'apache2', 'restart'])
+ subprocess.check_call(['service', 'tor', 'reload'])
+
+if __name__ == "__main__":
+ main()
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -5,6 +5,7 @@
from Crypto.Random import random
import gnupg
+from gnupg._util import _is_stream, _make_binary_stream
import scrypt
import config
@@ -154,22 +155,21 @@ def encrypt(plaintext, fingerprints, output=None):
fingerprints = [fingerprints, ]
fingerprints = [fpr.replace(' ', '') for fpr in fingerprints]
- if isinstance(plaintext, unicode):
- plaintext = plaintext.encode('utf8')
+ if not _is_stream(plaintext):
+ plaintext = _make_binary_stream(plaintext, "utf_8")
- encrypt_fn = gpg.encrypt if isinstance(plaintext, str) else gpg.encrypt_file
- out = encrypt_fn(plaintext,
- *fingerprints,
- output=output,
- always_trust=True,
- armor=False)
+ out = gpg.encrypt(plaintext,
+ *fingerprints,
+ output=output,
+ always_trust=True,
+ armor=False)
if out.ok:
return out.data
else:
raise CryptoException(out.stderr)
-def decrypt(secret, plain_text):
+def decrypt(secret, ciphertext):
"""
>>> key = genkeypair('randomid', 'randomid')
>>> decrypt('randomid', 'randomid',
@@ -178,8 +178,7 @@ def decrypt(secret, plain_text):
'Goodbye, cruel world!'
"""
hashed_codename = hash_codename(secret, salt=SCRYPT_GPG_PEPPER)
- return gpg.decrypt(plain_text, passphrase=hashed_codename).data
-
+ return gpg.decrypt(ciphertext, passphrase=hashed_codename).data
if __name__ == "__main__":
import doctest
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -97,7 +97,7 @@ def test():
os.environ['SECUREDROP_ENV'] = 'test'
import config
_start_test_rqworker(config)
- test_cmds = ["py.test", "./test.sh"]
+ test_cmds = [["py.test", "--cov"], "./test.sh"]
test_rc = int(any([subprocess.call(cmd) for cmd in test_cmds]))
_stop_test_rqworker()
sys.exit(test_rc)
@@ -110,7 +110,7 @@ def test_unit():
os.environ['SECUREDROP_ENV'] = 'test'
import config
_start_test_rqworker(config)
- test_rc = int(subprocess.call("py.test"))
+ test_rc = int(subprocess.call(["py.test", "--cov"]))
_stop_test_rqworker()
sys.exit(test_rc)
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -6,6 +6,7 @@
from Crypto.Random import random
from Crypto.Util import Counter
+from gnupg._util import _STREAMLIKE_TYPES
class SecureTemporaryFile(_TemporaryFileWrapper):
"""Temporary file that is ephemerally encrypted on the fly.
@@ -77,3 +78,8 @@ def read(self, count=None):
def close(self):
return _TemporaryFileWrapper.close(self)
+
+# python-gnupg will not recognize our SecureTemporaryFile as a stream-like type
+# and will attempt to call encode on it, thinking it's a string-like type. To
+# avoid this we add it the list of stream-like types.
+_STREAMLIKE_TYPES.append(_TemporaryFileWrapper)
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -9,7 +9,7 @@
from threading import Thread
import operator
from flask import (Flask, request, render_template, session, redirect, url_for,
- flash, abort, g, send_file)
+ flash, abort, g, send_file, Markup)
from flask_wtf.csrf import CsrfProtect
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
@@ -120,7 +120,7 @@ def index():
return render_template('index.html')
-def generate_unique_codename(num_words):
+def generate_unique_codename(num_words=7):
"""Generate random codenames until we get an unused one"""
while True:
codename = crypto_util.genrandomid(num_words)
@@ -146,22 +146,14 @@ def generate_unique_codename(num_words):
@app.route('/generate', methods=('GET', 'POST'))
def generate():
- # Popping this key prevents errors when a logged in user returns to /generate.
- # TODO: is this the best experience? A logged in user will be automatically
- # logged out if they navigate to /generate by accident, which could be
- # confusing. It might be better to instead redirect them to the lookup
- # page, or inform them that they're logged in.
- session.pop('logged_in', None)
-
- num_words = 7
- if request.method == 'POST':
- num_words = int(request.form['number-words'])
- if num_words not in range(7, 11):
- abort(403)
+ if logged_in():
+ flash("You were redirected because you are already logged in. If you want"
+ "to create a new account, you should log out first.", "notification")
+ return redirect(url_for('lookup'))
- codename = generate_unique_codename(num_words)
+ codename = generate_unique_codename()
session['codename'] = codename
- return render_template('generate.html', codename=codename, num_words=num_words)
+ return render_template('generate.html', codename=codename)
@app.route('/create', methods=['POST'])
@@ -345,6 +337,15 @@ def login():
return render_template('login.html')
[email protected]('/logout')
+def logout():
+ if logged_in():
+ session.clear()
+ tor_msg = render_template('logout_flashed_message.html')
+ flash(Markup(tor_msg), "error")
+ return redirect(url_for('index'))
+
+
@app.route('/howto-disable-js')
def howto_disable_js():
return render_template("howto-disable-js.html")
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.3.4'
+__version__ = '0.3.12'
diff --git a/tails_files/securedrop_init.py b/tails_files/securedrop_init.py
--- a/tails_files/securedrop_init.py
+++ b/tails_files/securedrop_init.py
@@ -1,44 +1,47 @@
-#!/usr/bin/env python
+#!/usr/bin/python
import os
import sys
import subprocess
-if __name__ == '__main__':
- # check for root
- if os.geteuid() != 0:
- sys.exit('You need to run this as root')
+# check for root
+if os.geteuid() != 0:
+ sys.exit('You need to run this as root')
- # paths
- path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'
- path_torrc_backup = '/etc/tor/torrc.bak'
- path_torrc = '/etc/tor/torrc'
+# paths
+path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'
+path_torrc_backup = '/etc/tor/torrc.bak'
+path_torrc = '/etc/tor/torrc'
- # load torrc_additions
- if os.path.isfile(path_torrc_additions):
- torrc_additions = open(path_torrc_additions).read()
- else:
- sys.exit('Error opening {0} for reading'.format(path_torrc_additions))
+# load torrc_additions
+if os.path.isfile(path_torrc_additions):
+ torrc_additions = open(path_torrc_additions).read()
+else:
+ sys.exit('Error opening {0} for reading'.format(path_torrc_additions))
- # load torrc
- if os.path.isfile(path_torrc_backup):
- torrc = open(path_torrc_backup).read()
+# load torrc
+if os.path.isfile(path_torrc_backup):
+ torrc = open(path_torrc_backup).read()
+else:
+ if os.path.isfile(path_torrc):
+ torrc = open(path_torrc).read()
else:
- if os.path.isfile(path_torrc):
- torrc = open(path_torrc).read()
- else:
- sys.exit('Error opening {0} for reading'.format(path_torrc))
+ sys.exit('Error opening {0} for reading'.format(path_torrc))
- # save a backup
- open(path_torrc_backup, 'w').write(torrc)
+ # save a backup
+ open(path_torrc_backup, 'w').write(torrc)
- # append the additions
- open(path_torrc, 'w').write(torrc + torrc_additions)
+# append the additions
+open(path_torrc, 'w').write(torrc + torrc_additions)
- # reload tor
- subprocess.call(['/usr/sbin/service', 'tor', 'reload'])
+# reload tor
+try:
+ subprocess.check_call(['systemctl', 'reload', '[email protected]'])
+except subprocess.CalledProcessError:
+ sys.exit('Error reloading Tor')
- # success
- subprocess.call(['/usr/bin/sudo', '-u', 'amnesia', '/usr/bin/notify-send', '-i', '/home/amnesia/Persistent/.securedrop/securedrop_icon.png',
- 'Updated torrc!', 'You can now connect to your SecureDrop\ndocument interface.'])
+# notify the user
+subprocess.call(['tails-notify-user',
+ 'SecureDrop successfully auto-configured!',
+ 'You can now access the Document Interface.\nIf you are an admin, you can now SSH to the servers.'])
| diff --git a/docs/development/spec_tests.rst b/docs/development/spec_tests.rst
new file mode 100644
--- /dev/null
+++ b/docs/development/spec_tests.rst
@@ -0,0 +1,100 @@
+Serverspec Tests
+================
+
+serverspec_ tests verify the end state of the vagrant machines. Any
+changes to the Ansible configuration should have a corresponding
+spectest.
+
+.. _serverspec: http://serverspec.org
+
+Install directions (Ubuntu)
+---------------------------
+
+.. code:: sh
+
+ apt-get install bundler
+ cd spec_tests/
+ bundle update
+
+Running the tests
+-----------------
+
+.. code:: sh
+
+ cd spec_tests/
+ bundle exec rake spec
+
+This will run the tests against all configured hosts, specifically:
+
+- development
+- app-staging
+- mon-staging
+- build
+
+In order to run the tests, each VM will be created and provisioned, if
+necessary. Running all VMs concurrently may cause performance
+problems if you have less than 8GB of RAM. You can isolate specific
+machines for faster testing:
+
+.. code:: sh
+
+ cd spec_tests
+ bundle exec rake --tasks # check output for desired machine
+ bundle exec rake spec:development
+
+Updating the tests
+------------------
+
+Changes to the ansible config should result in failing spectests, but
+only if an existing task was modified. If you add a new task, make
+sure to add a corresponding spectest to validate that state after a
+new provisioning run. Tests import variables from separate YAML files
+than the Ansible playbooks: ::
+
+ spec_tests/spec/vars
+ ├── development.yml
+ └── staging.yml
+
+Any variable changes in the Ansible config should have a corresponding
+entry in these vars files. These vars are dynamically loaded for each
+host via the ``spec_helper.rb`` file. Make sure to add your tests to
+relevant location for the host you plan to test: ::
+
+ spec_tests/spec/app-staging
+ ├── apache_spec.rb
+ ├── apparmor_spec.rb
+ ├── iptables_spec.rb
+ ├── ossec_agent_spec.rb
+ ├── securedrop_app_spec.rb
+ ├── securedrop_app_test_spec.rb
+ └── tor_spec.rb
+
+In the example above, to add a new test for the ``app-staging`` host,
+add a new file to the ``spec_tests/spec/app-staging`` directory.
+
+Spectest layout
+---------------
+
+The serverspec tests are mostly broken up according to machines in the
+Vagrantfile: ::
+
+ spec_tests/spec
+ ├── app-staging
+ ├── build
+ ├── common-development
+ ├── common-staging
+ ├── development
+ ├── mon-staging
+ └── vars
+
+There are a few exceptions:
+
+- ``common-development`` shares tests between ``development`` and
+ ``app-staging``
+- ``common-staging`` shares tests between ``app-staging`` and
+ ``mon-staging``
+
+Ideally the serverspec tests would be broken up according to roles,
+mirroring the Ansible configuration. Prior to the reorganization of
+the Ansible layout, the tests are rather tightly coupled to hosts. The
+layout of spectests is therefore subject to change.
diff --git a/docs/images/printer_setup_guide/bad_test_page.png b/docs/images/printer_setup_guide/bad_test_page.png
new file mode 100644
Binary files /dev/null and b/docs/images/printer_setup_guide/bad_test_page.png differ
diff --git a/docs/images/printer_setup_guide/good_test_page.png b/docs/images/printer_setup_guide/good_test_page.png
new file mode 100644
Binary files /dev/null and b/docs/images/printer_setup_guide/good_test_page.png differ
diff --git a/docs/images/printer_setup_guide/print_test_page.png b/docs/images/printer_setup_guide/print_test_page.png
new file mode 100755
Binary files /dev/null and b/docs/images/printer_setup_guide/print_test_page.png differ
diff --git a/docs/spec_tests.md b/docs/spec_tests.md
deleted file mode 100644
--- a/docs/spec_tests.md
+++ /dev/null
@@ -1,11 +0,0 @@
-##Running Spec test cheat sheet
-
-serverspec and rspec tests verify the end state of app and monitor servers. The tests are ran remotely from your host system.
-
-###Ubuntu install directions
-
-`apt-get install rake bundler ruby-rspec-core ruby-serverspec`
-
-`cd spec_tests/`
-
-rspec spec
diff --git a/docs/test_the_installation.rst b/docs/test_the_installation.rst
new file mode 100644
--- /dev/null
+++ b/docs/test_the_installation.rst
@@ -0,0 +1,106 @@
+Test the Installation
+=====================
+
+Test connectivity
+-----------------
+
+SSH to both servers over Tor
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+On the Admin Workstation, you should be able to SSH to the App
+Server and the Monitor Server. ::
+
+ $ ssh app
+ $ ssh mon
+
+The SSH aliases should have been configured automatically by running
+the ``install.sh`` script. If you're unable to connect via aliases,
+try using the verbose command format to troubleshoot: ::
+
+ $ ssh <username>@<app .onion>
+ $ ssh <username>@<mon .onion>
+
+.. tip:: You can find the Onion URLs for SSH in ``app-ssh-aths`` and
+ ``mon-ssh-aths`` inside the ``install_files/ansible-base`` directory.
+
+Log in to both servers via TTY
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+All access to the SecureDrop servers should be performed over SSH from the
+Admin Workstation. To aid in troubleshooting, physical logins via TTY are
+supported, but require 2FA to be configured. See the :doc:`2FA setup guide
+<google_authenticator>` for information how to enable console logins.
+
+Test the 2FA functionality by connecting a keyboard and display to each server,
+then login with the Admin username. You will need:
+
+* sudo password for the Admin username
+* TOTP code from a 2FA app such as Google Authenticator or FreeOTP
+
+Confirm that logging in via TTY prompts for a 2FA code, and that the code
+generated by your smartphone app permits logging in to an interactive shell.
+
+Sanity-check the install
+------------------------
+
+On each server:
+
+#. Check that you can execute privileged commands by running ``sudo su``.
+#. Verify that you are booted into a grsec kernel: run ``uname -r``
+ and verify that the name of the running kernel ends with ``-grsec``.
+#. Check the AppArmor status with ``sudo aa-status``. On a production
+ instance all profiles should be in enforce mode.
+#. Check the current applied iptables rules with ``iptables-save``. It
+ should output *approximately* 50 lines.
+#. You should have received an email alert from OSSEC when it first
+ started. If not, review our :doc:`OSSEC Alerts
+ Guide <ossec_alerts>`.
+
+Test the web interfaces
+-----------------------
+
+#. Make sure the Source Interface is available, and that you can make a
+ submission.
+
+ - Do this by opening the Tor Browser and navigating to the onion
+ URL from ``app-source-ths``. Proceed through the codename
+ generation (copy this down somewhere) and you can submit a
+ message or attach any random unimportant file.
+ - Usage of the Source Interface is covered by our :doc:`Source User
+ Manual <source>`.
+
+#. Test that you can access the Document Interface, and that you can log
+ in as the admin user you just created.
+
+ - Open the Tor Browser and navigate to the onion URL from
+ app-document-aths. Enter your password and two-factor
+ authentication code to log in.
+ - If you have problems logging in to the Admin/Document Interface,
+ SSH to the App Server and restart the ntp daemon to synchronize
+ the time: ``sudo service ntp restart``. Also check that your
+ smartphone's time is accurate and set to network time in its
+ device settings.
+
+#. Test replying to the test submission.
+
+ - While logged in as an admin, you can send a reply to the test
+ source submission you made earlier.
+ - Usage of the Document Interface is covered by our :doc:`Journalist
+ User Manual <journalist>`.
+
+#. Test that the source received the reply.
+
+ - Within Tor Browser, navigate back to the app-source-ths URL and
+ use your previous test source codename to log in (or reload the
+ page if it's still open) and check that the reply you just made
+ is present.
+
+#. We highly recommend that you create persistent bookmarks for the
+ Source and Document Interface addresses within Tor Browser.
+
+#. Remove the test submissions you made prior to putting SecureDrop to
+ real use. On the main Document Interface page, select all sources and
+ click 'Delete selected'.
+
+Once you've tested the installation and verified that everything is
+working, see :doc:`How to Use SecureDrop <journalist>`.
diff --git a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
--- a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
+++ b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
@@ -6,8 +6,52 @@
apt: name="{{ item }}" state=latest
with_items: test_apt_dependencies
-- name: copy xvfb init script to /etc/init.d
- copy: src=xvfb dest=/etc/init.d/xvfb owner=root mode=700
+# Selenium 3 makes breaking changes with the 2.X API, and requires the
+# installation of the Mozilla geckodriver. Since the Aaron Swartz Day Hackathon
+# is approaching, which will involve many new external contributors, we've
+# decided to play it as safe as possible by downgrading Firefox to the latest
+# version (46.0.1) that is compatible with the last 2.X series Selenium release
+# (2.53.6). After the Hackathon, we'll resolve the geckodriver business and
+# remove the following three tasks (as well as add firefox back to the
+# `test_apt_dependencies` list).
+- name: Download Firefox 46.0.1 for compatibility with Selenium 2.53.6.
+ sudo: no
+ get_url:
+ # Since the whole tasklisk is run as root, the ansible_env.HOME fact is
+ # /root. Since this command doesn't need to be run as root and is part of a
+ # crutch anyway, I've just hardcoded /home/vagrant.
+ dest: /home/vagrant/
+ url: https://launchpad.net/~ubuntu-mozilla-security/+archive/ubuntu/ppa/+build/9727836/+files/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb
+ sha256sum: 88d25053306d33658580973b063cd459a56e3596a3a298c1fb8ab1d52171d860
+ tags:
+ - apt
+
+- name: Install dependencies for Firefox 46.0.1.
+ apt:
+ name: "{{ item }}"
+ with_items:
+ - libasound2
+ - libcairo-gobject2
+ - libgtk-3-0
+ - libstartup-notification0
+ tags:
+ - apt
+
+- name: Install Firefox 46.0.1 for compatibility with Selenium 2.53.6.
+ apt:
+ deb: /home/vagrant/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb
+ tags:
+ - apt
+
+- name: Copy xvfb init script.
+ copy:
+ src: xvfb
+ dest: /etc/init.d/xvfb
+ owner: root
+ mode: '700'
+ tags:
+ - xvfb
+ - permissions
- name: update rc.d to run xvfb at boot
command: "update-rc.d xvfb defaults"
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -76,6 +76,11 @@ def start_journalist_server():
self.driver = self._create_webdriver()
+ # Set window size and position explicitly to avoid potential bugs due
+ # to discrepancies between environments.
+ self.driver.set_window_position(0, 0);
+ self.driver.set_window_size(1024, 768);
+
# Poll the DOM briefly to wait for elements. It appears .click() does
# not always do a good job waiting for the page to load, or perhaps
# Firefox takes too long to render it (#399)
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -1,5 +1,6 @@
import tempfile
-
+from selenium.webdriver.common.action_chains import ActionChains
+from selenium.webdriver.common.by import By
class SourceNavigationSteps():
@@ -9,7 +10,31 @@ def _source_visits_source_homepage(self):
self.assertEqual("SecureDrop | Protecting Journalists and Sources", self.driver.title)
def _source_chooses_to_submit_documents(self):
- self.driver.find_element_by_id('submit-documents-button').click()
+ # First move the cursor to a known position in case it happens to
+ # be hovering over one of the buttons we are testing below.
+ header_image = self.driver.find_element_by_id('header')
+ ActionChains(self.driver).move_to_element(header_image).perform()
+
+ # It's the source's first time visiting this SecureDrop site, so they
+ # choose to "Submit Documents".
+ submit_button = self.driver.find_element_by_id('submit-documents-button')
+
+ submit_button_icon = self.driver.find_element_by_css_selector(
+ 'a#submit-documents-button > img.off-hover')
+ self.assertTrue(submit_button_icon.is_displayed())
+
+ # The source hovers their cursor over the button, and the visual style
+ # of the button changes to encourage them to click it.
+ ActionChains(self.driver).move_to_element(submit_button).perform()
+
+ ## Let's make sure toggling the icon image with the hover state is working.
+ self.assertFalse(submit_button_icon.is_displayed())
+ submit_button_hover_icon = self.driver.find_element_by_css_selector(
+ 'a#submit-documents-button > img.on-hover')
+ self.assertTrue(submit_button_hover_icon.is_displayed())
+
+ # The source clicks the submit button.
+ submit_button.click()
codename = self.driver.find_element_by_css_selector('#codename')
@@ -19,7 +44,22 @@ def _source_chooses_to_submit_documents(self):
def _source_continues_to_submit_page(self):
continue_button = self.driver.find_element_by_id('continue-button')
+ continue_button_icon = self.driver.find_element_by_css_selector(
+ 'button#continue-button > img.off-hover')
+ self.assertTrue(continue_button_icon.is_displayed())
+
+ ## Hover over the continue button test toggle the icon images with the
+ ## hover state.
+ ActionChains(self.driver).move_to_element(continue_button).perform()
+ self.assertFalse(continue_button_icon.is_displayed())
+
+ continue_button_hover_icon = self.driver.find_element_by_css_selector(
+ 'button#continue-button img.on-hover'
+ )
+ self.assertTrue(continue_button_hover_icon.is_displayed())
+
continue_button.click()
+
headline = self.driver.find_element_by_class_name('headline')
self.assertEqual('Submit documents and messages', headline.text)
@@ -34,8 +74,14 @@ def _source_submits_a_file(self):
file_upload_box = self.driver.find_element_by_css_selector('[name=fh]')
file_upload_box.send_keys(filename)
- submit_button = self.driver.find_element_by_css_selector(
- 'button[type=submit]')
+ submit_button = self.driver.find_element_by_id('submit-doc-button')
+ ActionChains(self.driver).move_to_element(submit_button).perform()
+
+ toggled_submit_button_icon = self.driver.find_element_by_css_selector(
+ 'button#submit-doc-button img.on-hover'
+ )
+ self.assertTrue(toggled_submit_button_icon.is_displayed())
+
submit_button.click()
notification = self.driver.find_element_by_css_selector('p.notification')
@@ -46,9 +92,17 @@ def _source_submits_a_message(self):
text_box = self.driver.find_element_by_css_selector('[name=msg]')
text_box.send_keys(self.secret_message) # send_keys = type into text box
- submit_button = self.driver.find_element_by_css_selector(
- 'button[type=submit]')
+
+ submit_button = self.driver.find_element_by_id('submit-doc-button')
submit_button.click()
- notification = self.driver.find_element_by_css_selector('p.notification')
- self.assertIn('Thanks for submitting something to SecureDrop! Please check back later for replies.', notification.text)
+ notification = self.driver.find_element_by_css_selector(
+ 'p.notification')
+ self.assertIn('Thanks for submitting something to SecureDrop!'
+ ' Please check back later for replies.',
+ notification.text)
+
+ def _source_logs_out(self):
+ logout_button = self.driver.find_element_by_id('logout').click()
+ notification = self.driver.find_element_by_css_selector('p.error')
+ self.assertIn('Thank you for logging out!', notification.text)
diff --git a/securedrop/tests/functional/submit_and_retrieve_file.py b/securedrop/tests/functional/submit_and_retrieve_file.py
--- a/securedrop/tests/functional/submit_and_retrieve_file.py
+++ b/securedrop/tests/functional/submit_and_retrieve_file.py
@@ -21,6 +21,7 @@ def test_submit_and_retrieve_happy_path(self):
self._source_chooses_to_submit_documents()
self._source_continues_to_submit_page()
self._source_submits_a_file()
+ self._source_logs_out()
self._journalist_logs_in()
self._journalist_checks_messages()
self._journalist_downloads_message()
diff --git a/securedrop/tests/functional/submit_and_retrieve_message.py b/securedrop/tests/functional/submit_and_retrieve_message.py
--- a/securedrop/tests/functional/submit_and_retrieve_message.py
+++ b/securedrop/tests/functional/submit_and_retrieve_message.py
@@ -22,6 +22,7 @@ def test_submit_and_retrieve_happy_path(self):
self._source_chooses_to_submit_documents()
self._source_continues_to_submit_page()
self._source_submits_a_message()
+ self._source_logs_out()
self._journalist_logs_in()
self._journalist_checks_messages()
self._journalist_downloads_message()
diff --git a/securedrop/tests/test_unit_integration.py b/securedrop/tests/test_unit_integration.py
--- a/securedrop/tests/test_unit_integration.py
+++ b/securedrop/tests/test_unit_integration.py
@@ -257,8 +257,21 @@ def test_submit_file(self):
def test_reply_normal(self):
self.helper_test_reply("This is a test reply.", True)
- def test_reply_unicode(self):
- self.helper_test_reply("TeΕekkΓΌrler", True)
+ def test_unicode_reply_with_ansi_env(self):
+ # This makes python-gnupg handle encoding equivalent to if we were
+ # running SD in an environment where os.getenv("LANG") == "C".
+ # Unfortunately, with the way our test suite is set up simply setting
+ # that env var here will not have the desired effect. Instead we
+ # monkey-patch the GPG object that is called crypto_util to imitate the
+ # _encoding attribute it would have had it been initialized in a "C"
+ # environment. See
+ # https://github.com/freedomofpress/securedrop/issues/1360 for context.
+ old_encoding = crypto_util.gpg._encoding
+ crypto_util.gpg._encoding = "ansi_x3.4_1968"
+ try:
+ self.helper_test_reply("α αα»α«αα¦α¦α«α α±α©α α’α±α«α αα±αͺα«α·αα»αΉα¦αα³α’α", True)
+ finally:
+ crypto_util.gpg._encoding = old_encoding
def _can_decrypt_with_key(self, msg, key_fpr, passphrase=None):
"""
diff --git a/securedrop/tests/test_unit_source.py b/securedrop/tests/test_unit_source.py
--- a/securedrop/tests/test_unit_source.py
+++ b/securedrop/tests/test_unit_source.py
@@ -58,24 +58,6 @@ def test_generate(self):
# codename displayed to the source
self.assertEqual(codename, escape(session_codename))
- def test_regenerate_valid_lengths(self):
- """Make sure we can regenerate all valid length codenames"""
- for codename_len in xrange(7, 11):
- response = self.client.post('/generate', data={
- 'number-words': str(codename_len),
- })
- self.assertEqual(response.status_code, 200)
- codename = self._find_codename(response.data)
- self.assertEquals(len(codename.split()), codename_len)
-
- def test_regenerate_invalid_lengths(self):
- """If the codename length is invalid, it should return 403 Forbidden"""
- for codename_len in (2, 999):
- response = self.client.post('/generate', data={
- 'number-words': str(codename_len),
- })
- self.assertEqual(response.status_code, 403)
-
def test_generate_has_login_link(self):
"""The generate page should have a link to remind people to login
if they already have a codename, rather than create a new one.
@@ -86,6 +68,17 @@ def test_generate_has_login_link(self):
already_have_codename_link = soup.select('a#already-have-codename')[0]
self.assertEqual(already_have_codename_link['href'], '/login')
+ def test_generate_already_logged_in(self):
+ self._new_codename()
+ # Make sure it redirects to /lookup when logged in
+ rv = self.client.get('/generate')
+ self.assertEqual(rv.status_code, 302)
+ # Make sure it flashes the message on the lookup page
+ rv = self.client.get('/generate', follow_redirects=True)
+ # Should redirect to /lookup
+ self.assertEqual(rv.status_code, 200)
+ self.assertIn("because you are already logged in.", rv.data)
+
def test_create(self):
with self.client as c:
rv = c.get('/generate')
@@ -130,6 +123,15 @@ def test_login_and_logout(self):
self.assertIn('Sorry, that is not a recognized codename.', rv.data)
self.assertNotIn('logged_in', session)
+ with self.client as c:
+ rv = c.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(rv.status_code, 200)
+ self.assertTrue(session['logged_in'])
+ rv = c.get('/logout', follow_redirects=True)
+ self.assertTrue(not session)
+ self.assertIn('Thank you for logging out!', rv.data)
+
def test_login_with_whitespace(self):
"""Test that codenames with leading or trailing whitespace still work"""
def login_test(codename):
@@ -218,11 +220,26 @@ def test_submit_sanitizes_filename(self, gzipfile):
mode=ANY,
fileobj=ANY)
- def test_tor2web_warning(self):
+ def test_tor2web_warning_headers(self):
rv = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
self.assertEqual(rv.status_code, 200)
self.assertIn("You appear to be using Tor2Web.", rv.data)
+ def test_tor2web_warning(self):
+ rv = self.client.get('/tor2web-warning')
+ self.assertEqual(rv.status_code, 200)
+ self.assertIn("Why is there a warning about Tor2Web?", rv.data)
+
+ def test_why_journalist_key(self):
+ rv = self.client.get('/why-journalist-key')
+ self.assertEqual(rv.status_code, 200)
+ self.assertIn("Why download the journalist's public key?", rv.data)
+
+ def test_howto_disable_js(self):
+ rv = self.client.get('/howto-disable-js')
+ self.assertEqual(rv.status_code, 200)
+ self.assertIn("Turn the Security Slider to High to Protect Your Anonymity", rv.data)
+
@patch('crypto_util.hash_codename')
def test_login_with_overly_long_codename(self, mock_hash_codename):
"""Attempting to login with an overly long codename should result in
| Allow logrotate in Apache AppArmor profile
The AppArmor profile for Apache permits writing to a few discrete log files:
```
/var/log/apache2/document-access.log w,
/var/log/apache2/document-error.log w,
/var/log/apache2/error.log w,
/var/log/apache2/other_vhosts_access.log rw,
/var/log/apache2/source-error.log w,
```
Those hard-coded strings don't permit logrotate behavior, in which a new logfile would be created with a suffix. Here's an example OSSEC notification from a test instance:
> Jul 19 06:33:47 app kernel: [ 5616.463637] audit: type=1400 audit(1437312827.290:34): apparmor="DENIED" operation="file_perm" profile="/usr/sbin/apache2" name="/var/log/apache2/document-error.log.1" pid=1207 comm="apache2" requested_mask="w" denied_mask="w" fsuid=33 ouid=0
Since logrotate is normal, expected behavior, the AppArmor profile for Apache should be updated with more flexible filepaths for log location, which would improve the signal to noise ratio of the OSSEC alerts for admins.
Swapfile not really disabled
The Ansible config tries to disable swapfile on the Application and Monitor Servers, via `swapoff -a`. This works, but only for the current boot cycle. If a machine is configured with a swapfile in `/etc/fstab`, that swapfile will be restored on a subsequent reboot. Since the machines reboot nightly, the `swapoff -a` approach is close to useless.
In order to disable swap effectively, the first-run Ansible config should ensure that no swap entries exist in fstab, removing them if found.
| 2017-04-20T21:17:57Z | [] | [] |
|
freedomofpress/securedrop | 1,668 | freedomofpress__securedrop-1668 | [
"1468"
] | c7ad4b44ef3ad783294918d9c619c4c9a4320fa1 | diff --git a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
new file mode 100644
--- /dev/null
+++ b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+DOCUMENTATION = '''
+---
+module: ossec_urls
+short_description: Gather facts for OSSEC download URLs
+description:
+ - Gather version, checksum, and URL info for OSSEC downloads
+author:
+ - Conor Schaefer (@conorsch)
+ - Freedom of the Press Foundation (@freedomofpress)
+requirements:
+ - requests
+options:
+ ossec_version:
+ description:
+ - version number of release to download
+ default: "2.8.2"
+ required: no
+notes:
+ - The OSSEC version to download is hardcoded to avoid surprises.
+ If you want a newer version than the current default, you should
+ pass the version in via I(ossec_version).
+'''
+EXAMPLES = '''
+- ossec_urls:
+ ossec_version: "2.8.2"
+'''
+
+from StringIO import StringIO
+from urlparse import urljoin
+import re
+
+HAS_REQUESTS = True
+try:
+ import requests
+except ImportError:
+ HAS_REQUESTS = False
+
+
+
+class OSSECURLs():
+
+ def __init__(self, ossec_version):
+ self.ossec_version = ossec_version
+
+ checksums = self.parse_checksums()
+
+ self.ansible_facts = dict(
+ ossec_version=self.ossec_version,
+ ossec_tarball_filename=self.ossec_tarball_filename,
+ ossec_tarball_url=self.ossec_tarball_url,
+ ossec_checksum_filename=self.ossec_checksum_filename,
+ ossec_checksum_url=self.ossec_checksum_url,
+ )
+
+ self.ansible_facts.update(checksums)
+
+ @property
+ def ossec_tarball_filename(self):
+ return "ossec-hids-{}.tar.gz".format(self.ossec_version)
+
+
+ @property
+ def ossec_tarball_url(self):
+ return "https://github.com/ossec/ossec-hids/archive/{}.tar.gz".format(
+ self.ossec_version)
+
+
+ @property
+ def ossec_checksum_url(self):
+ return "https://github.com/ossec/ossec-hids/releases/download/{}/{}".format(
+ self.ossec_version, self.ossec_checksum_filename)
+
+
+ @property
+ def ossec_checksum_filename(self):
+ return "{}-checksum.txt".format(self.ossec_tarball_filename)
+
+
+ def parse_checksums(self):
+ r = requests.get(self.ossec_checksum_url)
+ checksum_regex = re.compile(r'''
+ ^MD5\(
+ '''
+ +re.escape(self.ossec_tarball_filename)+
+ r'''\)=\s+(?P<ossec_md5_checksum>[0-9a-f]{32})\s+
+ SHA1\(
+ '''
+ +re.escape(self.ossec_tarball_filename)+
+ r'''\)=\s+(?P<ossec_sha1_checksum>[0-9a-f]{40})$
+ ''', re.VERBOSE | re.MULTILINE
+ )
+ checksum_list = r.content.rstrip()
+ results = re.match(checksum_regex, checksum_list).groupdict()
+ return results
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ ossec_version=dict(default="2.8.2" ),
+ ),
+ supports_check_mode=False
+ )
+ if not HAS_REQUESTS:
+ module.fail_json(msg='requests required for this module')
+
+ ossec_version = module.params['ossec_version']
+ try:
+ ossec_config = OSSECURLs(ossec_version=ossec_version)
+ except:
+ msg = ("Failed to find checksum information for OSSEC v{}."
+ "Ensure you have the proper release specified, "
+ "and check the download page to confirm: "
+ "http://www.ossec.net/?page_id=19".format(ossec_version))
+ module.fail_json(msg=msg)
+
+ results = ossec_config.ansible_facts
+
+ if results:
+ module.exit_json(changed=False, ansible_facts=results)
+ else:
+ msg = "Failed to fetch OSSEC URL facts."
+ module.fail_json(msg=msg)
+
+
+from ansible.module_utils.basic import *
+main()
| diff --git a/testinfra/build/test_ossec_packages.py b/testinfra/build/test_ossec_packages.py
new file mode 100644
--- /dev/null
+++ b/testinfra/build/test_ossec_packages.py
@@ -0,0 +1,16 @@
+import pytest
+
+
[email protected]('apt_package', [
+ 'inotify-tools',
+ 'libssl-dev',
+ 'make',
+ 'tar',
+ 'unzip',
+])
+def test_build_ossec_apt_dependencies(Package, apt_package):
+ """
+ Ensure that the apt dependencies required for building the OSSEC
+ source deb packages (not the metapackages) are installed.
+ """
+ assert Package(apt_package).is_installed
diff --git a/testinfra/build/test_securedrop_deb_package.py b/testinfra/build/test_securedrop_deb_package.py
--- a/testinfra/build/test_securedrop_deb_package.py
+++ b/testinfra/build/test_securedrop_deb_package.py
@@ -110,7 +110,12 @@ def test_deb_package_control_fields_homepage(File, Command, deb):
securedrop_test_vars.securedrop_version))
# The `--field` option will display all fields if none are specified.
c = Command("dpkg-deb --field {}".format(deb_package.path))
- assert "Homepage: https://securedrop.org" in c.stdout
+ # The OSSEC source packages will have a different homepage;
+ # all other packages should set securedrop.org as homepage.
+ if os.path.basename(deb_package.path).startswith('ossec-'):
+ assert "Homepage: http://ossec.net" in c.stdout
+ else:
+ assert "Homepage: https://securedrop.org" in c.stdout
@pytest.mark.parametrize("deb", deb_packages)
| Consider merging the ossec repo into this one
I'm just going to quote a comment I made in a relevant PR that more or less explains the proposal I'm making and the reasoning behind it:
> @conorsch and I just talked for a minute, and I expressed concerns about the process of building the OSSEC packages via a separate repo being cumbersome. It also seems possible if not likely that a developer could forget to pull, rebuild, and re-copy-over new OSSEC packages as work happens in that repo. I proposed the possibility of merging that repo into this one. By merging the OSSEC repository into the SD repository, I see us gaining two things:
> 1. We could more fully realize the aspiration of this PR, which I see as being to automate the (re-)building and (re-)installation of local deb packages from the latest version of the source for use in the staging test VMs.
> 2. The OSSEC packages might get more love, since they'll be more visible.
---https://github.com/freedomofpress/securedrop/pull/1464#issuecomment-259299390
| 2017-04-25T18:22:44Z | [] | [] |
|
freedomofpress/securedrop | 1,678 | freedomofpress__securedrop-1678 | [
"1675"
] | c6a190af0826b6d1193daef668ce697cfa852ed0 | diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -224,12 +224,18 @@ def get_args():
admin_subp = subps.add_parser('add-admin', help='Add an admin to the '
'application.')
admin_subp.set_defaults(func=add_admin)
+ admin_subp_a = subps.add_parser('add_admin', help='^')
+ admin_subp_a.set_defaults(func=add_admin)
journalist_subp = subps.add_parser('add-journalist', help='Add a '
'journalist to the application.')
journalist_subp.set_defaults(func=add_journalist)
+ journalist_subp_a = subps.add_parser('add_journalist', help='^')
+ journalist_subp_a.set_defaults(func=add_journalist)
delete_user_subp = subps.add_parser('delete-user', help='Delete a user '
'from the application.')
delete_user_subp.set_defaults(func=delete_user)
+ delete_user_subp_a = subps.add_parser('delete_user', help='^')
+ delete_user_subp_a.set_defaults(func=delete_user)
# Reset application state
reset_subp = subps.add_parser('reset', help='DANGER!!! Clears the '
@@ -239,6 +245,8 @@ def get_args():
clean_tmp_subp = subps.add_parser('clean-tmp', help='Cleanup the '
'SecureDrop temp directory.')
clean_tmp_subp.set_defaults(func=clean_tmp)
+ clean_tmp_subp_a = subps.add_parser('clean_tmp', help='^')
+ clean_tmp_subp_a.set_defaults(func=clean_tmp)
return parser
| diff --git a/testinfra/development/test_development_application_settings.py b/testinfra/development/test_development_application_settings.py
--- a/testinfra/development/test_development_application_settings.py
+++ b/testinfra/development/test_development_application_settings.py
@@ -109,15 +109,20 @@ def test_development_app_directories_exist(File):
def test_development_clean_tmp_cron_job(Command, Sudo):
"""
- Ensure cron job for cleaning the temporary directory for the app code exists.
+ Ensure cron job for cleaning the temporary directory for the app code
+ exists. Also, ensure that the older format for the cron job is absent,
+ since we updated manage.py subcommands to use hyphens instead of
+ underscores (e.g. `clean_tmp` -> `clean-tmp`).
"""
with Sudo():
c = Command.check_output('crontab -l')
- # TODO: this should be using property, but the ansible role
- # doesn't use a var, it's hard-coded. update ansible, then fix test.
- # it { should have_entry "@daily #{property['securedrop_code']}/manage.py clean-tmp" }
assert "@daily {}/manage.py clean-tmp".format(sd_test_vars.securedrop_code) in c
+ assert "@daily {}/manage.py clean_tmp".format(sd_test_vars.securedrop_code) not in c
+ assert "clean_tmp".format(sd_test_vars.securedrop_code) not in c
+ # Make sure that the only cron lines are a comment and the actual job.
+ # We don't want any duplicates.
+ assert len(c.split("\n")) == 2
def test_development_default_logo_exists(File):
| Updated `manage.py` subcommands should have aliases
In #1347 we converted the subcommands for the `manage.py` script to use hyphens instead of underscores, e.g. `add_admin` -> `add-admin`. That works well for the most part, assuming existing Admins read the docs on the update, but it will cause problems for currently running instances, because a cron job exists for the `clean_tmp` subcommand, and that subcommand will no longer exist in 0.4. We should do two things to resolve:
1. Implement subcommand aliases in `manage.py` so that the old subcommands still work.
2. Update the Ansible config to ensure state=absent on the old cronjob, and state=present on the new one. Doing so will ensure that Admins running the playbooks do not create duplicate cron jobs.
If we don't do this, the nightly cron job for cleaning the temp directory will fail on all Application Servers going forward. Therefore this'll need to go on the 0.4 milestone, lest we break the temp cleanup logic currently used in production.
| Do you think we need to update all subcommands or would just the one be sufficient? @conorsch
Let's do allβI still find myself typing `./manage.py add_admin` when setting up new accounts, and am frustrated by the lack of backwards compatibility every time. | 2017-04-27T02:10:57Z | [] | [] |
freedomofpress/securedrop | 1,694 | freedomofpress__securedrop-1694 | [
"1693"
] | a98cf85d5b82e1da0e8f4c1182061f72365102d2 | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -190,8 +190,9 @@ def admin_add_user():
Journalist.MIN_PASSWORD_LEN, Journalist.MAX_PASSWORD_LEN
), "error")
except IntegrityError as e:
+ db_session.rollback()
form_valid = False
- if "username is not unique" in str(e):
+ if "UNIQUE constraint failed: journalists.username" in str(e):
flash("That username is already in use",
"error")
else:
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -64,12 +64,6 @@ def add_journalist(): # pragma: no cover
def _add_user(is_admin=False): # pragma: no cover
while True:
username = raw_input('Username: ')
- if Journalist.query.filter_by(username=username).first():
- print('Sorry, that username is already in use.')
- else:
- break
-
- while True:
password = getpass('Password: ')
password_again = getpass('Confirm Password: ')
@@ -106,7 +100,8 @@ def _add_user(is_admin=False): # pragma: no cover
db_session.add(user)
db_session.commit()
except Exception as exc:
- if 'username is not unique' in exc:
+ db_session.rollback()
+ if "UNIQUE constraint failed: journalists.username" in str(exc):
print('ERROR: That username is already taken!')
else:
exc_type, exc_value, exc_traceback = sys.exc_info()
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -97,7 +97,6 @@ def test_user_has_link_to_edit_account_page_in_index_page(self):
"Edit Account")
self.assertIn(edit_account_link, resp.data)
-
def test_admin_has_link_to_admin_index_page_in_index_page(self):
resp = self.client.post(url_for('login'),
data=dict(username=self.admin.username,
@@ -218,6 +217,15 @@ def test_admin_add_user_password_mismatch_warning(self):
is_admin=False))
self.assertIn('Passwords didn', resp.data)
+ def test_admin_add_user_when_username_already_in_use(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username=self.admin.username,
+ password='testtesttest',
+ password_again='testtesttest',
+ is_admin=False))
+ self.assertIn('That username is already in use', resp.data)
+
def test_max_password_length(self):
"""Creating a Journalist with a password that is greater than the
maximum password length should raise an exception"""
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -1,10 +1,43 @@
# -*- coding: utf-8 -*-
import manage
+import mock
+from StringIO import StringIO
+import sys
import unittest
+import __builtin__
+
+import utils
class TestManagePy(unittest.TestCase):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
+
+
+class TestManagementCommand(unittest.TestCase):
+ def setUp(self):
+ utils.env.setup()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ @mock.patch("__builtin__.raw_input", return_value='N')
+ @mock.patch("manage.getpass", return_value='testtesttest')
+ @mock.patch("sys.stdout", new_callable=StringIO)
+ def test_exception_handling_when_duplicate_username(self, mock_raw_input,
+ mock_getpass,
+ mock_stdout):
+ """Regression test for duplicate username logic in manage.py"""
+
+ # Inserting the user for the first time should succeed
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 0)
+ self.assertIn('successfully added', sys.stdout.getvalue())
+
+ # Inserting the user for a second time should fail
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 1)
+ self.assertIn('ERROR: That username is already taken!',
+ sys.stdout.getvalue())
| Handle IntegrityError on /admin/add
# Steps to reproduce
1. Provision development VM
2. Make an administrator account
3. Sign in
4. Click "Admin"
5. Click "Add user" button
6. Fill in the form, setting the username to username of an existing user
7. Click "Add user"
# What should happen
Ideally a flashed message would appear saying "That username is already in use".
# What actually happens
![screen shot 2017-05-08 at 3 36 59 pm](https://cloud.githubusercontent.com/assets/7832803/25828322/68340ec0-3404-11e7-9fd4-10dc39bd0cc9.png)
# Proposed Fix
Catch the exception and flash the friendly error message as suggested above. Whoever fixes this ideally should also write a unit test to guard against future regressions.
| 2017-05-08T23:33:49Z | [] | [] |
|
freedomofpress/securedrop | 1,709 | freedomofpress__securedrop-1709 | [
"1708"
] | 616dada05951c7cb16da55941f673ddba69c27af | diff --git a/install_files/ansible-base/action_plugins/synchronize.py b/install_files/ansible-base/action_plugins/synchronize.py
new file mode 100644
--- /dev/null
+++ b/install_files/ansible-base/action_plugins/synchronize.py
@@ -0,0 +1,415 @@
+# -*- coding: utf-8 -*-
+
+# (c) 2012-2013, Timothy Appnel <[email protected]>
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os.path
+from collections import MutableSequence
+
+from ansible import constants as C
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_text
+from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
+from ansible.plugins.action import ActionBase
+from ansible.plugins import connection_loader
+
+boolean = C.mk_boolean
+
+
+class ActionModule(ActionBase):
+
+ def _get_absolute_path(self, path):
+ original_path = path
+
+ if path.startswith('rsync://'):
+ return path
+
+ if self._task._role is not None:
+ path = self._loader.path_dwim_relative(self._task._role._role_path, 'files', path)
+ else:
+ path = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', path)
+
+ if original_path and original_path[-1] == '/' and path[-1] != '/':
+ # make sure the dwim'd path ends in a trailing "/"
+ # if the original path did
+ path += '/'
+
+ return path
+
+ def _host_is_ipv6_address(self, host):
+ return ':' in host
+
+ def _format_rsync_rsh_target(self, host, path, user):
+ ''' formats rsync rsh target, escaping ipv6 addresses if needed '''
+
+ user_prefix = ''
+
+ if path.startswith('rsync://'):
+ return path
+
+ # If using docker, do not add user information
+ if self._remote_transport not in [ 'docker' ] and user:
+ user_prefix = '%s@' % (user, )
+
+ if self._host_is_ipv6_address(host):
+ return '[%s%s]:%s' % (user_prefix, host, path)
+ else:
+ return '%s%s:%s' % (user_prefix, host, path)
+
+ def _process_origin(self, host, path, user):
+
+ if host not in C.LOCALHOST:
+ return self._format_rsync_rsh_target(host, path, user)
+
+ if ':' not in path and not path.startswith('/'):
+ path = self._get_absolute_path(path=path)
+ return path
+
+ def _process_remote(self, task_args, host, path, user, port_matches_localhost_port):
+ """
+ :arg host: hostname for the path
+ :arg path: file path
+ :arg user: username for the transfer
+ :arg port_matches_localhost_port: boolean whether the remote port
+ matches the port used by localhost's sshd. This is used in
+ conjunction with seeing whether the host is localhost to know
+ if we need to have the module substitute the pathname or if it
+ is a different host (for instance, an ssh tunnelled port or an
+ alternative ssh port to a vagrant host.)
+ """
+ transport = self._connection.transport
+ # If we're connecting to a remote host or we're delegating to another
+ # host or we're connecting to a different ssh instance on the
+ # localhost then we have to format the path as a remote rsync path
+ if host not in C.LOCALHOST or transport != "local" or \
+ (host in C.LOCALHOST and not port_matches_localhost_port):
+ # If we're delegating to non-localhost and but the
+ # inventory_hostname host is localhost then we need the module to
+ # fix up the rsync path to use the controller's public DNS/IP
+ # instead of "localhost"
+ if port_matches_localhost_port and host in C.LOCALHOST:
+ task_args['_substitute_controller'] = True
+ return self._format_rsync_rsh_target(host, path, user)
+
+ if ':' not in path and not path.startswith('/'):
+ path = self._get_absolute_path(path=path)
+ return path
+
+ def _override_module_replaced_vars(self, task_vars):
+ """ Some vars are substituted into the modules. Have to make sure
+ that those are correct for localhost when synchronize creates its own
+ connection to localhost."""
+
+ # Clear the current definition of these variables as they came from the
+ # connection to the remote host
+ if 'ansible_syslog_facility' in task_vars:
+ del task_vars['ansible_syslog_facility']
+ for key in list(task_vars.keys()):
+ if key.startswith("ansible_") and key.endswith("_interpreter"):
+ del task_vars[key]
+
+ # Add the definitions from localhost
+ for host in C.LOCALHOST:
+ if host in task_vars['hostvars']:
+ localhost = task_vars['hostvars'][host]
+ break
+ if 'ansible_syslog_facility' in localhost:
+ task_vars['ansible_syslog_facility'] = localhost['ansible_syslog_facility']
+ for key in localhost:
+ if key.startswith("ansible_") and key.endswith("_interpreter"):
+ task_vars[key] = localhost[key]
+
+ def run(self, tmp=None, task_vars=None):
+ ''' generates params and passes them on to the rsync module '''
+ # When modifying this function be aware of the tricky convolutions
+ # your thoughts have to go through:
+ #
+ # In normal ansible, we connect from controller to inventory_hostname
+ # (playbook's hosts: field) or controller to delegate_to host and run
+ # a module on one of those hosts.
+ #
+ # So things that are directly related to the core of ansible are in
+ # terms of that sort of connection that always originate on the
+ # controller.
+ #
+ # In synchronize we use ansible to connect to either the controller or
+ # to the delegate_to host and then run rsync which makes its own
+ # connection from controller to inventory_hostname or delegate_to to
+ # inventory_hostname.
+ #
+ # That means synchronize needs to have some knowledge of the
+ # controller to inventory_host/delegate host that ansible typically
+ # establishes and use those to construct a command line for rsync to
+ # connect from the inventory_host to the controller/delegate. The
+ # challenge for coders is remembering which leg of the trip is
+ # associated with the conditions that you're checking at any one time.
+ if task_vars is None:
+ task_vars = dict()
+
+ # We make a copy of the args here because we may fail and be asked to
+ # retry. If that happens we don't want to pass the munged args through
+ # to our next invocation. Munged args are single use only.
+ _tmp_args = self._task.args.copy()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+
+ # Store remote connection type
+ self._remote_transport = self._connection.transport
+
+ # Handle docker connection options
+ if self._remote_transport == 'docker':
+ self._docker_cmd = self._connection.docker_cmd
+ if self._play_context.docker_extra_args:
+ self._docker_cmd = "%s %s" % (self._docker_cmd, self._play_context.docker_extra_args)
+
+ # self._connection accounts for delegate_to so
+ # remote_transport is the transport ansible thought it would need
+ # between the controller and the delegate_to host or the controller
+ # and the remote_host if delegate_to isn't set.
+
+ remote_transport = False
+ if self._connection.transport != 'local':
+ remote_transport = True
+
+ try:
+ delegate_to = self._task.delegate_to
+ except (AttributeError, KeyError):
+ delegate_to = None
+
+ # ssh paramiko docker and local are fully supported transports. Anything
+ # else only works with delegate_to
+ if delegate_to is None and self._connection.transport not in ('ssh', 'paramiko', 'local', 'docker'):
+ result['failed'] = True
+ result['msg'] = ("synchronize uses rsync to function. rsync needs to connect to the remote host via ssh, docker client or a direct filesystem "
+ "copy. This remote host is being accessed via %s instead so it cannot work." % self._connection.transport)
+ return result
+
+ use_ssh_args = _tmp_args.pop('use_ssh_args', None)
+
+ # Parameter name needed by the ansible module
+ _tmp_args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'
+
+ # rsync thinks that one end of the connection is localhost and the
+ # other is the host we're running the task for (Note: We use
+ # ansible's delegate_to mechanism to determine which host rsync is
+ # running on so localhost could be a non-controller machine if
+ # delegate_to is used)
+ src_host = '127.0.0.1'
+ inventory_hostname = task_vars.get('inventory_hostname')
+ dest_host_inventory_vars = task_vars['hostvars'].get(inventory_hostname)
+ try:
+ dest_host = dest_host_inventory_vars['ansible_host']
+ except KeyError:
+ dest_host = dest_host_inventory_vars.get('ansible_ssh_host', inventory_hostname)
+
+ dest_host_ids = [hostid for hostid in (dest_host_inventory_vars.get('inventory_hostname'),
+ dest_host_inventory_vars.get('ansible_host'),
+ dest_host_inventory_vars.get('ansible_ssh_host'))
+ if hostid is not None]
+
+ localhost_ports = set()
+ for host in C.LOCALHOST:
+ localhost_vars = task_vars['hostvars'].get(host, {})
+ for port_var in MAGIC_VARIABLE_MAPPING['port']:
+ port = localhost_vars.get(port_var, None)
+ if port:
+ break
+ else:
+ port = C.DEFAULT_REMOTE_PORT
+ localhost_ports.add(port)
+
+ # dest_is_local tells us if the host rsync runs on is the same as the
+ # host rsync puts the files on. This is about *rsync's connection*,
+ # not about the ansible connection to run the module.
+ dest_is_local = False
+ if delegate_to is None and remote_transport is False:
+ dest_is_local = True
+ elif delegate_to is not None and delegate_to in dest_host_ids:
+ dest_is_local = True
+
+ # CHECK FOR NON-DEFAULT SSH PORT
+ inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
+ if _tmp_args.get('dest_port', None) is None:
+ if inv_port is not None:
+ _tmp_args['dest_port'] = inv_port
+
+ # Set use_delegate if we are going to run rsync on a delegated host
+ # instead of localhost
+ use_delegate = False
+ if delegate_to is not None and delegate_to in dest_host_ids:
+ # edge case: explicit delegate and dest_host are the same
+ # so we run rsync on the remote machine targeting its localhost
+ # (itself)
+ dest_host = '127.0.0.1'
+ use_delegate = True
+ elif delegate_to is not None and remote_transport:
+ # If we're delegating to a remote host then we need to use the
+ # delegate_to settings
+ use_delegate = True
+
+ # Delegate to localhost as the source of the rsync unless we've been
+ # told (via delegate_to) that a different host is the source of the
+ # rsync
+ if not use_delegate and remote_transport:
+ # Create a connection to localhost to run rsync on
+ new_stdin = self._connection._new_stdin
+
+ # Unike port, there can be only one shell
+ localhost_shell = None
+ for host in C.LOCALHOST:
+ localhost_vars = task_vars['hostvars'].get(host, {})
+ for shell_var in MAGIC_VARIABLE_MAPPING['shell']:
+ localhost_shell = localhost_vars.get(shell_var, None)
+ if localhost_shell:
+ break
+ if localhost_shell:
+ break
+ else:
+ localhost_shell = os.path.basename(C.DEFAULT_EXECUTABLE)
+ self._play_context.shell = localhost_shell
+
+ # Unike port, there can be only one executable
+ localhost_executable = None
+ for host in C.LOCALHOST:
+ localhost_vars = task_vars['hostvars'].get(host, {})
+ for executable_var in MAGIC_VARIABLE_MAPPING['executable']:
+ localhost_executable = localhost_vars.get(executable_var, None)
+ if localhost_executable:
+ break
+ if localhost_executable:
+ break
+ else:
+ localhost_executable = C.DEFAULT_EXECUTABLE
+ self._play_context.executable = localhost_executable
+
+ new_connection = connection_loader.get('local', self._play_context, new_stdin)
+ self._connection = new_connection
+ self._override_module_replaced_vars(task_vars)
+
+ # SWITCH SRC AND DEST HOST PER MODE
+ if _tmp_args.get('mode', 'push') == 'pull':
+ (dest_host, src_host) = (src_host, dest_host)
+
+ # MUNGE SRC AND DEST PER REMOTE_HOST INFO
+ src = _tmp_args.get('src', None)
+ dest = _tmp_args.get('dest', None)
+ if src is None or dest is None:
+ return dict(failed=True,
+ msg="synchronize requires both src and dest parameters are set")
+
+ if not dest_is_local:
+ # Private key handling
+ private_key = self._play_context.private_key_file
+
+ if private_key is not None:
+ private_key = os.path.expanduser(private_key)
+ _tmp_args['private_key'] = private_key
+
+ # Src and dest rsync "path" handling
+ # Determine if we need a user@
+ user = None
+ if boolean(_tmp_args.get('set_remote_user', 'yes')):
+ if use_delegate:
+ user = task_vars.get('ansible_delegated_vars', dict()).get('ansible_ssh_user', None)
+ if not user:
+ user = C.DEFAULT_REMOTE_USER
+
+ else:
+ user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
+
+ # use the mode to define src and dest's url
+ if _tmp_args.get('mode', 'push') == 'pull':
+ # src is a remote path: <user>@<host>, dest is a local path
+ src = self._process_remote(_tmp_args, src_host, src, user, inv_port in localhost_ports)
+ dest = self._process_origin(dest_host, dest, user)
+ else:
+ # src is a local path, dest is a remote path: <user>@<host>
+ src = self._process_origin(src_host, src, user)
+ dest = self._process_remote(_tmp_args, dest_host, dest, user, inv_port in localhost_ports)
+ else:
+ # Still need to munge paths (to account for roles) even if we aren't
+ # copying files between hosts
+ if not src.startswith('/'):
+ src = self._get_absolute_path(path=src)
+ if not dest.startswith('/'):
+ dest = self._get_absolute_path(path=dest)
+
+ _tmp_args['src'] = src
+ _tmp_args['dest'] = dest
+
+ # Allow custom rsync path argument
+ rsync_path = _tmp_args.get('rsync_path', None)
+
+ # backup original become as we are probably about to unset it
+ become = self._play_context.become
+
+ if not dest_is_local:
+ # don't escalate for docker. doing --rsync-path with docker exec fails
+ # and we can switch directly to the user via docker arguments
+ if self._play_context.become and not rsync_path and self._remote_transport != 'docker':
+ # If no rsync_path is set, become was originally set, and dest is
+ # remote then add privilege escalation here.
+ if self._play_context.become_method == 'sudo':
+ rsync_path = 'sudo rsync'
+ # TODO: have to add in the rest of the become methods here
+
+ # We cannot use privilege escalation on the machine running the
+ # module. Instead we run it on the machine rsync is connecting
+ # to.
+ self._play_context.become = False
+
+ _tmp_args['rsync_path'] = rsync_path
+
+ if use_ssh_args:
+ ssh_args = [
+ getattr(self._play_context, 'ssh_args', ''),
+ getattr(self._play_context, 'ssh_common_args', ''),
+ getattr(self._play_context, 'ssh_extra_args', ''),
+ ]
+ _tmp_args['ssh_args'] = ' '.join([a for a in ssh_args if a])
+
+ # If launching synchronize against docker container
+ # use rsync_opts to support container to override rsh options
+ if self._remote_transport in [ 'docker' ]:
+ # Replicate what we do in the module argumentspec handling for lists
+ if not isinstance(_tmp_args.get('rsync_opts'), MutableSequence):
+ tmp_rsync_opts = _tmp_args.get('rsync_opts', [])
+ if isinstance(tmp_rsync_opts, string_types):
+ tmp_rsync_opts = tmp_rsync_opts.split(',')
+ elif isinstance(tmp_rsync_opts, (int, float)):
+ tmp_rsync_opts = [to_text(tmp_rsync_opts)]
+ _tmp_args['rsync_opts'] = tmp_rsync_opts
+
+ if '--blocking-io' not in _tmp_args['rsync_opts']:
+ _tmp_args['rsync_opts'].append('--blocking-io')
+ if become and self._play_context.become_user:
+ _tmp_args['rsync_opts'].append("--rsh='%s exec -u %s -i'" % (self._docker_cmd, self._play_context.become_user))
+ elif user is not None:
+ _tmp_args['rsync_opts'].append("--rsh='%s exec -u %s -i'" % (self._docker_cmd, user))
+ else:
+ _tmp_args['rsync_opts'].append("--rsh='%s exec -i'" % self._docker_cmd)
+
+ # run the module and store the result
+ result.update(self._execute_module('synchronize', module_args=_tmp_args, task_vars=task_vars))
+
+ if 'SyntaxError' in result.get('exception', result.get('msg', '')):
+ # Emit a warning about using python3 because synchronize is
+ # somewhat unique in running on localhost
+ result['exception'] = result['msg']
+ result['msg'] = ('SyntaxError parsing module. Perhaps invoking "python" on your local (or delegate_to) machine invokes python3. '
+ 'You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this')
+ return result
| diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py
--- a/testinfra/common/test_user_config.py
+++ b/testinfra/common/test_user_config.py
@@ -1,6 +1,7 @@
import os
import pytest
import re
+import getpass
hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
@@ -74,7 +75,9 @@ def test_sudoers_tmux_env_deprecated(File):
admin_user = "vagrant"
if os.environ.get("FPF_CI", None):
- admin_user = os.environ["USER"]
+ admin_user = getpass.getuser()
+ if admin_user == "root":
+ admin_user = "ubuntu"
f = File("/home/{}/.bashrc".format(admin_user))
assert not f.contains("^. \/etc\/bashrc\.securedrop_additions$")
diff --git a/testinfra/development/test_development_environment.py b/testinfra/development/test_development_environment.py
--- a/testinfra/development/test_development_environment.py
+++ b/testinfra/development/test_development_environment.py
@@ -1,7 +1,6 @@
import pytest
import os
-
-hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+import getpass
def test_development_app_dependencies(Package):
"""
@@ -49,8 +48,8 @@ def test_development_pip_dependencies(Command, pip_package, version):
assert "{}=={}".format(pip_package, version) in c.stdout.rstrip()
[email protected](hostenv == 'travis',
- reason="Bashrc tests dont make sense on Travis")
[email protected](getpass.getuser() != 'vagrant',
+ reason="vagrant bashrc checks dont make sense in CI")
def test_development_securedrop_env_var(File):
"""
Ensure that the SECUREDROP_ENV var is set to "dev".
diff --git a/testinfra/test.py b/testinfra/test.py
--- a/testinfra/test.py
+++ b/testinfra/test.py
@@ -47,6 +47,7 @@ def run_testinfra(target_host, verbose=True):
Handler for executing testinfra against `target_host`.
Queries list of roles via helper def `get_target_roles`.
"""
+ conn_type = "ssh"
target_roles = get_target_roles(target_host)
if verbose:
# Print informative output prior to test run.
@@ -87,6 +88,8 @@ def run_testinfra(target_host, verbose=True):
ssh_config_path = ""
testinfra_command_template = "testinfra -vv {target_roles}"
else:
+ if target_host == "build":
+ conn_type = "docker"
ssh_config_path = "{}/.ssh/sshconfig-securedrop-ci-{}".format(
os.environ["HOME"],
os.environ["BUILD_NUM"])
@@ -94,7 +97,7 @@ def run_testinfra(target_host, verbose=True):
testinfra \
-vv \
-n 8 \
- --connection ssh \
+ --connection {connection_type} \
--ssh-config \
{ssh_config_path}\
--junit-xml=./{target_host}-results.xml\
@@ -119,6 +122,7 @@ def run_testinfra(target_host, verbose=True):
testinfra_command = testinfra_command_template.format(
target_host=target_host,
ssh_config_path=ssh_config_path,
+ connection_type=conn_type,
target_roles=" ".join(target_roles),
).split()
| Convert CI `build` VM to docker container
During the `Great CI Event` (GCE) of 2017 - where `staging` provisioning and tests were thrown into the pipeline, the price of such greatness was a stab to FPF wallets. Here is a dramatic re-enactment of what went down:
:hocho: :moneybag:
To explain, we spent more than desired on cloud services during the course of introducing the automated CI system for a few days. There are a number of initiatives currently underway to reign in these cloud costs. The first low hanging fruit is to convert the `build` vm (the machine that handles packaging debian packages) into a docker instance. This requires a number of tweaks to playbooks, to the testinfra scripts, and to the circle CI configuration to make this work-right.
| 2017-05-15T19:45:21Z | [] | [] |
|
freedomofpress/securedrop | 1,780 | freedomofpress__securedrop-1780 | [
"1738"
] | c02e526dfbbbe3b966e7a72c98e68c02cd4c111c | diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -400,16 +400,16 @@ def logout():
return redirect(url_for('index'))
[email protected]('/howto-disable-js')
-def howto_disable_js():
- return render_template("howto-disable-js.html")
-
-
@app.route('/tor2web-warning')
def tor2web_warning():
return render_template("tor2web-warning.html")
[email protected]('/use-tor')
+def recommend_tor_browser():
+ return render_template("use-tor-browser.html")
+
+
@app.route('/journalist-key')
def download_journalist_pubkey():
journalist_pubkey = crypto_util.gpg.export_keys(config.JOURNALIST_KEY)
| diff --git a/securedrop/tests/functional/test_source_warnings.py b/securedrop/tests/functional/test_source_warnings.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source_warnings.py
@@ -0,0 +1,25 @@
+from selenium import webdriver
+import unittest
+
+import source_navigation_steps
+import functional_test
+
+
+class SourceInterfaceBannerWarnings(
+ unittest.TestCase,
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def setUp(self):
+ functional_test.FunctionalTest.setUp(self)
+
+ def tearDown(self):
+ functional_test.FunctionalTest.tearDown(self)
+
+ def test_warning_appears_if_tor_browser_not_in_use(self):
+ self.driver.get(self.source_location)
+
+ warning_banner = self.driver.find_element_by_class_name('use-tor-browser')
+
+ self.assertIn("We recommend using Tor Browser to access SecureDrop",
+ warning_banner.text)
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -277,17 +277,16 @@ def test_tor2web_warning(self):
self.assertEqual(resp.status_code, 200)
self.assertIn("Why is there a warning about Tor2Web?", resp.data)
+ def test_why_use_tor_browser(self):
+ resp = self.client.get('/use-tor')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("You Should Use Tor Browser", resp.data)
+
def test_why_journalist_key(self):
resp = self.client.get('/why-journalist-key')
self.assertEqual(resp.status_code, 200)
self.assertIn("Why download the journalist's public key?", resp.data)
- def test_howto_disable_js(self):
- resp = self.client.get('/howto-disable-js')
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Turn the Security Slider to High to Protect Your "
- "Anonymity", resp.data)
-
def test_metadata_route(self):
resp = self.client.get('/metadata')
self.assertEqual(resp.status_code, 200)
| Source Interface instructions contain broken/bad URL
When a Source follows the instructions to bump up the Security Slider settings in Tor Browser, we provide rationale for why, and include two links:
* https://www.wired.com/threatlevel/2013/09/freedom-hosting-fbi
* https://www.theguardian.com/world/interactive/2013/oct/04/egotistical-giraffe-nsa-tor-document
The second link is broken. It returns an HTTP 200, but the document it's linking to is not displayed. At first glance it appears to be a mixed content/CSP error, in which case we could contact TG folks to correct it. Maybe it's simpler to find different coverage and link to that.
| See also https://github.com/freedomofpress/securedrop/issues/1475 (read the follow-up comments too).
Looking into this a bit deeper, it appears that `/howto-disable-js` is dead code and as such we should just remove the entire page. An explanation follows.
## Source uses Tor Browser with JS enabled
Currently the user flow on the source interface as follows:
1. Source is using Tor Browser but does not have the Tor security slider set to high. They see this page indicating how to set it to high:
![screen shot 2017-06-06 at 10 08 43 am](https://user-images.githubusercontent.com/7832803/26842036-4dcde0e4-4aa0-11e7-8dd9-09e0dfe03454.png)
2. Source clicks "Learn how to set it to high"
3. If a source is using Tor Browser, then `is_likely_tor_browser()` in `source.js` is True, and following the logic in `source.js` the default browser behavior of following that link will not occur (due to the `return false` on line 42).
4. Instead, a bubble will appear providing easy instructions on how to turn the security slider to high:
![screen shot 2017-06-06 at 10 08 47 am](https://user-images.githubusercontent.com/7832803/26842303-2fe98776-4aa1-11e7-8799-8100ece788b4.png)
## Source uses Tor Browser with JS disabled
If a source has JS disabled, then the purple bar will not be shown, as the top code stanza in `source.js` that displays the bar will not execute.
## Source is not using Tor Browser
If a source is not using Tor Browser, they cannot access the hidden service. If they use Tor2Web to access it anyway, we flash a warning telling them they are not anonymous. Asking them to disable JS or turn the security slider to high is irrelevant at this stage.
## tl;dr
The source never sees `/howto-disable-js`, a search through the SecureDrop document tree indicates that it is not linked to anywhere, and we should remove this dead code. | 2017-06-07T00:27:25Z | [] | [] |
freedomofpress/securedrop | 1,859 | freedomofpress__securedrop-1859 | [
"1816"
] | f5808a29d89b0813a3c5ec7db99b96f048525e7c | diff --git a/testinfra/conftest.py b/testinfra/conftest.py
--- a/testinfra/conftest.py
+++ b/testinfra/conftest.py
@@ -10,7 +10,6 @@
import sys
import yaml
import pytest
-from jinja2 import Template
target_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
@@ -26,19 +25,18 @@ def securedrop_import_testinfra_vars(hostname, with_header=False):
Vars must be stored in `testinfra/vars/<hostname>.yml`.
"""
filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
- file_load = open(filepath, 'r')
+ with open(filepath, 'r') as f:
+ hostvars = yaml.safe_load(f)
+ # The directory Travis runs builds in varies by PR, so we cannot hardcode
+ # it in the YAML testvars. Read it from env var and concatenate.
if hostname.lower() == 'travis':
build_env = os.environ["TRAVIS_BUILD_DIR"]
- file_load = Template(file_load.read()).render(BUILD_DIR=build_env)
- hostvars = yaml.load(file_load)
-
+ hostvars['securedrop_code'] = build_env+"/securedrop"
+
if with_header:
hostvars = dict(securedrop_test_vars=hostvars)
return hostvars
-
-
-
def pytest_namespace():
return securedrop_import_testinfra_vars(target_host, with_header=True)
| diff --git a/testinfra/development/test_development_environment.py b/testinfra/development/test_development_environment.py
--- a/testinfra/development/test_development_environment.py
+++ b/testinfra/development/test_development_environment.py
@@ -18,23 +18,23 @@ def test_development_app_dependencies(Package):
@pytest.mark.parametrize('pip_package,version', [
- ('Flask-Testing', '0.6.1'),
- ('Flask', '0.11.1'),
- ('Jinja2', '2.8'),
- ('MarkupSafe', '0.23'),
- ('Werkzeug', '0.11.11'),
- ('beautifulsoup4', '4.5.1'),
- ('click', '6.6'),
- ('coverage', '4.2'),
+ ('Flask-Testing', '0.6.2'),
+ ('Flask', '0.12.2'),
+ ('Jinja2', '2.9.6'),
+ ('MarkupSafe', '1.0'),
+ ('Werkzeug', '0.12.2'),
+ ('beautifulsoup4', '4.6.0'),
+ ('click', '6.7'),
+ ('coverage', '4.4.1'),
('first', '2.0.1'),
('funcsigs', '1.0.2'),
('itsdangerous', '0.24'),
('mock', '2.0.0'),
- ('pbr', '1.10.0'),
- ('pip-tools', '1.7.0'),
- ('py', '1.4.31'),
- ('pytest-cov', '2.4.0'),
- ('pytest', '3.0.3'),
+ ('pbr', '3.0.1'),
+ ('pip-tools', '1.9.0'),
+ ('py', '1.4.34'),
+ ('pytest-cov', '2.5.1'),
+ ('pytest', '3.1.1'),
('selenium', '2.53.6'),
('six', '1.10.0'),
])
| Testinfra tests are skipped in Travis
# Bug
## Description
All of the config tests in the Travis CI run are marked as "skipped." There are a few config tests that are not relevant on the Travis host, since that host is a combination of an Ansible controller and the development VM, given the localhost provisioning strategy, but we don't want to skip _everything_.
While working on #1736 and running the config tests locally, I discovered that #1770 broke the development VM config tests by upgrading Python dependencies without touching the config tests. Travis CI should have caught that, but didn't. Note that the pip checks may be one of the config tests we need to skip in Travis, due to the insufficient separation of Python library environments. Still, worth a shot.
## Steps to Reproduce
1. Commit a breaking change in a feature branch.
2. Confirm that `./testinfra/test.py development` fails locally as a result of the breaking change.
3. Push feature branch to remote and see the Travis checks fail.
## Expected Behavior
Config tests run in Travis. If not all of them, then most of them.
## Actual Behavior
No config tests run in Travis. All are marked as "SKIPPED."
## Comments
The `testinfra/test.py` wrapper script has gotten ugly. The solution is either to add a "travis" target to that script, or fix the handling of the "development" target to execute against localhost when running on Travis. It's also possible to execute testinfra directly, bypassing the wrapper, since the default behavior of testinfra is to execute against localhost.
| 2017-06-19T19:13:08Z | [] | [] |
|
freedomofpress/securedrop | 1,901 | freedomofpress__securedrop-1901 | [
"1900"
] | af480f135dff4999376a3ad994ffe441e98d90f4 | diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -46,9 +46,9 @@ def do_runtime_tests():
gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
-words = open(config.WORD_LIST).read().split('\n')
-nouns = open(config.NOUNS).read().split('\n')
-adjectives = open(config.ADJECTIVES).read().split('\n')
+words = open(config.WORD_LIST).read().rstrip('\n').split('\n')
+nouns = open(config.NOUNS).read().rstrip('\n').split('\n')
+adjectives = open(config.ADJECTIVES).read().rstrip('\n').split('\n')
class CryptoException(Exception):
| diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
--- a/securedrop/tests/test_crypto_util.py
+++ b/securedrop/tests/test_crypto_util.py
@@ -20,6 +20,11 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
+ def test_word_list_does_not_contain_empty_strings(self):
+ self.assertNotIn('', (crypto_util.words
+ + crypto_util.nouns
+ + crypto_util.adjectives))
+
def test_clean(self):
ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ'
'KLMNOPQRSTUVWXYZ')
| Wordlists are not being parsed correctly
# Bug
## Description
`crypo_util.{words,nouns,adjectives}` all contain an empty string as their last element.
| 2017-06-28T20:24:21Z | [] | [] |
|
freedomofpress/securedrop | 1,985 | freedomofpress__securedrop-1985 | [
"1978"
] | ebb2dc9c97b4fa6fe1ecf7a4e5c9995ab97e5aad | diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -399,7 +399,7 @@ def logout():
if logged_in():
session.clear()
msg = render_template('logout_flashed_message.html')
- flash(Markup(msg), "important")
+ flash(Markup(msg), "important hide-if-not-tor-browser")
return redirect(url_for('index'))
| diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -161,5 +161,4 @@ def _source_deletes_a_journalist_reply(self):
def _source_logs_out(self):
self.driver.find_element_by_id('logout').click()
- notification = self.driver.find_element_by_css_selector('.important')
- assert 'Thank you for exiting your session!' in notification.text
+ assert self.driver.find_element_by_css_selector('.important')
| inconsistent display when exiting /use-tor
# Bug
## Description
The display shows both "you are not using tor" and "Please select "New Identity" from the green Onion button in the Tor browser".
## Steps to Reproduce
* navigate to the source interface with firefox
* click on the "Learn how to install it" link
* click on the exit button
## Expected Behavior
![expected](https://user-images.githubusercontent.com/433594/28218975-98345dec-68ba-11e7-895f-a2f96e049557.png)
## Actual Behavior
![inconsistent](https://user-images.githubusercontent.com/433594/28218983-a340c504-68ba-11e7-88fd-d65b98a58742.png)
| Great catch @dachary, thanks! This has irked me before, but since I've switched to use Tor Browser for all SD testing, I haven't seen this lately. Going to toss this on the 0.4.1 milestone, so it'll get patched soon, but not block ongoing QA for 0.4. | 2017-07-17T10:28:03Z | [] | [] |
freedomofpress/securedrop | 1,992 | freedomofpress__securedrop-1992 | [
"1965"
] | 0b0e4acf6586da97fad34b4b153086757118bb66 | diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -124,9 +124,7 @@ def check_tor2web():
@app.route('/')
def index():
- return render_template('index.html',
- custom_notification=getattr(
- config, 'CUSTOM_NOTIFICATION', ''))
+ return render_template('index.html')
def generate_unique_codename():
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -263,28 +263,6 @@ def test_submit_sanitizes_filename(self, gzipfile):
mode=ANY,
fileobj=ANY)
- def test_custom_notification(self):
- """Test that `CUSTOM_NOTIFICATION` string in config file
- is rendered on the Source Interface page. We cannot assume
- it will be present in production instances, since it is added
- via the Ansible config, not the Debian package scripts."""
- custom_msg = config.CUSTOM_NOTIFICATION
-
- dev_msg = ("This is an insecure SecureDrop Development server "
- "for testing ONLY. Do NOT submit documents here.")
- staging_msg = "This is a SecureDrop Staging VM for testing ONLY"
-
- self.assertTrue(custom_msg in (dev_msg, staging_msg))
- resp = self.client.get('/')
- self.assertEqual(resp.status_code, 200)
- # The app-tests aren't host-aware, so we can't accurately predict
- # which custom notification message we want. Let's check for both,
- # and fail only if both are not found.
- try:
- self.assertIn(dev_msg, resp.data)
- except AssertionError:
- self.assertIn(staging_msg, resp.data)
-
def test_tor2web_warning_headers(self):
resp = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
self.assertEqual(resp.status_code, 200)
| Back out CUSTOM_NOTIFICATION for 0.4
While QA'ing the 0.4 release, we realized that the `CUSTOM_NOTIFICATION` configuration option introduced in #1399 does not have a good deployment story for currently deployed instances because we generally do not have a good story for automated migrations of the SecureDrop web application configuration.
Currently, any changes that are made to `config.py.example` cannot be propagated to currently deployed instances because the [Ansible playbooks](https://github.com/freedomofpress/securedrop/blob/892cf52299f17ed9c70df0bb0bec68137101bab2/install_files/ansible-base/roles/app/tasks/initialize_securedrop_app.yml#L29-L41) will avoid touching `config.py` if it has already been generated. Thus, *new* instances of 0.4 would be able to use the new `CUSTOM_NOTIFICATION` feature, but *existing* instances that are upgrading to 0.4 would not be able to take advantage of the feature. If the administrators of existing instances tried to modify the configuration on the Admin Workstation, those changes would simply not be propagated to the production machines.
We discussed this internally and agreed that the best course of action is to back out #1399 and any related changes for the 0.4 release, to avoid confusing and frustrating SecureDrop administrators with a new feature that would work inconsistently across old and new SecureDrop instances. We also agreed that it would be good to target the next release after 0.4, 0.4.1, for re-landing #1399, along with the configuration management changes required to support it consistently across current and new SecureDrop instances. I will file a separate follow-up issue to summarize the automated configuration upgrade issue and track work towards a resolution.
| Follow-up issue to discuss automated migration of the web application configuration: #1966. | 2017-07-18T00:16:52Z | [] | [] |
freedomofpress/securedrop | 2,047 | freedomofpress__securedrop-2047 | [
"2022",
"1950",
"2034"
] | fe4e3a358effcedf7b40f3aab069e1ab0c53ce22 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -51,7 +51,7 @@
# General information about the project.
project = u'SecureDrop'
-copyright = u'2015, Freedom of the Press Foundation'
+copyright = u'2017, Freedom of the Press Foundation'
author = u'SecureDrop Team and Contributors'
# The version info for the project you're documenting, acts as replacement for
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.3.12'
+version = '0.4'
# The full version, including alpha/beta/rc tags.
-release = '0.3.12'
+release = '0.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -146,7 +146,7 @@
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-#html_logo = None
+html_logo = '../securedrop/static/i/favicon.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
@@ -156,7 +156,7 @@
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
diff --git a/install_files/ansible-base/action_plugins/synchronize.py b/install_files/ansible-base/action_plugins/synchronize.py
new file mode 100644
--- /dev/null
+++ b/install_files/ansible-base/action_plugins/synchronize.py
@@ -0,0 +1,415 @@
+# -*- coding: utf-8 -*-
+
+# (c) 2012-2013, Timothy Appnel <[email protected]>
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os.path
+from collections import MutableSequence
+
+from ansible import constants as C
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_text
+from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
+from ansible.plugins.action import ActionBase
+from ansible.plugins import connection_loader
+
+boolean = C.mk_boolean
+
+
+class ActionModule(ActionBase):
+
+ def _get_absolute_path(self, path):
+ original_path = path
+
+ if path.startswith('rsync://'):
+ return path
+
+ if self._task._role is not None:
+ path = self._loader.path_dwim_relative(self._task._role._role_path, 'files', path)
+ else:
+ path = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', path)
+
+ if original_path and original_path[-1] == '/' and path[-1] != '/':
+ # make sure the dwim'd path ends in a trailing "/"
+ # if the original path did
+ path += '/'
+
+ return path
+
+ def _host_is_ipv6_address(self, host):
+ return ':' in host
+
+ def _format_rsync_rsh_target(self, host, path, user):
+ ''' formats rsync rsh target, escaping ipv6 addresses if needed '''
+
+ user_prefix = ''
+
+ if path.startswith('rsync://'):
+ return path
+
+ # If using docker, do not add user information
+ if self._remote_transport not in [ 'docker' ] and user:
+ user_prefix = '%s@' % (user, )
+
+ if self._host_is_ipv6_address(host):
+ return '[%s%s]:%s' % (user_prefix, host, path)
+ else:
+ return '%s%s:%s' % (user_prefix, host, path)
+
+ def _process_origin(self, host, path, user):
+
+ if host not in C.LOCALHOST:
+ return self._format_rsync_rsh_target(host, path, user)
+
+ if ':' not in path and not path.startswith('/'):
+ path = self._get_absolute_path(path=path)
+ return path
+
+ def _process_remote(self, task_args, host, path, user, port_matches_localhost_port):
+ """
+ :arg host: hostname for the path
+ :arg path: file path
+ :arg user: username for the transfer
+ :arg port_matches_localhost_port: boolean whether the remote port
+ matches the port used by localhost's sshd. This is used in
+ conjunction with seeing whether the host is localhost to know
+ if we need to have the module substitute the pathname or if it
+ is a different host (for instance, an ssh tunnelled port or an
+ alternative ssh port to a vagrant host.)
+ """
+ transport = self._connection.transport
+ # If we're connecting to a remote host or we're delegating to another
+ # host or we're connecting to a different ssh instance on the
+ # localhost then we have to format the path as a remote rsync path
+ if host not in C.LOCALHOST or transport != "local" or \
+ (host in C.LOCALHOST and not port_matches_localhost_port):
+ # If we're delegating to non-localhost and but the
+ # inventory_hostname host is localhost then we need the module to
+ # fix up the rsync path to use the controller's public DNS/IP
+ # instead of "localhost"
+ if port_matches_localhost_port and host in C.LOCALHOST:
+ task_args['_substitute_controller'] = True
+ return self._format_rsync_rsh_target(host, path, user)
+
+ if ':' not in path and not path.startswith('/'):
+ path = self._get_absolute_path(path=path)
+ return path
+
+ def _override_module_replaced_vars(self, task_vars):
+ """ Some vars are substituted into the modules. Have to make sure
+ that those are correct for localhost when synchronize creates its own
+ connection to localhost."""
+
+ # Clear the current definition of these variables as they came from the
+ # connection to the remote host
+ if 'ansible_syslog_facility' in task_vars:
+ del task_vars['ansible_syslog_facility']
+ for key in list(task_vars.keys()):
+ if key.startswith("ansible_") and key.endswith("_interpreter"):
+ del task_vars[key]
+
+ # Add the definitions from localhost
+ for host in C.LOCALHOST:
+ if host in task_vars['hostvars']:
+ localhost = task_vars['hostvars'][host]
+ break
+ if 'ansible_syslog_facility' in localhost:
+ task_vars['ansible_syslog_facility'] = localhost['ansible_syslog_facility']
+ for key in localhost:
+ if key.startswith("ansible_") and key.endswith("_interpreter"):
+ task_vars[key] = localhost[key]
+
+ def run(self, tmp=None, task_vars=None):
+ ''' generates params and passes them on to the rsync module '''
+ # When modifying this function be aware of the tricky convolutions
+ # your thoughts have to go through:
+ #
+ # In normal ansible, we connect from controller to inventory_hostname
+ # (playbook's hosts: field) or controller to delegate_to host and run
+ # a module on one of those hosts.
+ #
+ # So things that are directly related to the core of ansible are in
+ # terms of that sort of connection that always originate on the
+ # controller.
+ #
+ # In synchronize we use ansible to connect to either the controller or
+ # to the delegate_to host and then run rsync which makes its own
+ # connection from controller to inventory_hostname or delegate_to to
+ # inventory_hostname.
+ #
+ # That means synchronize needs to have some knowledge of the
+ # controller to inventory_host/delegate host that ansible typically
+ # establishes and use those to construct a command line for rsync to
+ # connect from the inventory_host to the controller/delegate. The
+ # challenge for coders is remembering which leg of the trip is
+ # associated with the conditions that you're checking at any one time.
+ if task_vars is None:
+ task_vars = dict()
+
+ # We make a copy of the args here because we may fail and be asked to
+ # retry. If that happens we don't want to pass the munged args through
+ # to our next invocation. Munged args are single use only.
+ _tmp_args = self._task.args.copy()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+
+ # Store remote connection type
+ self._remote_transport = self._connection.transport
+
+ # Handle docker connection options
+ if self._remote_transport == 'docker':
+ self._docker_cmd = self._connection.docker_cmd
+ if self._play_context.docker_extra_args:
+ self._docker_cmd = "%s %s" % (self._docker_cmd, self._play_context.docker_extra_args)
+
+ # self._connection accounts for delegate_to so
+ # remote_transport is the transport ansible thought it would need
+ # between the controller and the delegate_to host or the controller
+ # and the remote_host if delegate_to isn't set.
+
+ remote_transport = False
+ if self._connection.transport != 'local':
+ remote_transport = True
+
+ try:
+ delegate_to = self._task.delegate_to
+ except (AttributeError, KeyError):
+ delegate_to = None
+
+ # ssh paramiko docker and local are fully supported transports. Anything
+ # else only works with delegate_to
+ if delegate_to is None and self._connection.transport not in ('ssh', 'paramiko', 'local', 'docker'):
+ result['failed'] = True
+ result['msg'] = ("synchronize uses rsync to function. rsync needs to connect to the remote host via ssh, docker client or a direct filesystem "
+ "copy. This remote host is being accessed via %s instead so it cannot work." % self._connection.transport)
+ return result
+
+ use_ssh_args = _tmp_args.pop('use_ssh_args', None)
+
+ # Parameter name needed by the ansible module
+ _tmp_args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'
+
+ # rsync thinks that one end of the connection is localhost and the
+ # other is the host we're running the task for (Note: We use
+ # ansible's delegate_to mechanism to determine which host rsync is
+ # running on so localhost could be a non-controller machine if
+ # delegate_to is used)
+ src_host = '127.0.0.1'
+ inventory_hostname = task_vars.get('inventory_hostname')
+ dest_host_inventory_vars = task_vars['hostvars'].get(inventory_hostname)
+ try:
+ dest_host = dest_host_inventory_vars['ansible_host']
+ except KeyError:
+ dest_host = dest_host_inventory_vars.get('ansible_ssh_host', inventory_hostname)
+
+ dest_host_ids = [hostid for hostid in (dest_host_inventory_vars.get('inventory_hostname'),
+ dest_host_inventory_vars.get('ansible_host'),
+ dest_host_inventory_vars.get('ansible_ssh_host'))
+ if hostid is not None]
+
+ localhost_ports = set()
+ for host in C.LOCALHOST:
+ localhost_vars = task_vars['hostvars'].get(host, {})
+ for port_var in MAGIC_VARIABLE_MAPPING['port']:
+ port = localhost_vars.get(port_var, None)
+ if port:
+ break
+ else:
+ port = C.DEFAULT_REMOTE_PORT
+ localhost_ports.add(port)
+
+ # dest_is_local tells us if the host rsync runs on is the same as the
+ # host rsync puts the files on. This is about *rsync's connection*,
+ # not about the ansible connection to run the module.
+ dest_is_local = False
+ if delegate_to is None and remote_transport is False:
+ dest_is_local = True
+ elif delegate_to is not None and delegate_to in dest_host_ids:
+ dest_is_local = True
+
+ # CHECK FOR NON-DEFAULT SSH PORT
+ inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
+ if _tmp_args.get('dest_port', None) is None:
+ if inv_port is not None:
+ _tmp_args['dest_port'] = inv_port
+
+ # Set use_delegate if we are going to run rsync on a delegated host
+ # instead of localhost
+ use_delegate = False
+ if delegate_to is not None and delegate_to in dest_host_ids:
+ # edge case: explicit delegate and dest_host are the same
+ # so we run rsync on the remote machine targeting its localhost
+ # (itself)
+ dest_host = '127.0.0.1'
+ use_delegate = True
+ elif delegate_to is not None and remote_transport:
+ # If we're delegating to a remote host then we need to use the
+ # delegate_to settings
+ use_delegate = True
+
+ # Delegate to localhost as the source of the rsync unless we've been
+ # told (via delegate_to) that a different host is the source of the
+ # rsync
+ if not use_delegate and remote_transport:
+ # Create a connection to localhost to run rsync on
+ new_stdin = self._connection._new_stdin
+
+ # Unike port, there can be only one shell
+ localhost_shell = None
+ for host in C.LOCALHOST:
+ localhost_vars = task_vars['hostvars'].get(host, {})
+ for shell_var in MAGIC_VARIABLE_MAPPING['shell']:
+ localhost_shell = localhost_vars.get(shell_var, None)
+ if localhost_shell:
+ break
+ if localhost_shell:
+ break
+ else:
+ localhost_shell = os.path.basename(C.DEFAULT_EXECUTABLE)
+ self._play_context.shell = localhost_shell
+
+ # Unike port, there can be only one executable
+ localhost_executable = None
+ for host in C.LOCALHOST:
+ localhost_vars = task_vars['hostvars'].get(host, {})
+ for executable_var in MAGIC_VARIABLE_MAPPING['executable']:
+ localhost_executable = localhost_vars.get(executable_var, None)
+ if localhost_executable:
+ break
+ if localhost_executable:
+ break
+ else:
+ localhost_executable = C.DEFAULT_EXECUTABLE
+ self._play_context.executable = localhost_executable
+
+ new_connection = connection_loader.get('local', self._play_context, new_stdin)
+ self._connection = new_connection
+ self._override_module_replaced_vars(task_vars)
+
+ # SWITCH SRC AND DEST HOST PER MODE
+ if _tmp_args.get('mode', 'push') == 'pull':
+ (dest_host, src_host) = (src_host, dest_host)
+
+ # MUNGE SRC AND DEST PER REMOTE_HOST INFO
+ src = _tmp_args.get('src', None)
+ dest = _tmp_args.get('dest', None)
+ if src is None or dest is None:
+ return dict(failed=True,
+ msg="synchronize requires both src and dest parameters are set")
+
+ if not dest_is_local:
+ # Private key handling
+ private_key = self._play_context.private_key_file
+
+ if private_key is not None:
+ private_key = os.path.expanduser(private_key)
+ _tmp_args['private_key'] = private_key
+
+ # Src and dest rsync "path" handling
+ # Determine if we need a user@
+ user = None
+ if boolean(_tmp_args.get('set_remote_user', 'yes')):
+ if use_delegate:
+ user = task_vars.get('ansible_delegated_vars', dict()).get('ansible_ssh_user', None)
+ if not user:
+ user = C.DEFAULT_REMOTE_USER
+
+ else:
+ user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
+
+ # use the mode to define src and dest's url
+ if _tmp_args.get('mode', 'push') == 'pull':
+ # src is a remote path: <user>@<host>, dest is a local path
+ src = self._process_remote(_tmp_args, src_host, src, user, inv_port in localhost_ports)
+ dest = self._process_origin(dest_host, dest, user)
+ else:
+ # src is a local path, dest is a remote path: <user>@<host>
+ src = self._process_origin(src_host, src, user)
+ dest = self._process_remote(_tmp_args, dest_host, dest, user, inv_port in localhost_ports)
+ else:
+ # Still need to munge paths (to account for roles) even if we aren't
+ # copying files between hosts
+ if not src.startswith('/'):
+ src = self._get_absolute_path(path=src)
+ if not dest.startswith('/'):
+ dest = self._get_absolute_path(path=dest)
+
+ _tmp_args['src'] = src
+ _tmp_args['dest'] = dest
+
+ # Allow custom rsync path argument
+ rsync_path = _tmp_args.get('rsync_path', None)
+
+ # backup original become as we are probably about to unset it
+ become = self._play_context.become
+
+ if not dest_is_local:
+ # don't escalate for docker. doing --rsync-path with docker exec fails
+ # and we can switch directly to the user via docker arguments
+ if self._play_context.become and not rsync_path and self._remote_transport != 'docker':
+ # If no rsync_path is set, become was originally set, and dest is
+ # remote then add privilege escalation here.
+ if self._play_context.become_method == 'sudo':
+ rsync_path = "'sudo rsync'"
+ # TODO: have to add in the rest of the become methods here
+
+ # We cannot use privilege escalation on the machine running the
+ # module. Instead we run it on the machine rsync is connecting
+ # to.
+ self._play_context.become = False
+
+ _tmp_args['rsync_path'] = rsync_path
+
+ if use_ssh_args:
+ ssh_args = [
+ getattr(self._play_context, 'ssh_args', ''),
+ getattr(self._play_context, 'ssh_common_args', ''),
+ getattr(self._play_context, 'ssh_extra_args', ''),
+ ]
+ _tmp_args['ssh_args'] = ' '.join([a for a in ssh_args if a])
+
+ # If launching synchronize against docker container
+ # use rsync_opts to support container to override rsh options
+ if self._remote_transport in [ 'docker' ]:
+ # Replicate what we do in the module argumentspec handling for lists
+ if not isinstance(_tmp_args.get('rsync_opts'), MutableSequence):
+ tmp_rsync_opts = _tmp_args.get('rsync_opts', [])
+ if isinstance(tmp_rsync_opts, string_types):
+ tmp_rsync_opts = tmp_rsync_opts.split(',')
+ elif isinstance(tmp_rsync_opts, (int, float)):
+ tmp_rsync_opts = [to_text(tmp_rsync_opts)]
+ _tmp_args['rsync_opts'] = tmp_rsync_opts
+
+ if '--blocking-io' not in _tmp_args['rsync_opts']:
+ _tmp_args['rsync_opts'].append('--blocking-io')
+ if become and self._play_context.become_user:
+ _tmp_args['rsync_opts'].append("--rsh='%s exec -u %s -i'" % (self._docker_cmd, self._play_context.become_user))
+ elif user is not None:
+ _tmp_args['rsync_opts'].append("--rsh='%s exec -u %s -i'" % (self._docker_cmd, user))
+ else:
+ _tmp_args['rsync_opts'].append("--rsh='%s exec -i'" % self._docker_cmd)
+
+ # run the module and store the result
+ result.update(self._execute_module('synchronize', module_args=_tmp_args, task_vars=task_vars))
+
+ if 'SyntaxError' in result.get('exception', result.get('msg', '')):
+ # Emit a warning about using python3 because synchronize is
+ # somewhat unique in running on localhost
+ result['exception'] = result['msg']
+ result['msg'] = ('SyntaxError parsing module. Perhaps invoking "python" on your local (or delegate_to) machine invokes python3. '
+ 'You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this')
+ return result
diff --git a/install_files/ansible-base/callback_plugins/profile_tasks.py b/install_files/ansible-base/callback_plugins/profile_tasks.py
new file mode 100644
--- /dev/null
+++ b/install_files/ansible-base/callback_plugins/profile_tasks.py
@@ -0,0 +1,73 @@
+# Source: https://github.com/jlafon/ansible-profile
+# License: MIT
+# More info: http://jlafon.io/ansible-profiling.html
+# The profiling functionality will be provided by Ansible v2,
+# since this callback_plugin has been merged into core,
+# but we're including here to support older versions of Ansible.
+import datetime
+import os
+import time
+
+
+class CallbackModule(object):
+ """
+ A plugin for timing tasks
+ """
+ def __init__(self):
+ self.stats = {}
+ self.current = None
+
+ def playbook_on_task_start(self, name, is_conditional):
+ """
+ Logs the start of each task
+ """
+
+ if os.getenv("ANSIBLE_PROFILE_DISABLE") is not None:
+ return
+
+ if self.current is not None:
+ # Record the running time of the last executed task
+ self.stats[self.current] = time.time() - self.stats[self.current]
+
+ # Record the start time of the current task
+ self.current = name
+ self.stats[self.current] = time.time()
+
+ def playbook_on_stats(self, stats):
+ """
+ Prints the timings
+ """
+
+ if os.getenv("ANSIBLE_PROFILE_DISABLE") is not None:
+ return
+
+ # Record the timing of the very last task
+ if self.current is not None:
+ self.stats[self.current] = time.time() - self.stats[self.current]
+
+ # Sort the tasks by their running time
+ results = sorted(
+ self.stats.items(),
+ key=lambda value: value[1],
+ reverse=True,
+ )
+
+ # Just keep the top 10
+ results = results[:10]
+
+ # Print the timings
+ for name, elapsed in results:
+ print(
+ "{0:-<70}{1:->9}".format(
+ '{0} '.format(name),
+ ' {0:.02f}s'.format(elapsed),
+ )
+ )
+
+ total_seconds = sum([x[1] for x in self.stats.items()])
+ print("\nPlaybook finished: {0}, {1} total tasks. {2} elapsed. \n".format(
+ time.asctime(),
+ len(self.stats.items()),
+ datetime.timedelta(seconds=(int(total_seconds)))
+ )
+ )
diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py
new file mode 100755
--- /dev/null
+++ b/install_files/ansible-base/roles/backup/files/0.3_collect.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python2.7
+"""
+
+This script should be copied to the App server and ran by the anisble
+plabook. When run (as root), it collects all of the necessary information
+to backup the 0.3 system and stores it in /tmp/sd-backup-0.3-TIME_STAMP.zip.gpg
+
+"""
+
+import sys
+import os
+import re
+import zipfile
+from datetime import datetime
+import functools
+# Import the application config.py file
+sys.path.append("/var/www/securedrop")
+import config
+import gnupg
+import subprocess
+
+TOR_SERVICES = "/var/lib/tor/services"
+TOR_CONFIG = "/etc/tor/torrc"
+
+
+def collect_config_file(zf):
+ config_file_path = os.path.join(config.SECUREDROP_ROOT, "config.py")
+ zf.write(config_file_path)
+
+
+def collect_securedrop_data_root(zf):
+ # The store and key dirs are shared between both interfaces
+ for root, dirs, files in os.walk(config.SECUREDROP_DATA_ROOT):
+ for name in files:
+ zf.write(os.path.join(root, name))
+
+
+def collect_custom_header_image(zf):
+ # The custom header image is copied over the deafult `static/i/logo.png`.
+ zf.write(os.path.join(config.SECUREDROP_ROOT, "static/i/logo.png"))
+
+
+def collect_tor_files(zf):
+ # All of the tor hidden service private keys are stored in the THS specific
+ # subdirectory `/var/lib/tor/services` backing up this directory will back
+ # up all of the THS and ATHS required keys needed to restore all the hidden
+ # services on that system.
+ for root, dirs, files in os.walk(TOR_SERVICES):
+ for name in files:
+ zf.write(os.path.join(root, name))
+
+ # The tor config file has the ATHS client names required to restore
+ # the ATHS info. These names are also in the the specific client_key file
+ # but backing up this file makes it easier than parsing the files during a
+ # restore.
+ zf.write(TOR_CONFIG)
+
+
+def encrypt_zip_file(zf_fn):
+ # Encrypt the backup zip file with the application's gpg public key
+ gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
+ e_fn = '{}.gpg'.format(zf_fn)
+
+ stream = open(zf_fn, "rb")
+ gpg.encrypt_file(stream, config.JOURNALIST_KEY, always_trust='True',
+ output=e_fn)
+
+
+def main():
+ # name append a timestamp to the sd-backup zip filename
+ dt = str(datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
+ zf_fn = 'sd-backup-{}.zip'.format(dt)
+ with zipfile.ZipFile(zf_fn, 'w') as zf:
+ collect_config_file(zf)
+ collect_securedrop_data_root(zf)
+ collect_custom_header_image(zf)
+ collect_tor_files(zf)
+ encrypt_zip_file(zf_fn)
+ print zf_fn
+
+if __name__ == "__main__":
+ main()
diff --git a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
new file mode 100644
--- /dev/null
+++ b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+DOCUMENTATION = '''
+---
+module: ossec_urls
+short_description: Gather facts for OSSEC download URLs
+description:
+ - Gather version, checksum, and URL info for OSSEC downloads
+author:
+ - Conor Schaefer (@conorsch)
+ - Freedom of the Press Foundation (@freedomofpress)
+requirements:
+ - requests
+options:
+ ossec_version:
+ description:
+ - version number of release to download
+ default: "2.8.2"
+ required: no
+notes:
+ - The OSSEC version to download is hardcoded to avoid surprises.
+ If you want a newer version than the current default, you should
+ pass the version in via I(ossec_version).
+'''
+EXAMPLES = '''
+- ossec_urls:
+ ossec_version: "2.8.2"
+'''
+
+from StringIO import StringIO
+from urlparse import urljoin
+import re
+
+HAS_REQUESTS = True
+try:
+ import requests
+except ImportError:
+ HAS_REQUESTS = False
+
+
+
+class OSSECURLs():
+
+ def __init__(self, ossec_version):
+ self.ossec_version = ossec_version
+
+ checksums = self.parse_checksums()
+
+ self.ansible_facts = dict(
+ ossec_version=self.ossec_version,
+ ossec_tarball_filename=self.ossec_tarball_filename,
+ ossec_tarball_url=self.ossec_tarball_url,
+ ossec_checksum_filename=self.ossec_checksum_filename,
+ ossec_checksum_url=self.ossec_checksum_url,
+ )
+
+ self.ansible_facts.update(checksums)
+
+ @property
+ def ossec_tarball_filename(self):
+ return "ossec-hids-{}.tar.gz".format(self.ossec_version)
+
+
+ @property
+ def ossec_tarball_url(self):
+ return "https://github.com/ossec/ossec-hids/archive/{}.tar.gz".format(
+ self.ossec_version)
+
+
+ @property
+ def ossec_checksum_url(self):
+ return "https://github.com/ossec/ossec-hids/releases/download/{}/{}".format(
+ self.ossec_version, self.ossec_checksum_filename)
+
+
+ @property
+ def ossec_checksum_filename(self):
+ return "{}-checksum.txt".format(self.ossec_tarball_filename)
+
+
+ def parse_checksums(self):
+ r = requests.get(self.ossec_checksum_url)
+ checksum_regex = re.compile(r'''
+ ^MD5\(
+ '''
+ +re.escape(self.ossec_tarball_filename)+
+ r'''\)=\s+(?P<ossec_md5_checksum>[0-9a-f]{32})\s+
+ SHA1\(
+ '''
+ +re.escape(self.ossec_tarball_filename)+
+ r'''\)=\s+(?P<ossec_sha1_checksum>[0-9a-f]{40})$
+ ''', re.VERBOSE | re.MULTILINE
+ )
+ checksum_list = r.content.rstrip()
+ results = re.match(checksum_regex, checksum_list).groupdict()
+ return results
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ ossec_version=dict(default="2.8.2" ),
+ ),
+ supports_check_mode=False
+ )
+ if not HAS_REQUESTS:
+ module.fail_json(msg='requests required for this module')
+
+ ossec_version = module.params['ossec_version']
+ try:
+ ossec_config = OSSECURLs(ossec_version=ossec_version)
+ except:
+ msg = ("Failed to find checksum information for OSSEC v{}."
+ "Ensure you have the proper release specified, "
+ "and check the download page to confirm: "
+ "http://www.ossec.net/?page_id=19".format(ossec_version))
+ module.fail_json(msg=msg)
+
+ results = ossec_config.ansible_facts
+
+ if results:
+ module.exit_json(changed=False, ansible_facts=results)
+ else:
+ msg = "Failed to fetch OSSEC URL facts."
+ module.fail_json(msg=msg)
+
+
+from ansible.module_utils.basic import *
+main()
diff --git a/install_files/ansible-base/roles/restore/files/0.3_restore.py b/install_files/ansible-base/roles/restore/files/0.3_restore.py
new file mode 100755
--- /dev/null
+++ b/install_files/ansible-base/roles/restore/files/0.3_restore.py
@@ -0,0 +1,171 @@
+#!/usr/bin/python2.7
+"""
+
+This script and decrypted backup zip should be copied to the App server
+and run by the ansible playbook. When run (as root), it restores the 0.3
+backup file.
+
+python 0.3_restore.py sd-backup-TIMESTAMP.zip
+
+"""
+
+import sys
+import os
+import re
+import zipfile
+import subprocess
+import shutil
+from datetime import datetime
+from operator import itemgetter
+import calendar
+import traceback
+
+
+def replace_prefix(path, p1, p2):
+ """
+ Replace p1 in path with p2
+
+ >>> replace_prefix("/tmp/files/foo.bar", "/tmp", "/home/me")
+ "/home/me/files/foo.bar"
+ """
+ common_prefix = os.path.commonprefix([path, p1])
+ if common_prefix:
+ assert path.find(common_prefix) == 0
+ # +1 so chop off the next path separator, which otherwise becomes a
+ # leading path separator and confuses os.path.join
+ path = path[len(common_prefix)+1:]
+ return os.path.join(p2, path)
+
+
+def extract_to_path(archive, member, path, user):
+ """
+ Extract from the zip archive `archive` the member `member` and write it to
+ `path`, preserving file metadata and chown'ing the file using `user`
+ """
+ # Create all upper directories if necessary
+ upperdirs = os.path.dirname(path)
+ if upperdirs and not os.path.exists(upperdirs):
+ os.makedirs(upperdirs)
+
+ with archive.open(member) as source, file(path, "wb") as target:
+ shutil.copyfileobj(source, target)
+
+ # Update the timestamps as well (as best we can, thanks, conversion to
+ # localtime). This only actually works if the .zip was created on a
+ # machine where the timezone was set to UTC, but it might be good
+ # enough since we just need the relative order of timestamps (they will
+ # all be normalized anyway).
+ if hasattr(member, 'date_time'):
+ timestamp = calendar.timegm(member.date_time)
+ os.utime(path, (timestamp, timestamp))
+
+ ug = "{}:{}".format(user, user)
+ subprocess.call(['chown', '-R', ug, path])
+
+
+def restore_config_file(zf):
+ print "* Migrating SecureDrop config file from backup..."
+
+ # Restore the original config file
+ for zi in zf.infolist():
+ if "var/www/securedrop/config.py" in zi.filename:
+ extract_to_path(zf, "var/www/securedrop/config.py",
+ "/var/www/securedrop/config.py", "www-data")
+
+
+def restore_securedrop_root(zf):
+ print "* Migrating directories from SECUREDROP_ROOT..."
+
+ # Restore the original source directories and key files
+ for zi in zf.infolist():
+ if "var/lib/securedrop/store" in zi.filename:
+ extract_to_path(zf, zi,
+ replace_prefix(zi.filename,
+ "var/lib/securedrop/store",
+ "/var/lib/securedrop/store"),
+ "www-data")
+ elif "var/lib/securedrop/keys" in zi.filename:
+ # TODO: is it a bad idea to migrate the random_seed from the
+ # previous installation?
+ extract_to_path(zf, zi,
+ replace_prefix(zi.filename,
+ "var/lib/securedrop/keys",
+ "/var/lib/securedrop/keys"),
+ "www-data")
+
+
+def restore_database(zf):
+ print "* Migrating database..."
+
+ extract_to_path(zf, "var/lib/securedrop/db.sqlite",
+ "/var/lib/securedrop/db.sqlite", "www-data")
+
+
+def restore_custom_header_image(zf):
+ print "* Migrating custom header image..."
+ extract_to_path(zf,
+ "var/www/securedrop/static/i/logo.png",
+ "/var/www/securedrop/static/i/logo.png", "www-data")
+
+
+def restore_tor_files(zf):
+ tor_root_dir = "/var/lib/tor"
+ ths_root_dir = os.path.join(tor_root_dir, "services")
+ source_ths_dir = os.path.join(ths_root_dir, "source")
+ journalist_ths_dir = os.path.join(ths_root_dir, "journalist")
+
+ print "* Deleting previous source THS interface..."
+
+ for fn in os.listdir(source_ths_dir):
+ os.remove(os.path.join(source_ths_dir, fn))
+
+ print "* Deleting previous journalist ATHS interface..."
+
+ for fn in os.listdir(journalist_ths_dir):
+ os.remove(os.path.join(journalist_ths_dir, fn))
+
+ print "* Migrating source and journalist interface .onion..."
+
+ for zi in zf.infolist():
+ if "var/lib/tor/services/source" in zi.filename:
+ extract_to_path(zf, zi,
+ replace_prefix(zi.filename,
+ "var/lib/tor/services/source",
+ "/var/lib/tor/services/source"),
+ "debian-tor")
+ elif "var/lib/tor/services/journalist" in zi.filename:
+ extract_to_path(zf, zi,
+ replace_prefix(zi.filename,
+ "var/lib/tor/services/journalist",
+ "/var/lib/tor/services/journalist"),
+ "debian-tor")
+
+ # Reload Tor to trigger registering the old Tor Hidden Services
+ # reloading Tor compared to restarting tor will not break the current tor
+ # connections for SSH
+ subprocess.call(['service', 'tor', 'reload'])
+
+
+def main():
+ if len(sys.argv) <= 1:
+ print ("Usage: 0.3_restore.py <filename>\n\n"
+ " <filename>\tPath to a SecureDrop 0.3 backup .zip file"
+ "created by 0.3_collect.py")
+ sys.exit(1)
+
+ try:
+ zf_fn = sys.argv[1]
+ with zipfile.ZipFile(zf_fn, 'r') as zf:
+ restore_config_file(zf)
+ restore_securedrop_root(zf)
+ restore_database(zf)
+ restore_custom_header_image(zf)
+ restore_tor_files(zf)
+ except:
+ print "\n!!! Something went wrong, please file an issue.\n"
+ print traceback.format_exc()
+ else:
+ print "Done!"
+
+if __name__ == "__main__":
+ main()
diff --git a/install_files/ansible-base/roles/backup/files/restore.py b/install_files/ansible-base/roles/restore/files/restore.py
similarity index 100%
rename from install_files/ansible-base/roles/backup/files/restore.py
rename to install_files/ansible-base/roles/restore/files/restore.py
diff --git a/tails_files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
similarity index 71%
rename from tails_files/securedrop_init.py
rename to install_files/ansible-base/roles/tails-config/files/securedrop_init.py
--- a/tails_files/securedrop_init.py
+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
@@ -41,7 +41,14 @@
except subprocess.CalledProcessError:
sys.exit('Error reloading Tor')
+# Turn off "automatic-decompression" in Nautilus to ensure the original
+# submission filename is restored (see
+# https://github.com/freedomofpress/securedrop/issues/1862#issuecomment-311519750).
+subprocess.call(['/usr/bin/dconf', 'write',
+ '/org/gnome/nautilus/preferences/automatic-decompression',
+ 'false'])
+
# notify the user
subprocess.call(['tails-notify-user',
'SecureDrop successfully auto-configured!',
- 'You can now access the Document Interface.\nIf you are an admin, you can now SSH to the servers.'])
+ 'You can now access the Journalist Interface.\nIf you are an admin, you can now SSH to the servers.'])
diff --git a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py b/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
--- a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
+++ b/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
@@ -40,7 +40,7 @@ def cleanup_deleted_sources(store_dir, c):
"""
In 0.3pre and 0.3, there were two bugs that could potentially lead
to the source directory failing to be deleted when a source was
- deleted from the Document Interface. We clean up these leftover
+ deleted from the Journalist Interface. We clean up these leftover
directories as part of the migration.
These sources can be identified because they have a source_dir in
diff --git a/migration_scripts/0.2.1/0.2.1_collect.py b/migration_scripts/0.2.1/0.2.1_collect.py
--- a/migration_scripts/0.2.1/0.2.1_collect.py
+++ b/migration_scripts/0.2.1/0.2.1_collect.py
@@ -10,14 +10,17 @@
import re
import tarfile
+
# Arbitrarily pick the source chroot jail (doesn't matter)
securedrop_root = "/var/chroot/source/var/www/securedrop"
+
def collect_config_file(backup):
config_file = os.path.join(securedrop_root, "config.py")
backup.add(config_file)
return config_file
+
def collect_securedrop_root(backup):
# The store and key dirs are shared between the chroot jails in
# 0.2.1, and are both linked from /var/securedrop
@@ -25,13 +28,15 @@ def collect_securedrop_root(backup):
backup.add(securedrop_root)
return securedrop_root
+
def collect_database(backup):
- # Copy the db file, which is only present in the document interface's
+ # Copy the db file, which is only present in the journalist interface's
# chroot jail in 0.2.1
db_file = "/var/chroot/document/var/www/securedrop/db.sqlite"
backup.add(db_file)
return db_file
+
def collect_custom_header_image(backup):
# 0.2.1's deployment didn't actually use
# config.CUSTOM_HEADER_IMAGE - it just overwrote the default
@@ -40,6 +45,7 @@ def collect_custom_header_image(backup):
backup.add(header_image)
return header_image
+
def collect_tor_files(backup):
tor_files = [
"/etc/tor/torrc",
diff --git a/migration_scripts/0.2.1/old_crypto_util.py b/migration_scripts/0.2.1/old_crypto_util.py
--- a/migration_scripts/0.2.1/old_crypto_util.py
+++ b/migration_scripts/0.2.1/old_crypto_util.py
@@ -4,7 +4,8 @@
import os
import random as badrandom
-# Find the absolute path relative to this file so this script can be run anywhere
+# Find the absolute path relative to this file so this script can be run
+# anywhere
SRC_DIR = os.path.dirname(os.path.realpath(__file__))
nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
@@ -14,4 +15,5 @@
def displayid(n):
badrandom_value = badrandom.WichmannHill()
badrandom_value.seed(n)
- return badrandom_value.choice(adjectives) + " " + badrandom_value.choice(nouns)
+ return badrandom_value.choice(
+ adjectives) + " " + badrandom_value.choice(nouns)
diff --git a/securedrop/_genwordlist.py b/securedrop/_genwordlist.py
deleted file mode 100644
--- a/securedrop/_genwordlist.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-Generates `wordlist` from The English Open Word List http://dreamsteep.com/projects/the-english-open-word-list.html
-Usage: Unzip the CSV files from the archive with the command `unzip EOWL-v1.1.2.zip EOWL-v1.1.2/CSV\ Format/*.csv`
-"""
-import re
-import string
-
-
-def just7(x):
- return all(c in string.printable for c in x)
-
-words = set()
-
-for i in map(chr, range(65, 91)):
- words.update(x.strip()
- for x in file('EOWL-v1.1.2/CSV Format/%s Words.csv' % i) if just7(x))
-
-fh = file('wordlist', 'w')
-for word in words:
- if re.search('[^a-z0-9]', word): # punctuation is right out
- continue
- if re.match(r'^([a-z])\1\1\1*$', word): # yyyy is not a real word
- continue
- # EOWL contains bigrams xf, xg, xh, etc.
- if re.match(r'^[a-z][a-z]$', word):
- continue
- fh.write('%s\n' % word)
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -1,7 +1,9 @@
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
+
+from base64 import b32encode
import os
import subprocess
-from base64 import b32encode
from Crypto.Random import random
import gnupg
@@ -20,7 +22,7 @@
# use these settings in production)
GPG_KEY_LENGTH = 1024
SCRYPT_PARAMS = dict(N=2**1, r=1, p=1)
-else:
+else: # pragma: no cover
GPG_KEY_LENGTH = 4096
SCRYPT_PARAMS = config.SCRYPT_PARAMS
@@ -40,25 +42,14 @@ def do_runtime_tests():
except subprocess.CalledProcessError:
pass
+
do_runtime_tests()
-# HACK: use_agent=True is used to avoid logging noise.
-#
-# --use-agent is a dummy option in gpg2, which is the only version of
-# gpg used by SecureDrop. If use_agent=False, gpg2 prints a warning
-# message every time it runs because the option is deprecated and has
-# no effect. This message cannot be silenced even if you change the
-# --debug-level (controlled via the verbose= keyword argument to the
-# gnupg.GPG constructor), and creates a lot of logging noise.
-#
-# The best solution here would be to avoid passing either --use-agent
-# or --no-use-agent to gpg2, and I have filed an issue upstream to
-# address this: https://github.com/isislovecruft/python-gnupg/issues/96
-gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR, use_agent=True)
-
-words = file(config.WORD_LIST).read().split('\n')
-nouns = file(config.NOUNS).read().split('\n')
-adjectives = file(config.ADJECTIVES).read().split('\n')
+gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
+
+words = open(config.WORD_LIST).read().rstrip('\n').split('\n')
+nouns = open(config.NOUNS).read().rstrip('\n').split('\n')
+adjectives = open(config.ADJECTIVES).read().rstrip('\n').split('\n')
class CryptoException(Exception):
@@ -70,16 +61,17 @@ def clean(s, also=''):
>>> clean("Hello, world!")
Traceback (most recent call last):
...
- CryptoException: invalid input
+ CryptoException: invalid input: Hello, world!
>>> clean("Helloworld")
'Helloworld'
"""
# safe characters for every possible word in the wordlist includes capital
# letters because codename hashes are base32-encoded with capital letters
- ok = ' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ'
+ 'KLMNOPQRSTUVWXYZ')
for c in s:
if c not in ok and c not in also:
- raise CryptoException("invalid input: %s" % s)
+ raise CryptoException("invalid input: {0}".format(s))
# scrypt.hash requires input of type str. Since the wordlist is all ASCII
# characters, this conversion is not problematic
return str(s)
@@ -94,20 +86,34 @@ def display_id():
def hash_codename(codename, salt=SCRYPT_ID_PEPPER):
- """
- >>> hash_codename('Hello, world!')
- 'EQZGCJBRGISGOTC2NZVWG6LILJBHEV3CINNEWSCLLFTUWZLFHBTS6WLCHFHTOLRSGQXUQLRQHFMXKOKKOQ4WQ6SXGZXDAS3Z'
+ """Salts and hashes a codename using scrypt.
+
+ :param str codename: A source's codename.
+ :param str salt: The salt to mix with the codename when hashing.
+ :returns: A base32 encoded string; the salted codename hash.
"""
return b32encode(scrypt.hash(clean(codename), salt, **SCRYPT_PARAMS))
def genkeypair(name, secret):
- """
+ """Generate a GPG key through batch file key generation. A source's
+ codename is salted with SCRYPT_GPG_PEPPER and hashed with scrypt to
+ provide the passphrase used to encrypt their private key. Their name
+ should be their filesystem id.
+
>>> if not gpg.list_keys(hash_codename('randomid')):
... genkeypair(hash_codename('randomid'), 'randomid').type
... else:
... u'P'
u'P'
+
+ :param str name: The source's filesystem id (their codename, salted
+ with SCRYPT_ID_PEPPER, and hashed with scrypt).
+ :param str secret: The source's codename.
+ :returns: a :class:`GenKey <gnupg._parser.GenKey>` object, on which
+ the ``__str__()`` method may be called to return the
+ generated key's fingerprint.
+
"""
name = clean(name)
secret = hash_codename(secret, salt=SCRYPT_GPG_PEPPER)
@@ -118,9 +124,10 @@ def genkeypair(name, secret):
))
-def delete_reply_keypair(source_id):
- key = getkey(source_id)
- # If this source was never flagged for review, they won't have a reply keypair
+def delete_reply_keypair(source_filesystem_id):
+ key = getkey(source_filesystem_id)
+ # If this source was never flagged for review, they won't have a reply
+ # keypair
if not key:
return
# The private key needs to be deleted before the public key can be deleted
@@ -138,21 +145,16 @@ def getkey(name):
return None
-def get_key_by_fingerprint(fingerprint):
- matches = filter(lambda k: k['fingerprint'] == fingerprint, gpg.list_keys())
- return matches[0] if matches else None
-
-
def encrypt(plaintext, fingerprints, output=None):
# Verify the output path
if output:
store.verify(output)
- # Remove any spaces from provided fingerprints
- # GPG outputs fingerprints with spaces for readability, but requires the
- # spaces to be removed when using fingerprints to specify recipients.
if not isinstance(fingerprints, (list, tuple)):
fingerprints = [fingerprints, ]
+ # Remove any spaces from provided fingerprints. GPG outputs fingerprints
+ # with spaces for readability, but requires the spaces to be removed when
+ # using fingerprints to specify recipients.
fingerprints = [fpr.replace(' ', '') for fpr in fingerprints]
if not _is_stream(plaintext):
@@ -172,14 +174,15 @@ def encrypt(plaintext, fingerprints, output=None):
def decrypt(secret, ciphertext):
"""
>>> key = genkeypair('randomid', 'randomid')
- >>> decrypt('randomid', 'randomid',
- ... encrypt('randomid', 'Goodbye, cruel world!')
+ >>> decrypt('randomid',
+ ... encrypt('Goodbye, cruel world!', str(key))
... )
'Goodbye, cruel world!'
"""
hashed_codename = hash_codename(secret, salt=SCRYPT_GPG_PEPPER)
return gpg.decrypt(ciphertext, passphrase=hashed_codename).data
-if __name__ == "__main__":
+
+if __name__ == "__main__": # pragma: no cover
import doctest
doctest.testmod()
diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -23,9 +23,9 @@
import qrcode.image.svg
import config
-import crypto_util
import store
+
LOGIN_HARDENING = True
# Unfortunately, the login hardening measures mess with the tests in
# non-deterministic ways. TODO rewrite the tests so we can more
@@ -40,7 +40,7 @@
config.DATABASE_ENGINE + ":///" +
config.DATABASE_FILE
)
-else:
+else: # pragma: no cover
engine = create_engine(
config.DATABASE_ENGINE + '://' +
config.DATABASE_USERNAME + ':' +
@@ -60,7 +60,9 @@ def get_one_or_else(query, logger, failure_method):
try:
return query.one()
except MultipleResultsFound as e:
- logger.error("Found multiple while executing %s when one was expected: %s" % (query, e, ))
+ logger.error(
+ "Found multiple while executing %s when one was expected: %s" %
+ (query, e, ))
failure_method(500)
except NoResultFound as e:
logger.error("Found none when one was expected: %s" % (e,))
@@ -76,13 +78,15 @@ class Source(Base):
last_updated = Column(DateTime, default=datetime.datetime.utcnow)
star = relationship("SourceStar", uselist=False, backref="source")
- # sources are "pending" and don't get displayed to journalists until they submit something
+ # sources are "pending" and don't get displayed to journalists until they
+ # submit something
pending = Column(Boolean, default=True)
# keep track of how many interactions have happened, for filenames
interaction_count = Column(Integer, default=0, nullable=False)
# Don't create or bother checking excessively long codenames to prevent DoS
+ NUM_WORDS = 7
MAX_CODENAME_LEN = 128
def __init__(self, filesystem_id=None, journalist_designation=None):
@@ -95,7 +99,8 @@ def __repr__(self):
@property
def journalist_filename(self):
valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in self.journalist_designation.lower().replace(' ', '_') if c in valid_chars])
+ return ''.join([c for c in self.journalist_designation.lower().replace(
+ ' ', '_') if c in valid_chars])
def documents_messages_count(self):
try:
@@ -105,7 +110,8 @@ def documents_messages_count(self):
for submission in self.submissions:
if submission.filename.endswith('msg.gpg'):
self.docs_msgs_count['messages'] += 1
- elif submission.filename.endswith('doc.gz.gpg') or submission.filename.endswith('doc.zip.gpg'):
+ elif (submission.filename.endswith('doc.gz.gpg') or
+ submission.filename.endswith('doc.zip.gpg')):
self.docs_msgs_count['documents'] += 1
return self.docs_msgs_count
@@ -124,7 +130,11 @@ class Submission(Base):
__tablename__ = 'submissions'
id = Column(Integer, primary_key=True)
source_id = Column(Integer, ForeignKey('sources.id'))
- source = relationship("Source", backref=backref('submissions', order_by=id))
+ source = relationship(
+ "Source",
+ backref=backref("submissions", order_by=id, cascade="delete")
+ )
+
filename = Column(String(255), nullable=False)
size = Column(Integer, nullable=False)
downloaded = Column(Boolean, default=False)
@@ -143,10 +153,17 @@ class Reply(Base):
id = Column(Integer, primary_key=True)
journalist_id = Column(Integer, ForeignKey('journalists.id'))
- journalist = relationship("Journalist", backref=backref('replies', order_by=id))
+ journalist = relationship(
+ "Journalist",
+ backref=backref(
+ 'replies',
+ order_by=id))
source_id = Column(Integer, ForeignKey('sources.id'))
- source = relationship("Source", backref=backref('replies', order_by=id))
+ source = relationship(
+ "Source",
+ backref=backref("replies", order_by=id, cascade="delete")
+ )
filename = Column(String(255), nullable=False)
size = Column(Integer, nullable=False)
@@ -169,7 +186,8 @@ class SourceStar(Base):
def __eq__(self, other):
if isinstance(other, SourceStar):
- return self.source_id == other.source_id and self.id == other.id and self.starred == other.starred
+ return (self.source_id == other.source_id and
+ self.id == other.id and self.starred == other.starred)
return NotImplemented
def __init__(self, source, starred=True):
@@ -178,18 +196,23 @@ def __init__(self, source, starred=True):
class InvalidUsernameException(Exception):
+
"""Raised when a user logs in with an invalid username"""
class LoginThrottledException(Exception):
- """Raised when a user attempts to log in too many times in a given time period"""
+
+ """Raised when a user attempts to log in
+ too many times in a given time period"""
class WrongPasswordException(Exception):
+
"""Raised when a user logs in with an incorrect password"""
class BadTokenException(Exception):
+
+ """Raised when a user logs in with an incorrect TOTP token"""
@@ -203,6 +226,10 @@ def __init__(self, password):
def __str__(self):
if self.pw_len > Journalist.MAX_PASSWORD_LEN:
return "Password too long (len={})".format(self.pw_len)
+ if self.pw_len < Journalist.MIN_PASSWORD_LEN:
+ return "Password needs to be at least {} characters".format(
+ Journalist.MIN_PASSWORD_LEN
+ )
class Journalist(Base):
@@ -220,7 +247,9 @@ class Journalist(Base):
created_on = Column(DateTime, default=datetime.datetime.utcnow)
last_access = Column(DateTime)
- login_attempts = relationship("JournalistLoginAttempt", backref="journalist")
+ login_attempts = relationship(
+ "JournalistLoginAttempt",
+ backref="journalist")
def __init__(self, username, password, is_admin=False, otp_secret=None):
self.username = username
@@ -230,8 +259,9 @@ def __init__(self, username, password, is_admin=False, otp_secret=None):
self.set_hotp_secret(otp_secret)
def __repr__(self):
- return "<Journalist {0}{1}>".format(self.username,
- " [admin]" if self.is_admin else "")
+ return "<Journalist {0}{1}>".format(
+ self.username,
+ " [admin]" if self.is_admin else "")
def _gen_salt(self, salt_bytes=32):
return os.urandom(salt_bytes)
@@ -244,11 +274,18 @@ def _scrypt_hash(self, password, salt, params=None):
return scrypt.hash(str(password), salt, **params)
MAX_PASSWORD_LEN = 128
+ MIN_PASSWORD_LEN = 12
def set_password(self, password):
+ # Don't do anything if user's password hasn't changed.
+ if self.pw_hash and self.valid_password(password):
+ return
# Enforce a reasonable maximum length for passwords to avoid DoS
if len(password) > self.MAX_PASSWORD_LEN:
raise InvalidPasswordLength(password)
+ # Enforce a reasonable minimum length for new passwords
+ if len(password) < self.MIN_PASSWORD_LEN:
+ raise InvalidPasswordLength(password)
self.pw_salt = self._gen_salt()
self.pw_hash = self._scrypt_hash(password, self.pw_salt)
@@ -256,14 +293,22 @@ def valid_password(self, password):
# Avoid hashing passwords that are over the maximum length
if len(password) > self.MAX_PASSWORD_LEN:
raise InvalidPasswordLength(password)
- return self._scrypt_hash(password, self.pw_salt) == self.pw_hash
+ # No check on minimum password length here because some passwords
+ # may have been set prior to setting the minimum password length.
+ return pyotp.utils.compare_digest(
+ self._scrypt_hash(password, self.pw_salt),
+ self.pw_hash)
def regenerate_totp_shared_secret(self):
self.otp_secret = pyotp.random_base32()
def set_hotp_secret(self, otp_secret):
self.is_totp = False
- self.otp_secret = base64.b32encode(binascii.unhexlify(otp_secret.replace(" ", "")))
+ self.otp_secret = base64.b32encode(
+ binascii.unhexlify(
+ otp_secret.replace(
+ " ",
+ "")))
self.hotp_counter = 0
@property
@@ -276,7 +321,9 @@ def hotp(self):
@property
def shared_secret_qrcode(self):
- uri = self.totp.provisioning_uri(self.username, issuer_name="SecureDrop")
+ uri = self.totp.provisioning_uri(
+ self.username,
+ issuer_name="SecureDrop")
qr = qrcode.QRCode(
box_size=15,
@@ -295,46 +342,42 @@ def formatted_otp_secret(self):
lowercase and split into four groups of four characters. The secret is
base32-encoded, so it is case insensitive."""
sec = self.otp_secret
- chunks = [sec[i:i + 4] for i in xrange(0, len(sec), 4)]
+ chunks = [sec[i:i + 4] for i in range(0, len(sec), 4)]
return ' '.join(chunks).lower()
def _format_token(self, token):
- """Strips from authentication tokens the whitespace that many clients add for readability"""
+ """Strips from authentication tokens the whitespace
+ that many clients add for readability"""
return ''.join(token.split())
def verify_token(self, token):
token = self._format_token(token)
- # Only allow each authentication token to be used once. This
- # prevents some MITM attacks.
- if token == self.last_token and LOGIN_HARDENING:
- raise BadTokenException("previously used token {}".format(token))
- else:
- self.last_token = token
- db_session.commit()
+ # Store latest token to prevent OTP token reuse
+ self.last_token = token
+ db_session.commit()
if self.is_totp:
# Also check the given token against the previous and next
# valid tokens, to compensate for potential time skew
# between the client and the server. The total valid
# window is 1:30s.
- now = datetime.datetime.now()
- interval = datetime.timedelta(seconds=30)
- times = [now - interval, now, now + interval]
- return any([self.totp.verify(token, for_time=time) for time in times])
+ return self.totp.verify(token, valid_window=1)
else:
- for counter_val in range(self.hotp_counter, self.hotp_counter + 20):
+ for counter_val in range(
+ self.hotp_counter,
+ self.hotp_counter + 20):
if self.hotp.verify(token, counter_val):
self.hotp_counter = counter_val + 1
db_session.commit()
return True
return False
+ _LOGIN_ATTEMPT_PERIOD = 60 # seconds
+ _MAX_LOGIN_ATTEMPTS_PER_PERIOD = 5
+
@classmethod
def throttle_login(cls, user):
- _LOGIN_ATTEMPT_PERIOD = 60 # seconds
- _MAX_LOGIN_ATTEMPTS_PER_PERIOD = 5
-
# Record the login attempt...
login_attempt = JournalistLoginAttempt(user)
db_session.add(login_attempt)
@@ -342,23 +385,31 @@ def throttle_login(cls, user):
# ...and reject it if they have exceeded the threshold
login_attempt_period = datetime.datetime.utcnow() - \
- datetime.timedelta(seconds=_LOGIN_ATTEMPT_PERIOD)
+ datetime.timedelta(seconds=cls._LOGIN_ATTEMPT_PERIOD)
attempts_within_period = JournalistLoginAttempt.query.filter(
JournalistLoginAttempt.timestamp > login_attempt_period).all()
- if len(attempts_within_period) > _MAX_LOGIN_ATTEMPTS_PER_PERIOD:
- raise LoginThrottledException("throttled ({} attempts in last {} seconds)".format(
- len(attempts_within_period), _LOGIN_ATTEMPT_PERIOD))
+ if len(attempts_within_period) > cls._MAX_LOGIN_ATTEMPTS_PER_PERIOD:
+ raise LoginThrottledException(
+ "throttled ({} attempts in last {} seconds)".format(
+ len(attempts_within_period),
+ cls._LOGIN_ATTEMPT_PERIOD))
@classmethod
def login(cls, username, password, token):
try:
user = Journalist.query.filter_by(username=username).one()
except NoResultFound:
- raise InvalidUsernameException("invalid username '{}'".format(username))
+ raise InvalidUsernameException(
+ "invalid username '{}'".format(username))
if LOGIN_HARDENING:
cls.throttle_login(user)
+ # Prevent TOTP token reuse
+ if user.last_token is not None:
+ if pyotp.utils.compare_digest(token, user.last_token):
+ raise BadTokenException("previously used token "
+ "{}".format(token))
if not user.verify_token(token):
raise BadTokenException("invalid token")
if not user.valid_password(password):
@@ -367,9 +418,10 @@ def login(cls, username, password, token):
class JournalistLoginAttempt(Base):
+
"""This model keeps track of journalist's login attempts so we can
rate limit them in order to prevent attackers from brute forcing
- passwords or two factor tokens."""
+ passwords or two-factor tokens."""
__tablename__ = "journalist_login_attempt"
id = Column(Integer, primary_key=True)
timestamp = Column(DateTime, default=datetime.datetime.utcnow)
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -1,12 +1,13 @@
# -*- coding: utf-8 -*-
-import sys
+
import os
from datetime import datetime
import functools
from flask import (Flask, request, render_template, send_file, redirect, flash,
url_for, g, abort, session)
-from flask_wtf.csrf import CsrfProtect
+from flask_wtf.csrf import CSRFProtect
+from flask_assets import Environment
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
from sqlalchemy.exc import IntegrityError
@@ -16,14 +17,15 @@
import store
import template_filters
from db import (db_session, Source, Journalist, Submission, Reply,
- SourceStar, get_one_or_else, NoResultFound,
- WrongPasswordException, BadTokenException,
+ SourceStar, get_one_or_else, WrongPasswordException,
LoginThrottledException, InvalidPasswordLength)
import worker
app = Flask(__name__, template_folder=config.JOURNALIST_TEMPLATES_DIR)
app.config.from_object(config.JournalistInterfaceFlaskConfig)
-CsrfProtect(app)
+CSRFProtect(app)
+
+assets = Environment(app)
app.jinja_env.globals['version'] = version.__version__
if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
@@ -112,10 +114,14 @@ def login():
login_flashed_msg = "Login failed."
if isinstance(e, LoginThrottledException):
- login_flashed_msg += " Please wait at least 60 seconds before logging in again."
+ login_flashed_msg += (
+ " Please wait at least {} seconds "
+ "before logging in again.".format(
+ Journalist._LOGIN_ATTEMPT_PERIOD))
else:
try:
- user = Journalist.query.filter_by(username=request.form['username']).one()
+ user = Journalist.query.filter_by(
+ username=request.form['username']).one()
if user.is_totp:
login_flashed_msg += " Please wait for a new two-factor token before logging in again."
except:
@@ -182,16 +188,21 @@ def admin_add_user():
db_session.commit()
except InvalidPasswordLength:
form_valid = False
- flash("Your password is too long (maximum length {} characters)".format(
- Journalist.MAX_PASSWORD_LEN), "error")
+ flash("Your password must be between {} and {} characters.".format(
+ Journalist.MIN_PASSWORD_LEN, Journalist.MAX_PASSWORD_LEN
+ ), "error")
except IntegrityError as e:
+ db_session.rollback()
form_valid = False
- if "username is not unique" in str(e):
+ if "UNIQUE constraint failed: journalists.username" in str(e):
flash("That username is already in use",
"error")
else:
- flash("An error occurred saving this user to the database",
+ flash("An error occurred saving this user to the database."
+ " Please check the application logs.",
"error")
+ app.logger.error("Adding user '{}' failed: {}".format(
+ username, e))
if form_valid:
return redirect(url_for('admin_new_user_two_factor',
@@ -208,10 +219,13 @@ def admin_new_user_two_factor():
if request.method == 'POST':
token = request.form['token']
if user.verify_token(token):
- flash("Two factor token successfully verified for user {}!".format(user.username), "notification")
+ flash(
+ "Two-factor token successfully verified for user {}!".format(
+ user.username),
+ "notification")
return redirect(url_for("admin_index"))
else:
- flash("Two factor token failed to verify", "error")
+ flash("Two-factor token failed to verify", "error")
return render_template("admin_new_user_two_factor.html", user=user)
@@ -234,58 +248,163 @@ def admin_reset_two_factor_hotp():
otp_secret = request.form.get('otp_secret', None)
if otp_secret:
user = Journalist.query.get(uid)
- user.set_hotp_secret(otp_secret)
- db_session.commit()
- return redirect(url_for('admin_new_user_two_factor', uid=uid))
+ try:
+ user.set_hotp_secret(otp_secret)
+ except TypeError as e:
+ if "Non-hexadecimal digit found" in str(e):
+ flash("Invalid secret format: "
+ "please only submit letters A-F and numbers 0-9.",
+ "error")
+ elif "Odd-length string" in str(e):
+ flash("Invalid secret format: "
+ "odd-length secret. Did you mistype the secret?",
+ "error")
+ else:
+ flash("An unexpected error occurred! "
+ "Please check the application "
+ "logs or inform your adminstrator.", "error")
+ app.logger.error(
+ "set_hotp_secret '{}' (id {}) failed: {}".format(
+ otp_secret, uid, e))
+ return render_template('admin_edit_hotp_secret.html', uid=uid)
+ else:
+ db_session.commit()
+ return redirect(url_for('admin_new_user_two_factor', uid=uid))
else:
return render_template('admin_edit_hotp_secret.html', uid=uid)
+class PasswordMismatchError(Exception):
+ pass
+
+
+def edit_account_password(user, password, password_again):
+ if password:
+ if password != password_again:
+ flash("Passwords didn't match!", "error")
+ raise PasswordMismatchError
+ try:
+ user.set_password(password)
+ except InvalidPasswordLength:
+ flash("Your password must be between {} and {} characters.".format(
+ Journalist.MIN_PASSWORD_LEN, Journalist.MAX_PASSWORD_LEN
+ ), "error")
+ raise
+
+
+def commit_account_changes(user):
+ if db_session.is_modified(user):
+ try:
+ db_session.add(user)
+ db_session.commit()
+ except Exception as e:
+ flash("An unexpected error occurred! Please check the application "
+ "logs or inform your adminstrator.", "error")
+ app.logger.error("Account changes for '{}' failed: {}".format(user,
+ e))
+ db_session.rollback()
+ else:
+ flash("Account successfully updated!", "success")
+
+
@app.route('/admin/edit/<int:user_id>', methods=('GET', 'POST'))
@admin_required
def admin_edit_user(user_id):
user = Journalist.query.get(user_id)
if request.method == 'POST':
- if request.form['username'] != "":
- user.username = request.form['username']
-
- if request.form['password'] != "":
- if request.form['password'] != request.form['password_again']:
- flash("Passwords didn't match", "error")
- return redirect(url_for("admin_edit_user", user_id=user_id))
- try:
- user.set_password(request.form['password'])
- except InvalidPasswordLength:
- flash("Your password is too long "
- "(maximum length {} characters)".format(
- Journalist.MAX_PASSWORD_LEN), "error")
+ if request.form['username']:
+ new_username = request.form['username']
+ if new_username == user.username:
+ pass
+ elif Journalist.query.filter_by(
+ username=new_username).one_or_none():
+ flash('Username "{}" is already taken!'.format(new_username),
+ "error")
return redirect(url_for("admin_edit_user", user_id=user_id))
+ else:
+ user.username = new_username
+
+ try:
+ edit_account_password(user, request.form['password'],
+ request.form['password_again'])
+ except (PasswordMismatchError, InvalidPasswordLength):
+ return redirect(url_for("admin_edit_user", user_id=user_id))
user.is_admin = bool(request.form.get('is_admin'))
- try:
- db_session.add(user)
- db_session.commit()
- except Exception, e:
- db_session.rollback()
- if "username is not unique" in str(e):
- flash("That username is already in use", "notification")
- else:
- flash("An unknown error occurred, please inform your administrator", "error")
+ commit_account_changes(user)
- return render_template("admin_edit_user.html", user=user)
+ return render_template("edit_account.html", user=user)
@app.route('/admin/delete/<int:user_id>', methods=('POST',))
@admin_required
def admin_delete_user(user_id):
user = Journalist.query.get(user_id)
- db_session.delete(user)
- db_session.commit()
+ if user:
+ db_session.delete(user)
+ db_session.commit()
+ flash("Deleted user '{}'".format(user.username), "notification")
+ else:
+ app.logger.error(
+ "Admin {} tried to delete nonexistent user with pk={}".format(
+ g.user.username, user_id))
+ abort(404)
+
return redirect(url_for('admin_index'))
[email protected]('/account', methods=('GET', 'POST'))
+@login_required
+def edit_account():
+ if request.method == 'POST':
+ try:
+ edit_account_password(g.user, request.form['password'],
+ request.form['password_again'])
+ except (PasswordMismatchError, InvalidPasswordLength):
+ return redirect(url_for('edit_account'))
+
+ commit_account_changes(g.user)
+
+ return render_template('edit_account.html')
+
+
[email protected]('/account/2fa', methods=('GET', 'POST'))
+@login_required
+def account_new_two_factor():
+ if request.method == 'POST':
+ token = request.form['token']
+ if g.user.verify_token(token):
+ flash("Two-factor token successfully verified!", "notification")
+ return redirect(url_for('edit_account'))
+ else:
+ flash("Two-factor token failed to verify", "error")
+
+ return render_template('account_new_two_factor.html', user=g.user)
+
+
[email protected]('/account/reset-2fa-totp', methods=['POST'])
+@login_required
+def account_reset_two_factor_totp():
+ g.user.is_totp = True
+ g.user.regenerate_totp_shared_secret()
+ db_session.commit()
+ return redirect(url_for('account_new_two_factor'))
+
+
[email protected]('/account/reset-2fa-hotp', methods=['POST'])
+@login_required
+def account_reset_two_factor_hotp():
+ otp_secret = request.form.get('otp_secret', None)
+ if otp_secret:
+ g.user.set_hotp_secret(otp_secret)
+ db_session.commit()
+ return redirect(url_for('account_new_two_factor'))
+ else:
+ return render_template('account_edit_hotp_secret.html')
+
+
def make_star_true(sid):
source = get_source(sid)
if source.star:
@@ -355,7 +474,7 @@ def col(sid):
def delete_collection(source_id):
# Delete the source's collection of submissions
- worker.enqueue(store.delete_source_directory, source_id)
+ job = worker.enqueue(store.delete_source_directory, source_id)
# Delete the source's reply keypair
crypto_util.delete_reply_keypair(source_id)
@@ -364,16 +483,21 @@ def delete_collection(source_id):
source = get_source(source_id)
db_session.delete(source)
db_session.commit()
+ return job
@app.route('/col/process', methods=('POST',))
@login_required
def col_process():
- actions = {'delete': col_delete, 'star': col_star, 'un-star': col_un_star}
+ actions = {'download-unread': col_download_unread,
+ 'download-all': col_download_all, 'star': col_star,
+ 'un-star': col_un_star, 'delete': col_delete}
if 'cols_selected' not in request.form:
+ flash('No collections selected!', 'error')
return redirect(url_for('index'))
- cols_selected = request.form.getlist('cols_selected') # getlist is cgi.FieldStorage.getlist
+ # getlist is cgi.FieldStorage.getlist
+ cols_selected = request.form.getlist('cols_selected')
action = request.form['action']
if action not in actions:
@@ -383,6 +507,28 @@ def col_process():
return method(cols_selected)
+def col_download_unread(cols_selected):
+ """Download all unread submissions from all selected sources."""
+ submissions = []
+ for sid in cols_selected:
+ id = Source.query.filter(Source.filesystem_id == sid).one().id
+ submissions += Submission.query.filter(Submission.downloaded == False,
+ Submission.source_id == id).all()
+ if submissions == []:
+ flash("No unread submissions in collections selected!", "error")
+ return redirect(url_for('index'))
+ return download("unread", submissions)
+
+
+def col_download_all(cols_selected):
+ """Download all submissions from all selected sources."""
+ submissions = []
+ for sid in cols_selected:
+ id = Source.query.filter(Source.filesystem_id == sid).one().id
+ submissions += Submission.query.filter(Submission.source_id == id).all()
+ return download("all", submissions)
+
+
def col_star(cols_selected):
for sid in cols_selected:
make_star_true(sid)
@@ -405,7 +551,9 @@ def col_delete_single(sid):
"""deleting a single collection from its /col page"""
source = get_source(sid)
delete_collection(sid)
- flash("%s's collection deleted" % (source.journalist_designation,), "notification")
+ flash(
+ "%s's collection deleted" %
+ (source.journalist_designation,), "notification")
return redirect(url_for('index'))
@@ -426,32 +574,68 @@ def col_delete(cols_selected):
@app.route('/col/<sid>/<fn>')
@login_required
-def doc(sid, fn):
+def download_single_submission(sid, fn):
+ """Sends a client the contents of a single submission."""
if '..' in fn or fn.startswith('/'):
abort(404)
+
try:
- Submission.query.filter(Submission.filename == fn).one().downloaded = True
+ Submission.query.filter(
+ Submission.filename == fn).one().downloaded = True
+ db_session.commit()
except NoResultFound as e:
app.logger.error("Could not mark " + fn + " as downloaded: %s" % (e,))
- db_session.commit()
+
return send_file(store.path(sid, fn), mimetype="application/pgp-encrypted")
@app.route('/reply', methods=('POST',))
@login_required
def reply():
+ """Attempt to send a Reply from a Journalist to a Source. Empty
+ messages are rejected, and an informative error message is flashed
+ on the client. In the case of unexpected errors involving database
+ transactions (potentially caused by racing request threads that
+    modify the same database object) logging is done in such a way
+ so as not to write potentially sensitive information to disk, and a
+ generic error message is flashed on the client.
+
+ Returns:
+ flask.Response: The user is redirected to the same Source
+ collection view, regardless if the Reply is created
+ successfully.
+ """
+ msg = request.form['msg']
+ # Reject empty replies
+ if not msg:
+ flash("You cannot send an empty reply!", "error")
+ return redirect(url_for('col', sid=g.sid))
+
g.source.interaction_count += 1
filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count,
g.source.journalist_filename)
- crypto_util.encrypt(request.form['msg'],
+ crypto_util.encrypt(msg,
[crypto_util.getkey(g.sid), config.JOURNALIST_KEY],
output=store.path(g.sid, filename))
reply = Reply(g.user, g.source, filename)
- db_session.add(reply)
- db_session.commit()
- flash("Thanks! Your reply has been stored.", "notification")
- return redirect(url_for('col', sid=g.sid))
+ try:
+ db_session.add(reply)
+ db_session.commit()
+ except Exception as exc:
+ flash("An unexpected error occurred! Please check the application "
+              "logs or inform your administrator.", "error")
+ # We take a cautious approach to logging here because we're dealing
+ # with responses to sources. It's possible the exception message could
+ # contain information we don't want to write to disk.
+ app.logger.error(
+ "Reply from '{}' (id {}) failed: {}!".format(g.user.username,
+ g.user.id,
+ exc.__class__))
+ else:
+ flash("Thanks! Your reply has been stored.", "notification")
+ finally:
+ return redirect(url_for('col', sid=g.sid))
@app.route('/regenerate-code', methods=('POST',))
@@ -461,21 +645,31 @@ def generate_code():
g.source.journalist_designation = crypto_util.display_id()
for item in g.source.collection:
- item.filename = store.rename_submission(g.sid, item.filename, g.source.journalist_filename)
+ item.filename = store.rename_submission(
+ g.sid,
+ item.filename,
+ g.source.journalist_filename)
db_session.commit()
- flash("The source '%s' has been renamed to '%s'" % (original_journalist_designation, g.source.journalist_designation), "notification")
+ flash(
+ "The source '%s' has been renamed to '%s'" %
+ (original_journalist_designation,
+ g.source.journalist_designation),
+ "notification")
return redirect('/col/' + g.sid)
@app.route('/download_unread/<sid>')
@login_required
-def download_unread(sid):
+def download_unread_sid(sid):
id = Source.query.filter(Source.filesystem_id == sid).one().id
- docs = Submission.query.filter(
- Submission.source_id == id,
- Submission.downloaded == False).all()
- return bulk_download(sid, docs)
+ submissions = Submission.query.filter(Submission.source_id == id,
+ Submission.downloaded == False).all()
+ if submissions == []:
+ flash("No unread submissions for this source!")
+ return redirect(url_for('col', sid=sid))
+ source = get_source(sid)
+ return download(source.journalist_filename, submissions)
@app.route('/bulk', methods=('POST',))
@@ -486,16 +680,16 @@ def bulk():
doc_names_selected = request.form.getlist('doc_names_selected')
selected_docs = [doc for doc in g.source.collection
if doc.filename in doc_names_selected]
-
if selected_docs == []:
if action == 'download':
flash("No collections selected to download!", "error")
- elif action == 'delete' or action == 'confirm_delete':
+ elif action in ('delete', 'confirm_delete'):
flash("No collections selected to delete!", "error")
return redirect(url_for('col', sid=g.sid))
if action == 'download':
- return bulk_download(g.sid, selected_docs)
+ source = get_source(g.sid)
+ return download(source.journalist_filename, selected_docs)
elif action == 'delete':
return bulk_delete(g.sid, selected_docs)
elif action == 'confirm_delete':
@@ -518,23 +712,34 @@ def bulk_delete(sid, items_selected):
db_session.delete(item)
db_session.commit()
- flash("Submission{} deleted.".format("s" if len(items_selected) > 1 else ""), "notification")
+ flash(
+ "Submission{} deleted.".format(
+ "s" if len(items_selected) > 1 else ""),
+ "notification")
return redirect(url_for('col', sid=sid))
-def bulk_download(sid, items_selected):
- source = get_source(sid)
- filenames = [store.path(sid, item.filename) for item in items_selected]
+def download(zip_basename, submissions):
+ """Send client contents of zipfile *zip_basename*-<timestamp>.zip
+ containing *submissions*. The zipfile, being a
+ :class:`tempfile.NamedTemporaryFile`, is stored on disk only
+ temporarily.
- # Mark the submissions that are about to be downloaded as such
- for item in items_selected:
- if isinstance(item, Submission):
- item.downloaded = True
+ :param str zip_basename: The basename of the zipfile download.
+
+ :param list submissions: A list of :class:`db.Submission`s to
+ include in the zipfile.
+ """
+ zf = store.get_bulk_archive(submissions,
+ zip_directory=zip_basename)
+ attachment_filename = "{}--{}.zip".format(
+ zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
+
+ # Mark the submissions that have been downloaded as such
+ for submission in submissions:
+ submission.downloaded = True
db_session.commit()
- zf = store.get_bulk_archive(filenames, zip_directory=source.journalist_filename)
- attachment_filename = "{}--{}.zip".format(source.journalist_filename,
- datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
return send_file(zf.name, mimetype="application/zip",
attachment_filename=attachment_filename,
as_attachment=True)
@@ -549,13 +754,6 @@ def flag():
codename=g.source.journalist_designation)
-def write_pidfile():
- pid = str(os.getpid())
- with open(config.JOURNALIST_PIDFILE, 'w') as fp:
- fp.write(pid)
-
-
-if __name__ == "__main__":
- write_pidfile()
- # TODO make sure debug=False in production
- app.run(debug=True, host='0.0.0.0', port=8081)
+if __name__ == "__main__": # pragma: no cover
+ debug = getattr(config, 'env', 'prod') != 'prod'
+ app.run(debug=debug, host='0.0.0.0', port=8081)
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -1,221 +1,184 @@
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
-import sys
+import argparse
+from getpass import getpass
import os
import shutil
-import subprocess
-import unittest
-import readline # makes the add_admin prompt kick ass
-from getpass import getpass
import signal
-from time import sleep
+import sys
+import traceback
-import qrcode
import psutil
+import qrcode
+from sqlalchemy.orm.exc import NoResultFound
-from db import db_session, Journalist
-
-# We need to import config in each function because we're running the tests
-# directly, so it's important to set the environment correctly, depending on
-# development or testing, before importing config.
-#
-# TODO: do we need to store *_PIDFILE in the application config? It seems like
-# an implementation detail that is specifc to this management script.
-
-os.environ['SECUREDROP_ENV'] = 'dev'
-
-WORKER_PIDFILE = "/tmp/test_rqworker.pid"
-
-
-def get_pid_from_pidfile(pid_file_name):
- with open(pid_file_name) as fp:
- return int(fp.read())
+os.environ['SECUREDROP_ENV'] = 'dev' # noqa
+import config
+from db import db_session, init_db, Journalist
+from management import run
-def _start_test_rqworker(config):
- # needed to determine the directory to run the worker in
- worker_running = False
+def reset(): # pragma: no cover
+ """Clears the SecureDrop development applications' state, restoring them to
+ the way they were immediately after running `setup_dev.sh`. This command:
+ 1. Erases the development sqlite database file.
+ 2. Regenerates the database.
+ 3. Erases stored submissions and replies from the store dir.
+ """
+ # Erase the development db file
+ assert hasattr(config, 'DATABASE_FILE'), ("TODO: ./manage.py doesn't know "
+ 'how to clear the db if the '
+ 'backend is not sqlite')
try:
- if psutil.pid_exists(get_pid_from_pidfile(WORKER_PIDFILE)):
- worker_running = True
- except IOError:
+ os.remove(config.DATABASE_FILE)
+ except OSError:
pass
- if not worker_running:
- tmp_logfile = open("/tmp/test_rqworker.log", "w")
- subprocess.Popen(
- [
- "rqworker", "test",
- "-P", config.SECUREDROP_ROOT,
- "--pid", WORKER_PIDFILE,
- ],
- stdout=tmp_logfile,
- stderr=subprocess.STDOUT)
-
-
-def _stop_test_rqworker():
- os.kill(get_pid_from_pidfile(WORKER_PIDFILE), signal.SIGTERM)
-
-
-def start():
- import config
- source_rc = subprocess.call(['start-stop-daemon', '--start', '-b', '--quiet', '--pidfile',
- config.SOURCE_PIDFILE, '--startas', '/bin/bash', '--', '-c', 'cd /vagrant/securedrop && python source.py'])
- journo_rc = subprocess.call(['start-stop-daemon', '--start', '-b', '--quiet', '--pidfile',
- config.JOURNALIST_PIDFILE, '--startas', '/bin/bash', '--', '-c', 'cd /vagrant/securedrop && python journalist.py'])
-
- if source_rc + journo_rc == 0:
- print "The web application is running, and available on your Vagrant host at the following addresses:"
- print "Source interface: localhost:8080"
- print "Journalist interface: localhost:8081"
- else:
- print "The web application is already running. Please use './manage.py restart' to stop and start again."
-
+ # Regenerate the database
+ init_db()
-def stop():
- import config
- source_rc = subprocess.call(
- ['start-stop-daemon', '--stop', '--quiet', '--pidfile', config.SOURCE_PIDFILE])
- journo_rc = subprocess.call(
- ['start-stop-daemon', '--stop', '--quiet', '--pidfile', config.JOURNALIST_PIDFILE])
- if source_rc + journo_rc == 0:
- print "The web application has been stopped."
+ # Clear submission/reply storage
+ try:
+ os.stat(config.STORE_DIR)
+ except OSError:
+ pass
else:
- print "There was a problem stopping the web application."
-
-
-def restart():
- stop()
- sleep(0.1)
- start()
-
-
-def test():
- """
- Runs the test suite
- """
- os.environ['SECUREDROP_ENV'] = 'test'
- import config
- _start_test_rqworker(config)
- test_cmds = [["py.test", "--cov"], "./test.sh"]
- test_rc = int(any([subprocess.call(cmd) for cmd in test_cmds]))
- _stop_test_rqworker()
- sys.exit(test_rc)
-
-
-def test_unit():
- """
- Runs the unit tests.
- """
- os.environ['SECUREDROP_ENV'] = 'test'
- import config
- _start_test_rqworker(config)
- test_rc = int(subprocess.call(["py.test", "--cov"]))
- _stop_test_rqworker()
- sys.exit(test_rc)
-
+ for source_dir in os.listdir(config.STORE_DIR):
+ try:
+ # Each entry in STORE_DIR is a directory corresponding
+ # to a source
+ shutil.rmtree(os.path.join(config.STORE_DIR, source_dir))
+ except OSError:
+ pass
+ return 0
-def reset():
- """
- Clears the SecureDrop development application's state, restoring it to the
- way it was immediately after running `setup_dev.sh`. This command:
- 1. Erases the development sqlite database file
- 2. Regenerates the database
- 3. Erases stored submissions and replies from the store dir
- """
- import config
- import db
- # Erase the development db file
- assert hasattr(config,
- 'DATABASE_FILE'), "TODO: ./manage.py doesn't know how to clear the db if the backend is not sqlite"
- os.remove(config.DATABASE_FILE)
+def add_admin(): # pragma: no cover
+ return _add_user(is_admin=True)
- # Regenerate the database
- db.init_db()
- # Clear submission/reply storage
- for source_dir in os.listdir(config.STORE_DIR):
- # Each entry in STORE_DIR is a directory corresponding to a source
- shutil.rmtree(os.path.join(config.STORE_DIR, source_dir))
+def add_journalist(): # pragma: no cover
+ return _add_user()
-def add_admin():
+def _add_user(is_admin=False): # pragma: no cover
while True:
- username = raw_input("Username: ")
- if Journalist.query.filter_by(username=username).first():
- print "Sorry, that username is already in use."
- else:
- break
-
- while True:
- password = getpass("Password: ")
- password_again = getpass("Confirm Password: ")
+ username = raw_input('Username: ')
+ password = getpass('Password: ')
+ password_again = getpass('Confirm Password: ')
if len(password) > Journalist.MAX_PASSWORD_LEN:
- print ("Your password is too long (maximum length {} characters). "
- "Please pick a shorter password.".format(
- Journalist.MAX_PASSWORD_LEN))
+ print('Your password is too long (maximum length {} characters). '
+ 'Please pick a shorter '
+ 'password.'.format(Journalist.MAX_PASSWORD_LEN))
+ continue
+
+ if len(password) < Journalist.MIN_PASSWORD_LEN:
+ print('Error: Password needs to be at least {} characters.'.format(
+ Journalist.MIN_PASSWORD_LEN
+ ))
continue
if password == password_again:
break
- print "Passwords didn't match!"
+ print("Passwords didn't match!")
- hotp_input = raw_input("Is this admin using a YubiKey [HOTP]? (y/N): ")
+ hotp_input = raw_input('Will this user be using a YubiKey [HOTP]? (y/N): ')
otp_secret = None
- if hotp_input.lower() == "y" or hotp_input.lower() == "yes":
+ if hotp_input.lower() in ('y', 'yes'):
while True:
- otp_secret = raw_input("Please configure your YubiKey and enter the secret: ")
+ otp_secret = raw_input(
+ 'Please configure your YubiKey and enter the secret: ')
if otp_secret:
break
try:
- admin = Journalist(username=username,
- password=password,
- is_admin=True,
- otp_secret=otp_secret)
- db_session.add(admin)
+ user = Journalist(username=username,
+ password=password,
+ is_admin=is_admin,
+ otp_secret=otp_secret)
+ db_session.add(user)
db_session.commit()
- except Exception, e:
- if "username is not unique" in str(e):
- print "ERROR: That username is already taken!"
+ except Exception as exc:
+ db_session.rollback()
+ if "UNIQUE constraint failed: journalists.username" in str(exc):
+ print('ERROR: That username is already taken!')
else:
- print "ERROR: An unexpected error occurred, traceback: \n{}".format(e)
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ print(repr(traceback.format_exception(exc_type, exc_value,
+ exc_traceback)))
+ return 1
else:
- print "Admin '{}' successfully added".format(username)
+ print('User "{}" successfully added'.format(username))
if not otp_secret:
- # Print the QR code for Google Authenticator
- print
- print "Scan the QR code below with Google Authenticator:"
- print
- uri = admin.totp.provisioning_uri(username, issuer_name="SecureDrop")
+ # Print the QR code for FreeOTP/ Google Authenticator
+ print('\nScan the QR code below with FreeOTP or Google '
+ 'Authenticator:\n')
+ uri = user.totp.provisioning_uri(username,
+ issuer_name='SecureDrop')
qr = qrcode.QRCode()
qr.add_data(uri)
qr.print_ascii(tty=sys.stdout.isatty())
- print
- print "If the barcode does not render correctly, try changing your terminal's font, (Monospace for Linux, Menlo for OS X)."
- print "If you are using iTerm on Mac OS X, you will need to change the \"Non-ASCII Font\", which is your profile's Text settings."
- print
- print "Can't scan the barcode? Enter the shared secret manually: {}".format(admin.formatted_otp_secret)
- print
-
-
-def clean_tmp():
- """Cleanup the SecureDrop temp directory. This is intended to be run as an
- automated cron job. We skip files that are currently in use to avoid
- deleting files that are currently being downloaded."""
- # Inspired by http://stackoverflow.com/a/11115521/1093000
- import config
+ print('\nIf the barcode does not render correctly, try changing '
+ "your terminal's font (Monospace for Linux, Menlo for OS "
+ 'X). If you are using iTerm on Mac OS X, you will need to '
+ 'change the "Non-ASCII Font", which is your profile\'s Text '
+              "settings.\n\nCan't scan the barcode? Enter the following "
+ 'shared secret '
+ 'manually:\n{}\n'.format(user.formatted_otp_secret))
+ return 0
+
+
+def delete_user(): # pragma: no cover
+ """Deletes a journalist or administrator from the application."""
+ # Select user to delete
+ username = raw_input('Username to delete: ')
+ try:
+ selected_user = Journalist.query.filter_by(username=username).one()
+ except NoResultFound:
+ print('ERROR: That user was not found!')
+ return 0
+
+ # Confirm deletion if user is found
+ confirmation = raw_input('Are you sure you want to delete user '
+ '{} (y/n)?'.format(selected_user))
+ if confirmation.lower() != 'y':
+ print('Confirmation not received: user "{}" was NOT '
+ 'deleted'.format(username))
+ return 0
+
+ # Try to delete user from the database
+ try:
+ db_session.delete(selected_user)
+ db_session.commit()
+ except:
+ # If the user was deleted between the user selection and confirmation,
+ # (e.g., through the web app), we don't report any errors. If the user
+ # is still there, but there was a error deleting them from the
+ # database, we do report it.
+ try:
+ selected_user = Journalist.query.filter_by(username=username).one()
+ except NoResultFound:
+ pass
+ else:
+ raise
- def file_in_use(fname):
- in_use = False
+ print('User "{}" successfully deleted'.format(username))
+ return 0
+
+def clean_tmp(): # pragma: no cover
+ """Cleanup the SecureDrop temp directory. This is intended to be run
+ as an automated cron job. We skip files that are currently in use to
+ avoid deleting files that are currently being downloaded."""
+ # Inspired by http://stackoverflow.com/a/11115521/1093000
+ def file_in_use(fname):
for proc in psutil.process_iter():
try:
open_files = proc.open_files()
- in_use = in_use or any([open_file.path == fname
- for open_file in open_files])
+ in_use = False or any([open_file.path == fname
+ for open_file in open_files])
# Early return for perf
if in_use:
break
@@ -231,26 +194,67 @@ def listdir_fullpath(d):
# Thanks to http://stackoverflow.com/a/120948/1093000
return [os.path.join(d, f) for f in os.listdir(d)]
- for path in listdir_fullpath(config.TEMP_DIR):
- if not file_in_use(path):
- os.remove(path)
-
-
-def main():
- valid_cmds = ["start", "stop", "test_unit", "test", "restart", "reset", "add_admin", "clean_tmp"]
- help_str = "./manage.py {{{0}}}".format(','.join(valid_cmds))
-
- if len(sys.argv) != 2 or sys.argv[1] not in valid_cmds:
- print help_str
- sys.exit(1)
-
- cmd = sys.argv[1]
-
try:
- getattr(sys.modules[__name__], cmd)()
+ os.stat(config.TEMP_DIR)
+ except OSError:
+ pass
+ else:
+ for path in listdir_fullpath(config.TEMP_DIR):
+ if not file_in_use(path):
+ os.remove(path)
+
+ return 0
+
+
+def get_args():
+ parser = argparse.ArgumentParser(prog=__file__, description='Management '
+ 'and testing utility for SecureDrop.')
+ subps = parser.add_subparsers()
+ # Run WSGI app
+ run_subp = subps.add_parser('run', help='Run the Werkzeug source & '
+ 'journalist WSGI apps. WARNING!!! For '
+ 'development only, not to be used in '
+ 'production.')
+ run_subp.set_defaults(func=run)
+ # Add/remove journalists + admins
+ admin_subp = subps.add_parser('add-admin', help='Add an admin to the '
+ 'application.')
+ admin_subp.set_defaults(func=add_admin)
+ admin_subp_a = subps.add_parser('add_admin', help='^')
+ admin_subp_a.set_defaults(func=add_admin)
+ journalist_subp = subps.add_parser('add-journalist', help='Add a '
+ 'journalist to the application.')
+ journalist_subp.set_defaults(func=add_journalist)
+ journalist_subp_a = subps.add_parser('add_journalist', help='^')
+ journalist_subp_a.set_defaults(func=add_journalist)
+ delete_user_subp = subps.add_parser('delete-user', help='Delete a user '
+ 'from the application.')
+ delete_user_subp.set_defaults(func=delete_user)
+ delete_user_subp_a = subps.add_parser('delete_user', help='^')
+ delete_user_subp_a.set_defaults(func=delete_user)
+
+ # Reset application state
+ reset_subp = subps.add_parser('reset', help='DANGER!!! Clears the '
+ "SecureDrop application's state.")
+ reset_subp.set_defaults(func=reset)
+ # Cleanup the SD temp dir
+ clean_tmp_subp = subps.add_parser('clean-tmp', help='Cleanup the '
+ 'SecureDrop temp directory.')
+ clean_tmp_subp.set_defaults(func=clean_tmp)
+ clean_tmp_subp_a = subps.add_parser('clean_tmp', help='^')
+ clean_tmp_subp_a.set_defaults(func=clean_tmp)
+
+ return parser
+
+
+def _run_from_commandline(): # pragma: no cover
+ try:
+ args = get_args().parse_args()
+ rc = args.func()
+ sys.exit(rc)
except KeyboardInterrupt:
- print # So our prompt appears on a nice new line
+ sys.exit(signal.SIGINT)
-if __name__ == "__main__":
- main()
+if __name__ == '__main__': # pragma: no cover
+ _run_from_commandline()
diff --git a/securedrop/management/__init__.py b/securedrop/management/__init__.py
new file mode 100644
--- /dev/null
+++ b/securedrop/management/__init__.py
@@ -0,0 +1 @@
+from run import run
diff --git a/securedrop/management/run.py b/securedrop/management/run.py
new file mode 100644
--- /dev/null
+++ b/securedrop/management/run.py
@@ -0,0 +1,182 @@
+import atexit
+import os
+import select
+import signal
+import subprocess
+import sys
+
+__all__ = ['run']
+
+
+def colorize(s, color, bold=False):
+ """
+ Returns the string s surrounded by shell metacharacters to display
+ it with the given color and optionally bolded.
+ """
+ # List of shell colors from https://www.siafoo.net/snippet/88
+ shell_colors = {
+ 'gray': '30',
+ 'red': '31',
+ 'green': '32',
+ 'yellow': '33',
+ 'blue': '34',
+ 'magenta': '35',
+ 'cyan': '36',
+ 'white': '37',
+ 'crimson': '38',
+ 'highlighted_red': '41',
+ 'highlighted_green': '42',
+ 'highlighted_brown': '43',
+ 'highlighted_blue': '44',
+ 'highlighted_magenta': '45',
+ 'highlighted_cyan': '46',
+ 'highlighted_gray': '47',
+ 'highlighted_crimson': '48'
+ }
+
+ # Based on http://stackoverflow.com/a/2330297/1093000
+ attrs = []
+ attrs.append(shell_colors[color])
+ if bold:
+ attrs.append('1')
+
+ return '\x1b[{}m{}\x1b[0m'.format(';'.join(attrs), s)
+
+
+class DevServerProcess(subprocess.Popen): # pragma: no cover
+
+ def __init__(self, label, cmd, color):
+ self.label = label
+ self.cmd = cmd
+ self.color = color
+
+ super(DevServerProcess, self).__init__(
+ self.cmd,
+ stdin = subprocess.PIPE,
+ stdout = subprocess.PIPE,
+ stderr = subprocess.STDOUT,
+ preexec_fn = os.setsid)
+
+ def print_label(self, to):
+ label = "\n => {} <= \n\n".format(self.label)
+ if to.isatty():
+ label = colorize(label, self.color, True)
+ to.write(label)
+
+ def fileno(self):
+ """
+ Implement fileno() in order to use DevServerProcesses with select.select
+ directly.
+
+ Note this method assumes we only want to select this process'
+ stdout. This is a reasonable assumption for a DevServerProcess
+ because the __init__ redirects stderr to stdout, so all output is
+ available on stdout.
+ """
+ return self.stdout.fileno()
+
+
+class DevServerProcessMonitor(object): # pragma: no cover
+
+ def __init__(self, proc_funcs):
+ self.procs = []
+ self.last_proc = None
+ atexit.register(self.cleanup)
+
+ for pf in proc_funcs:
+ self.procs.append(pf())
+
+ def monitor(self):
+ while True:
+ # TODO: we currently don't handle input, which makes using an
+ # interactive debugger like pdb impossible. Since Flask provides
+ # a featureful in-browser debugger, I'll accept that pdb is
+ # broken for now. If someone really wants it, they should be
+ # able to change this function to make it work (although I'm not
+ # sure how hard that would be).
+ #
+ # If you really want to use pdb, you can just run the
+ # application scripts individually (`python source.py` or
+ # `python journalist.py`).
+ rprocs, _, _ = select.select(self.procs, [], [])
+
+ for proc in rprocs:
+ # To keep track of which process output what, print a
+ # helpful label every time the process sending output
+ # changes.
+ if proc != self.last_proc:
+ proc.print_label(sys.stdout)
+ self.last_proc = proc
+
+ line = proc.stdout.readline()
+ sys.stdout.write(line)
+ sys.stdout.flush()
+
+ if any(proc.poll() is not None for proc in self.procs):
+ # If any of the processes terminates (for example, due to
+ # a syntax error causing a reload to fail), kill them all
+ # so we don't get stuck.
+ sys.stdout.write(colorize(
+ "\nOne of the development servers exited unexpectedly. "
+ "See the traceback above for details.\n"
+ "Once you have resolved the issue, you can re-run "
+ "'./manage.py run' to continue developing.\n\n",
+ "red", True))
+ self.cleanup()
+ break
+
+ for proc in self.procs:
+ proc.wait()
+
+ def cleanup(self):
+ for proc in self.procs:
+ if proc.poll() is None:
+ # When the development servers use automatic reloading, they
+ # spawn new subprocesses frequently. In order to make sure we
+ # kill all of the subprocesses, we need to send SIGTERM to
+ # the process group and not just the process we initially
+ # created. See http://stackoverflow.com/a/4791612/1093000
+ os.killpg(proc.pid, signal.SIGTERM)
+ proc.terminate()
+
+
+def run(): # pragma: no cover
+ """
+ Starts development servers for both the Source Interface and the
+ Journalist Interface concurrently. Their output is collected,
+ labeled, and sent to stdout to present a unified view to the
+ developer.
+
+ Ctrl-C will kill the servers and return you to the terminal.
+
+ Useful resources:
+ * https://stackoverflow.com/questions/22565606/python-asynhronously-print-stdout-from-multiple-subprocesses
+
+ """
+ print \
+"""
+ ____ ____
+/\\ _`\\ /\\ _`\\
+\\ \\,\\L\\_\\ __ ___ __ __ _ __ __\\ \\ \\/\\ \\ _ __ ___ _____
+ \\/_\\__ \\ /'__`\\ /'___\\/\\ \\/\\ \\/\\`'__\\/'__`\\ \\ \\ \\ \\/\\`'__\\/ __`\\/\\ '__`\\
+ /\\ \\L\\ \\/\\ __//\\ \\__/\\ \\ \\_\\ \\ \\ \\//\\ __/\\ \\ \\_\\ \\ \\ \\//\\ \\L\\ \\ \\ \\L\\ \\
+ \\ `\\____\\ \\____\\ \\____\\\\ \\____/\\ \\_\\\\ \\____\\\\ \\____/\\ \\_\\\\ \\____/\\ \\ ,__/
+ \\/_____/\\/____/\\/____/ \\/___/ \\/_/ \\/____/ \\/___/ \\/_/ \\/___/ \\ \\ \\/
+ \\ \\_\\
+ \\/_/
+"""
+
+ procs = [
+ lambda: DevServerProcess('Source Interface',
+ ['python', 'source.py'],
+ 'blue'),
+ lambda: DevServerProcess('Document Interface',
+ ['python', 'journalist.py'],
+ 'cyan'),
+ lambda: DevServerProcess('SASS Compiler',
+ ['sass', '--watch', 'sass:static/css'],
+ 'magenta'),
+ ]
+
+ monitor = DevServerProcessMonitor(procs)
+ monitor.monitor()
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -1,85 +1,120 @@
+# -*- coding: utf-8 -*-
import base64
import os
from tempfile import _TemporaryFileWrapper
+from gnupg._util import _STREAMLIKE_TYPES
from Crypto.Cipher import AES
from Crypto.Random import random
from Crypto.Util import Counter
-from gnupg._util import _STREAMLIKE_TYPES
-class SecureTemporaryFile(_TemporaryFileWrapper):
- """Temporary file that is ephemerally encrypted on the fly.
+class SecureTemporaryFile(_TemporaryFileWrapper, object):
+ """Temporary file that provides on-the-fly encryption.
- Since only encrypted data is ever written to disk, using this
- classes minimizes the chances of plaintext recovery through
- forensic disk analysis.
+ Buffering large submissions in memory as they come in requires too
+ much memory for too long a period. By writing the file to disk as it
+ comes in using a stream cipher, we are able to minimize memory usage
+ as submissions come in, while minimizing the chances of plaintext
+ recovery through forensic disk analysis. The key used to encrypt
+ each secure temporary file is also ephemeral, and is stored in
+ memory only for as long as needed.
- Adapted from Globaleaks' GLSecureTemporaryFile: https://github.com/globaleaks/GlobaLeaks/blob/master/backend/globaleaks/security.py#L35
+ Adapted from Globaleaks' GLSecureTemporaryFile:
+ https://github.com/globaleaks/GlobaLeaks/blob/master/backend/globaleaks/security.py#L35
WARNING: you can't use this like a normal file object. It supports
- being written to exactly once, then read from exactly once.
+ being appended to however many times you wish (although content may not be
+ overwritten), and then its contents may be read only once (although it may
+ be done in chunks) and only after it's been written to.
"""
-
AES_key_size = 256
AES_block_size = 128
def __init__(self, store_dir):
+ """Generates an AES key and an initialization vector, and opens
+ a file in the `store_dir` directory with a
+ pseudorandomly-generated filename.
+
+ Args:
+ store_dir (str): the directory to create the secure
+ temporary file under.
+
+ Returns: self
+ """
self.last_action = 'init'
self.create_key()
-
self.tmp_file_id = base64.urlsafe_b64encode(os.urandom(32)).strip('=')
- self.filepath = os.path.join(store_dir, "{}.aes".format(self.tmp_file_id))
+ self.filepath = os.path.join(store_dir,
+ '{}.aes'.format(self.tmp_file_id))
self.file = open(self.filepath, 'w+b')
-
- _TemporaryFileWrapper.__init__(self, self.file, self.filepath, delete=True)
+ super(SecureTemporaryFile, self).__init__(self.file, self.filepath)
def create_key(self):
- """
- Randomly generate an AES key to encrypt the file
+ """Generates a unique, pseudorandom AES key, stored ephemerally in
+ memory as an instance attribute. Its destruction is ensured by the
+ automatic nightly reboots of the SecureDrop application server combined
+ with the freed memory-overwriting PAX_MEMORY_SANITIZE feature of the
+ grsecurity-patched kernel it uses (for further details consult
+ https://github.com/freedomofpress/securedrop/pull/477#issuecomment-168445450).
"""
self.key = os.urandom(self.AES_key_size / 8)
self.iv = random.getrandbits(self.AES_block_size)
self.initialize_cipher()
def initialize_cipher(self):
+ """Creates the cipher-related objects needed for AES-CTR
+ encryption and decryption.
+ """
self.ctr_e = Counter.new(self.AES_block_size, initial_value=self.iv)
self.ctr_d = Counter.new(self.AES_block_size, initial_value=self.iv)
self.encryptor = AES.new(self.key, AES.MODE_CTR, counter=self.ctr_e)
self.decryptor = AES.new(self.key, AES.MODE_CTR, counter=self.ctr_d)
def write(self, data):
+ """Write `data` to the secure temporary file. This method may be
+ called any number of times following instance initialization,
+ but after calling :meth:`read`, you cannot write to the file
+ again.
"""
- We track the internal status and don't allow writing after reading.
- It might be possible to be smarter about this.
- """
- assert self.last_action != 'read', "You cannot write after read!"
+ assert self.last_action != 'read', 'You cannot write after reading!'
self.last_action = 'write'
- try:
- if isinstance(data, unicode):
- data = data.encode('utf-8')
- self.file.write(self.encryptor.encrypt(data))
- except Exception as err:
- raise err
+ if isinstance(data, unicode): # noqa
+ data = data.encode('utf-8')
+
+ self.file.write(self.encryptor.encrypt(data))
def read(self, count=None):
+ """Read `data` from the secure temporary file. This method may
+ be called any number of times following instance initialization
+ and once :meth:`write` has been called at least once, but not
+ before.
+
+ Before the first read operation, `seek(0, 0)` is called. So
+ while you can call this method any number of times, the full
+ contents of the file can only be read once. Additional calls to
+ read will return an empty str, which is desired behavior in that
+ it matches :class:`file` and because other modules depend on
+ this behavior to let them know they've reached the end of the
+ file.
+
+ Args:
+ count (int): the number of bytes to try to read from the
+ file from the current position.
"""
- The first time 'read' is called after a write, automatically seek(0).
- """
+ assert self.last_action != 'init', 'You must write before reading!'
if self.last_action == 'write':
self.seek(0, 0)
self.last_action = 'read'
- if count is None:
- return self.decryptor.decrypt(self.file.read())
- else:
+ if count:
return self.decryptor.decrypt(self.file.read(count))
+ else:
+ return self.decryptor.decrypt(self.file.read())
- def close(self):
- return _TemporaryFileWrapper.close(self)
# python-gnupg will not recognize our SecureTemporaryFile as a stream-like type
# and will attempt to call encode on it, thinking it's a string-like type. To
-# avoid this we add it the list of stream-like types.
+# avoid this we append it to the list of stream-like types.
_STREAMLIKE_TYPES.append(_TemporaryFileWrapper)
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
import os
from datetime import datetime
-import uuid
from functools import wraps
-import zipfile
from cStringIO import StringIO
import subprocess
from threading import Thread
import operator
from flask import (Flask, request, render_template, session, redirect, url_for,
- flash, abort, g, send_file, Markup)
-from flask_wtf.csrf import CsrfProtect
+ flash, abort, g, send_file, Markup, make_response)
+from flask_wtf.csrf import CSRFProtect
+from flask_assets import Environment
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
from sqlalchemy.exc import IntegrityError
import config
+import json
import version
import crypto_util
import store
@@ -33,10 +33,12 @@
app.request_class = RequestThatSecuresFileUploads
app.config.from_object(config.SourceInterfaceFlaskConfig)
+assets = Environment(app)
+
# The default CSRF token expiration is 1 hour. Since large uploads can
# take longer than an hour over Tor, we increase the valid window to 24h.
app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24
-CsrfProtect(app)
+CSRFProtect(app)
app.jinja_env.globals['version'] = version.__version__
if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
@@ -71,7 +73,8 @@ def decorated_function(*args, **kwargs):
def ignore_static(f):
- """Only executes the wrapped function if we're not loading a static resource."""
+ """Only executes the wrapped function if we're not loading
+ a static resource."""
@wraps(f)
def decorated_function(*args, **kwargs):
if request.path.startswith('/static'):
@@ -93,10 +96,14 @@ def setup_g():
try:
g.source = Source.query.filter(Source.filesystem_id == g.sid).one()
except MultipleResultsFound as e:
- app.logger.error("Found multiple Sources when one was expected: %s" % (e,))
+ app.logger.error(
+ "Found multiple Sources when one was expected: %s" %
+ (e,))
abort(500)
except NoResultFound as e:
- app.logger.error("Found no Sources when one was expected: %s" % (e,))
+ app.logger.error(
+ "Found no Sources when one was expected: %s" %
+ (e,))
del session['logged_in']
del session['codename']
return redirect(url_for('index'))
@@ -106,8 +113,8 @@ def setup_g():
@app.before_request
@ignore_static
def check_tor2web():
- # ignore_static here so we only flash a single message warning about Tor2Web,
- # corresponding to the intial page load.
+ # ignore_static here so we only flash a single message warning
+ # about Tor2Web, corresponding to the initial page load.
if 'X-tor2web' in request.headers:
flash('<strong>WARNING:</strong> You appear to be using Tor2Web. '
'This <strong>does not</strong> provide anonymity. '
@@ -120,13 +127,13 @@ def index():
return render_template('index.html')
-def generate_unique_codename(num_words=7):
+def generate_unique_codename():
"""Generate random codenames until we get an unused one"""
while True:
- codename = crypto_util.genrandomid(num_words)
+ codename = crypto_util.genrandomid(Source.NUM_WORDS)
- # The maximum length of a word in the wordlist is 6 letters and the
- # maximum codename length is 10 words, so it is currently impossible to
+ # The maximum length of a word in the wordlist is 9 letters and the
+ # codename length is 7 words, so it is currently impossible to
# generate a codename that is longer than the maximum codename length
# (currently 128 characters). This code is meant to be defense in depth
# to guard against potential future changes, such as modifications to
@@ -139,7 +146,8 @@ def generate_unique_codename(num_words=7):
continue
sid = crypto_util.hash_codename(codename) # scrypt (slow)
- matching_sources = Source.query.filter(Source.filesystem_id == sid).all()
+ matching_sources = Source.query.filter(
+ Source.filesystem_id == sid).all()
if len(matching_sources) == 0:
return codename
@@ -147,8 +155,9 @@ def generate_unique_codename(num_words=7):
@app.route('/generate', methods=('GET', 'POST'))
def generate():
if logged_in():
- flash("You were redirected because you are already logged in. If you want"
- "to create a new account, you should log out first.", "notification")
+ flash("You were redirected because you are already logged in. "
+ "If you want to create a new account, you should log out first.",
+ "notification")
return redirect(url_for('lookup'))
codename = generate_unique_codename()
@@ -165,7 +174,9 @@ def create():
try:
db_session.commit()
except IntegrityError as e:
- app.logger.error("Attempt to create a source with duplicate codename: %s" % (e,))
+ app.logger.error(
+ "Attempt to create a source with duplicate codename: %s" %
+ (e,))
else:
os.mkdir(store.path(sid))
@@ -185,7 +196,7 @@ def async_genkey(sid, codename):
crypto_util.genkeypair(sid, codename)
# Register key generation as update to the source, so sources will
- # filter to the top of the list in the document interface if a
+ # filter to the top of the list in the journalist interface if a
# flagged source logs in and has a key generated for them. #789
try:
source = Source.query.filter(Source.filesystem_id == sid).one()
@@ -202,11 +213,14 @@ def lookup():
for reply in g.source.replies:
reply_path = store.path(g.sid, reply.filename)
try:
- reply.decrypted = crypto_util.decrypt(g.codename, file(reply_path).read()).decode('utf-8')
+ reply.decrypted = crypto_util.decrypt(
+ g.codename,
+ open(reply_path).read()).decode('utf-8')
except UnicodeDecodeError:
app.logger.error("Could not decode reply %s" % reply.filename)
else:
- reply.date = datetime.utcfromtimestamp(os.stat(reply_path).st_mtime)
+ reply.date = datetime.utcfromtimestamp(
+ os.stat(reply_path).st_mtime)
replies.append(reply)
# Sort the replies by date
@@ -218,8 +232,13 @@ def lookup():
if not crypto_util.getkey(g.sid) and g.source.flagged:
async_genkey(g.sid, g.codename)
- return render_template('lookup.html', codename=g.codename, replies=replies,
- flagged=g.source.flagged, haskey=crypto_util.getkey(g.sid))
+ return render_template(
+ 'lookup.html',
+ codename=g.codename,
+ replies=replies,
+ flagged=g.source.flagged,
+ haskey=crypto_util.getkey(
+ g.sid))
def normalize_timestamps(sid):
@@ -235,7 +254,10 @@ def normalize_timestamps(sid):
args.extend(sub_paths[:-1])
rc = subprocess.call(args)
if rc != 0:
- app.logger.warning("Couldn't normalize submission timestamps (touch exited with %d)" % rc)
+ app.logger.warning(
+ "Couldn't normalize submission "
+ "timestamps (touch exited with %d)" %
+ rc)
@app.route('/submit', methods=('POST',))
@@ -255,22 +277,37 @@ def submit():
if msg:
g.source.interaction_count += 1
- fnames.append(store.save_message_submission(g.sid, g.source.interaction_count,
- journalist_filename, msg))
+ fnames.append(
+ store.save_message_submission(
+ g.sid,
+ g.source.interaction_count,
+ journalist_filename,
+ msg))
if fh:
g.source.interaction_count += 1
- fnames.append(store.save_file_submission(g.sid, g.source.interaction_count,
- journalist_filename, fh.filename, fh.stream))
+ fnames.append(
+ store.save_file_submission(
+ g.sid,
+ g.source.interaction_count,
+ journalist_filename,
+ fh.filename,
+ fh.stream))
if first_submission:
- flash("Thanks for submitting something to SecureDrop! Please check back later for replies.",
- "notification")
+ msg = render_template('first_submission_flashed_message.html')
+ flash(Markup(msg), "success")
+
else:
- if msg:
- flash("Thanks! We received your message.", "notification")
- if fh:
- flash('{} "{}".'.format("Thanks! We received your document",
- fh.filename or '[unnamed]'), "notification")
+ if msg and not fh:
+ things = 'message'
+ elif not msg and fh:
+ things = 'document'
+ else:
+ things = 'message and document'
+
+ msg = render_template('next_submission_flashed_message.html',
+ things=things)
+ flash(Markup(msg), "success")
for fname in fnames:
submission = Submission(g.source, fname)
@@ -280,7 +317,8 @@ def submit():
g.source.pending = False
# Generate a keypair now, if there's enough entropy (issue #303)
- entropy_avail = int(open('/proc/sys/kernel/random/entropy_avail').read())
+ entropy_avail = int(
+ open('/proc/sys/kernel/random/entropy_avail').read())
if entropy_avail >= 2400:
async_genkey(g.sid, g.codename)
@@ -294,7 +332,8 @@ def submit():
@app.route('/delete', methods=('POST',))
@login_required
def delete():
- query = Reply.query.filter(Reply.filename == request.form['reply_filename'])
+ query = Reply.query.filter(
+ Reply.filename == request.form['reply_filename'])
reply = get_one_or_else(query, app.logger, abort)
store.secure_unlink(store.path(g.sid, reply.filename))
db_session.delete(reply)
@@ -304,6 +343,22 @@ def delete():
return redirect(url_for('lookup'))
[email protected]('/delete-all', methods=('POST',))
+@login_required
+def batch_delete():
+ replies = g.source.replies
+ if len(replies) == 0:
+ app.logger.error("Found no replies when at least one was expected")
+ return redirect(url_for('lookup'))
+ for reply in replies:
+ store.secure_unlink(store.path(g.sid, reply.filename))
+ db_session.delete(reply)
+ db_session.commit()
+
+ flash("All replies have been deleted", "notification")
+ return redirect(url_for('lookup'))
+
+
def valid_codename(codename):
# Ignore codenames that are too long to avoid DoS
if len(codename) > Source.MAX_CODENAME_LEN:
@@ -341,21 +396,21 @@ def login():
def logout():
if logged_in():
session.clear()
- tor_msg = render_template('logout_flashed_message.html')
- flash(Markup(tor_msg), "error")
+ msg = render_template('logout_flashed_message.html')
+ flash(Markup(msg), "important")
return redirect(url_for('index'))
[email protected]('/howto-disable-js')
-def howto_disable_js():
- return render_template("howto-disable-js.html")
-
-
@app.route('/tor2web-warning')
def tor2web_warning():
return render_template("tor2web-warning.html")
[email protected]('/use-tor')
+def recommend_tor_browser():
+ return render_template("use-tor-browser.html")
+
+
@app.route('/journalist-key')
def download_journalist_pubkey():
journalist_pubkey = crypto_util.gpg.export_keys(config.JOURNALIST_KEY)
@@ -370,6 +425,16 @@ def why_download_journalist_pubkey():
return render_template("why-journalist-key.html")
[email protected]('/metadata')
+def metadata():
+ meta = {'gpg_fpr': config.JOURNALIST_KEY,
+ 'sd_version': version.__version__,
+ }
+ resp = make_response(json.dumps(meta))
+ resp.headers['Content-Type'] = 'application/json'
+ return resp
+
+
@app.errorhandler(404)
def page_not_found(error):
return render_template('notfound.html'), 404
@@ -380,12 +445,6 @@ def internal_error(error):
return render_template('error.html'), 500
-def write_pidfile():
- pid = str(os.getpid())
- with open(config.SOURCE_PIDFILE, 'w') as fp:
- fp.write(pid)
-
-if __name__ == "__main__":
- write_pidfile()
- # TODO make sure debug is not on in production
- app.run(debug=True, host='0.0.0.0', port=8080)
+if __name__ == "__main__": # pragma: no cover
+ debug = getattr(config, 'env', 'prod') != 'prod'
+ app.run(debug=debug, host='0.0.0.0', port=8080)
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -4,10 +4,8 @@
import config
import zipfile
import crypto_util
-import uuid
import tempfile
import subprocess
-from cStringIO import StringIO
import gzip
from werkzeug import secure_filename
@@ -16,13 +14,15 @@
import logging
log = logging.getLogger(__name__)
-VALIDATE_FILENAME = re.compile("^(?P<index>\d+)\-[a-z0-9-_]*(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match
+VALIDATE_FILENAME = re.compile(
+ "^(?P<index>\d+)\-[a-z0-9-_]*"
+ "(?P<file_type>msg|doc\.(gz|zip)|reply)\.gpg$").match
class PathException(Exception):
- """An exception raised by `store.verify` when it encounters a bad path. A path
- can be bad when it is not absolute, not normalized, not within
- `config.STORE_DIR`, or doesn't match the filename format.
+
+ """An exception raised by `util.verify` when it encounters a bad path. A path
+ can be bad when it is not absolute or not normalized.
"""
pass
@@ -35,10 +35,11 @@ def verify(p):
raise PathException("config.STORE_DIR(%s) is not absolute" % (
config.STORE_DIR, ))
- # os.path.abspath makes the path absolute and normalizes '/foo/../bar' to
- # '/bar', etc. We have to check that the path is normalized before checking
- # that it starts with the `config.STORE_DIR` or else a malicious actor could
- # append a bunch of '../../..' to access files outside of the store.
+ # os.path.abspath makes the path absolute and normalizes
+ # '/foo/../bar' to '/bar', etc. We have to check that the path is
+ # normalized before checking that it starts with the
+ # `config.STORE_DIR` or else a malicious actor could append a
+ # bunch of '../../..' to access files outside of the store.
if not p == os.path.abspath(p):
raise PathException("The path is not absolute and/or normalized")
@@ -66,17 +67,31 @@ def path(*s):
return absolute
-def get_bulk_archive(filenames, zip_directory=''):
+def get_bulk_archive(selected_submissions, zip_directory=''):
+ """Generate a zip file from the selected submissions"""
zip_file = tempfile.NamedTemporaryFile(prefix='tmp_securedrop_bulk_dl_',
dir=config.TEMP_DIR,
delete=False)
+ sources = set([i.source.journalist_designation
+ for i in selected_submissions])
+ # The below nested for-loops are there to create a more usable
+ # folder structure per #383
with zipfile.ZipFile(zip_file, 'w') as zip:
- for filename in filenames:
- verify(filename)
- zip.write(filename, arcname=os.path.join(
- zip_directory,
- os.path.basename(filename)
- ))
+ for source in sources:
+ submissions = [s for s in selected_submissions
+ if s.source.journalist_designation == source]
+ for submission in submissions:
+ filename = path(submission.source.filesystem_id,
+ submission.filename)
+ verify(filename)
+ document_number = submission.filename.split('-')[0]
+ zip.write(filename, arcname=os.path.join(
+ zip_directory,
+ source,
+ "%s_%s" % (document_number,
+ submission.source.last_updated.date()),
+ os.path.basename(filename)
+ ))
return zip_file
@@ -96,10 +111,13 @@ def save_file_submission(sid, count, journalist_filename, filename, stream):
# file. Given various usability constraints in GPG and Tails, this
# is the most user-friendly way we have found to do this.
- encrypted_file_name = "{0}-{1}-doc.gz.gpg".format(count, journalist_filename)
+ encrypted_file_name = "{0}-{1}-doc.gz.gpg".format(
+ count,
+ journalist_filename)
encrypted_file_path = path(sid, encrypted_file_name)
with SecureTemporaryFile("/tmp") as stf:
- with gzip.GzipFile(filename=sanitized_filename, mode='wb', fileobj=stf) as gzf:
+ with gzip.GzipFile(filename=sanitized_filename,
+ mode='wb', fileobj=stf) as gzf:
# Buffer the stream into the gzip file to avoid excessive
# memory consumption
while True:
@@ -144,7 +162,9 @@ def secure_unlink(fn, recursive=False):
command.append('-r')
command.append(fn)
subprocess.check_call(command)
+ return "success"
def delete_source_directory(source_id):
secure_unlink(path(source_id), recursive=True)
+ return "success"
diff --git a/securedrop/template_filters.py b/securedrop/template_filters.py
--- a/securedrop/template_filters.py
+++ b/securedrop/template_filters.py
@@ -18,7 +18,9 @@ def _relative_timestamp(dt):
Format a human readable relative time for timestamps up to 30 days old
"""
delta = datetime.utcnow() - dt
- diff = (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 1e6) / 1e6
+ diff = (
+ delta.microseconds + (delta.seconds +
+ delta.days * 24 * 3600) * 1e6) / 1e6
if diff < 45:
return '{} second{}'.format(int(diff), '' if int(diff) == 1 else 's')
elif diff < 90:
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.3.12'
+__version__ = '0.4'
diff --git a/securedrop/worker.py b/securedrop/worker.py
--- a/securedrop/worker.py
+++ b/securedrop/worker.py
@@ -3,11 +3,12 @@
from redis import Redis
from rq import Queue
-queue_name = 'test' if os.environ.get('SECUREDROP_ENV') == 'test' else 'default'
+queue_name = 'test' if os.environ.get(
+ 'SECUREDROP_ENV') == 'test' else 'default'
# `srm` can take a long time on large files, so allow it run for up to an hour
q = Queue(name=queue_name, connection=Redis(), default_timeout=3600)
def enqueue(*args, **kwargs):
- q.enqueue(*args, **kwargs)
+ return q.enqueue(*args, **kwargs)
diff --git a/testinfra/combine-junit.py b/testinfra/combine-junit.py
new file mode 100755
--- /dev/null
+++ b/testinfra/combine-junit.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+#
+# Corey Goldberg, Dec 2012
+# Original source from gist.github.com/cgoldberg/4320815
+
+import os
+import sys
+import xml.etree.ElementTree as ET
+
+
+"""Merge multiple JUnit XML files into a single results file.
+
+Output dumps to stdout.
+
+example usage:
+ $ python merge_junit_results.py results1.xml results2.xml > results.xml
+"""
+
+
+def main():
+ args = sys.argv[1:]
+ if not args:
+ usage()
+ sys.exit(2)
+ if '-h' in args or '--help' in args:
+ usage()
+ sys.exit(2)
+ merge_results(args[:])
+
+
+def merge_results(xml_files):
+ failures = 0
+ tests = 0
+ errors = 0
+ time = 0.0
+ cases = []
+
+ for file_name in xml_files:
+ tree = ET.parse(file_name)
+ test_suite = tree.getroot()
+ failures += int(test_suite.attrib['failures'])
+ tests += int(test_suite.attrib['tests'])
+ errors += int(test_suite.attrib['errors'])
+ time += float(test_suite.attrib['time'])
+ cases.append(test_suite.getchildren())
+
+ new_root = ET.Element('testsuite')
+ new_root.attrib['failures'] = '%s' % failures
+ new_root.attrib['tests'] = '%s' % tests
+ new_root.attrib['errors'] = '%s' % errors
+ new_root.attrib['time'] = '%s' % time
+ for case in cases:
+ new_root.extend(case)
+ new_tree = ET.ElementTree(new_root)
+ ET.dump(new_tree)
+
+
+def usage():
+ this_file = os.path.basename(__file__)
+ print 'Usage: %s results1.xml results2.xml' % this_file
+
+
+if __name__ == '__main__':
+ main()
diff --git a/testinfra/conftest.py b/testinfra/conftest.py
new file mode 100644
--- /dev/null
+++ b/testinfra/conftest.py
@@ -0,0 +1,42 @@
+"""
+Configuration for TestInfra test suite for SecureDrop.
+Handles importing host-specific test vars, so test functions
+can be reused across multiple hosts, with varied targets.
+
+Vars should be placed in `testinfra/vars/<hostname>.yml`.
+"""
+
+import os
+import sys
+import yaml
+import pytest
+
+
+target_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+assert target_host != ""
+
+
+def securedrop_import_testinfra_vars(hostname, with_header=False):
+ """
+ Import vars from a YAML file to populate tests with host-specific
+ values used in checks. For instance, the SecureDrop docroot will
+ be under /vagrant in development, but /var/www/securedrop in staging.
+
+ Vars must be stored in `testinfra/vars/<hostname>.yml`.
+ """
+ filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
+ with open(filepath, 'r') as f:
+ hostvars = yaml.safe_load(f)
+ # The directory Travis runs builds in varies by PR, so we cannot hardcode
+ # it in the YAML testvars. Read it from env var and concatenate.
+ if hostname.lower() == 'travis':
+ build_env = os.environ["TRAVIS_BUILD_DIR"]
+ hostvars['securedrop_code'] = build_env+"/securedrop"
+
+ if with_header:
+ hostvars = dict(securedrop_test_vars=hostvars)
+ return hostvars
+
+
+def pytest_namespace():
+ return securedrop_import_testinfra_vars(target_host, with_header=True)
| diff --git a/docs/development/spec_tests.rst b/docs/development/spec_tests.rst
deleted file mode 100644
--- a/docs/development/spec_tests.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-Serverspec Tests
-================
-
-serverspec_ tests verify the end state of the vagrant machines. Any
-changes to the Ansible configuration should have a corresponding
-spectest.
-
-.. _serverspec: http://serverspec.org
-
-Install directions (Ubuntu)
----------------------------
-
-.. code:: sh
-
- apt-get install bundler
- cd spec_tests/
- bundle update
-
-Running the tests
------------------
-
-.. code:: sh
-
- cd spec_tests/
- bundle exec rake spec
-
-This will run the tests against all configured hosts, specifically:
-
-- development
-- app-staging
-- mon-staging
-- build
-
-In order to run the tests, each VM will be created and provisioned, if
-necessary. Running all VMs concurrently may cause performance
-problems if you have less than 8GB of RAM. You can isolate specific
-machines for faster testing:
-
-.. code:: sh
-
- cd spec_tests
- bundle exec rake --tasks # check output for desired machine
- bundle exec rake spec:development
-
-Updating the tests
-------------------
-
-Changes to the ansible config should result in failing spectests, but
-only if an existing task was modified. If you add a new task, make
-sure to add a corresponding spectest to validate that state after a
-new provisioning run. Tests import variables from separate YAML files
-than the Ansible playbooks: ::
-
- spec_tests/spec/vars
- βββ development.yml
- βββ staging.yml
-
-Any variable changes in the Ansible config should have a corresponding
-entry in these vars files. These vars are dynamically loaded for each
-host via the ``spec_helper.rb`` file. Make sure to add your tests to
-relevant location for the host you plan to test: ::
-
- spec_tests/spec/app-staging
- βββ apache_spec.rb
- βββ apparmor_spec.rb
- βββ iptables_spec.rb
- βββ ossec_agent_spec.rb
- βββ securedrop_app_spec.rb
- βββ securedrop_app_test_spec.rb
- βββ tor_spec.rb
-
-In the example above, to add a new test for the ``app-staging`` host,
-add a new file to the ``spec_tests/spec/app-staging`` directory.
-
-Spectest layout
----------------
-
-The serverspec tests are mostly broken up according to machines in the
-Vagrantfile: ::
-
- spec_tests/spec
- βββ app-staging
- βββ build
- βββ common-development
- βββ common-staging
- βββ development
- βββ mon-staging
- βββ vars
-
-There are a few exceptions:
-
-- ``common-development`` shares tests between ``development`` and
- ``app-staging``
-- ``common-staging`` shares tests between ``app-staging`` and
- ``mon-staging``
-
-Ideally the serverspec tests would be broken up according to roles,
-mirroring the Ansible configuration. Prior to the reorganization of
-the Ansible layout, the tests are rather tightly coupled to hosts. The
-layout of spectests is therefore subject to change.
diff --git a/docs/development/testing_application_tests.rst b/docs/development/testing_application_tests.rst
new file mode 100644
--- /dev/null
+++ b/docs/development/testing_application_tests.rst
@@ -0,0 +1,98 @@
+.. _app_tests:
+
+Testing: Application Tests
+==========================
+
+The application test suite uses:
+
+ * Pytest_
+ * Selenium_
+ * Coveralls_
+
+The application tests consist of unit tests for the Python application code
+and functional tests that verify the functionality of the application code
+from the perspective of the user through a web browser.
+
+The functional tests use an outdated version of Firefox chosen specifically
+for compatibility with Selenium 2, and a rough approximation of the most
+recent Tor Browser.
+
+.. note:: We're working on running the Selenium tests in Tor Browser.
+ See `GitHub #1629`_ for more info.
+
+.. _`GitHub #1629`: https://github.com/freedomofpress/securedrop/pull/1629
+
+.. _Pytest: https://docs.pytest.org/en/latest/
+.. _Selenium: http://docs.seleniumhq.org/docs/
+.. _Coveralls: https://github.com/coveralls-clients/coveralls-python
+
+Installation
+------------
+
+The application tests are installed automatically in the development
+and app-staging VMs, based on the contents of
+``securedrop/requirements/test-requirements.txt``.
+If you wish to change the dependencies, see :ref:`updating_pip_dependencies`.
+
+Running the application tests
+-----------------------------
+
+The tests can be run inside the development VM:
+
+.. code:: sh
+
+ vagrant ssh development
+ cd /vagrant/securedrop
+ pytest -v tests
+
+Or the app-staging VM:
+
+.. code:: sh
+
+ vagrant ssh app-staging
+ sudo su www-data -s /bin/bash
+ cd /var/www/securedrop
+ pytest -v tests
+
+For explanation of the difference between these machines, see
+:doc:`virtual_environments`.
+
+If you just want to run the functional tests, you can use:
+
+.. code:: sh
+
+ pytest -v tests/functional/
+
+Similarly, if you want to run a single test, you can specify it through the
+file, class, and test name:
+
+.. code:: sh
+
+ pytest tests/test_journalist.py::TestJournalistApp::test_invalid_credentials
+
+
+Updating the application tests
+------------------------------
+
+Unit tests are stored in the ``securedrop/tests/`` directory and functional
+tests are stored in the functional test directory::
+
+ securedrop/tests/
+ ├── functional
+ │   ├── test_admin_interface.py
+ │   ├── test_submit_and_retrieve_file.py
+ │   │   ...
+ │   └── submission_not_in_memory.py
+ ├── utils
+ │   ├── db_helper.py
+ │   ├── env.py
+ │   └── async.py
+ ├── test_journalist.py
+ ├── test_source.py
+ │   ...
+ └── test_store.py
+
+``securedrop/tests/utils`` contains helper functions for writing tests.
+If you want to add a test, you should see if there is an existing file
+appropriate for the kind of test, e.g. a new unit testing ``manage.py``
+should go in ``test_manage.py``.
diff --git a/docs/development/testing_configuration_tests.rst b/docs/development/testing_configuration_tests.rst
new file mode 100644
--- /dev/null
+++ b/docs/development/testing_configuration_tests.rst
@@ -0,0 +1,124 @@
+.. _config_tests:
+
+Testing: Configuration Tests
+============================
+
+Testinfra_ tests verify the end state of the Vagrant machines. Any
+changes to the Ansible configuration should have a corresponding
+spectest.
+
+.. _Testinfra: https://testinfra.readthedocs.io/en/latest/
+
+Installation
+------------
+
+.. code:: sh
+
+ pip install -r securedrop/requirements/develop-requirements.txt
+
+Running the config tests
+------------------------
+
+In order to run the tests, first create and provision the VM you intend
+to test. For the development VM:
+
+.. code:: sh
+
+ vagrant up development
+
+For the staging VMs:
+
+.. code:: sh
+
+ vagrant up build --no-provision
+ vagrant up /staging/
+
+.. note:: The staging machines must be rebooted in order to finalize
+ the iptables config. You must manually reboot the machines via
+ ``vagrant reload /staging/`` prior to running the config tests
+ to ensure the config is valid.
+
+Running all VMs concurrently may cause performance
+problems if you have less than 8GB of RAM. You can isolate specific
+machines for faster testing:
+
+.. code:: sh
+
+ ./testinfra/test.py development
+ ./testinfra/test.py app-staging
+ ./testinfra/test.py mon-staging
+
+.. note:: The config tests for the ``app-prod`` and ``mon-prod`` hosts are
+ incomplete. Further changes are necessary to run the tests via
+ SSH over Authenticated Tor Hidden Service (ATHS), for both local
+ testing via Vagrant and automated testing via CI.
+
+Test failure against any host will generate a report with informative output
+about the specific test that triggered the error. The wrapper script
+will also exit with a non-zero status code.
+
+Updating the config tests
+-------------------------
+
+Changes to the Ansible config should result in failing config tests, but
+only if an existing task was modified. If you add a new task, make
+sure to add a corresponding spectest to validate that state after a
+new provisioning run. Tests import variables from separate YAML files
+than the Ansible playbooks: ::
+
+ testinfra/vars/
+ βββ app-prod.yml
+ βββ app-staging.yml
+ βββ build.yml
+ βββ development.yml
+ βββ mon-prod.yml
+ βββ mon-staging.yml
+
+Any variable changes in the Ansible config should have a corresponding
+entry in these vars files. These vars are dynamically loaded for each
+host via the ``testinfra/conftest.py`` file. Make sure to add your tests to
+relevant location for the host you plan to test: ::
+
+ testinfra/app/
+ βββ apache
+ β βββ test_apache_journalist_interface.py
+ β βββ test_apache_service.py
+ β βββ test_apache_source_interface.py
+ β βββ test_apache_system_config.py
+ βββ test_apparmor.py
+ βββ test_appenv.py
+ βββ test_network.py
+ βββ test_ossec.py
+
+In the example above, to add a new test for the ``app-staging`` host,
+add a new file to the ``testinfra/app`` directory.
+
+Config test layout
+------------------
+
+The config tests are mostly broken up according to machines in the
+Vagrantfile: ::
+
+ testinfra/
+ βββ app
+ βββ app-code
+ βββ build
+ βββ common
+ βββ development
+ βββ mon
+
+Ideally the config tests would be broken up according to roles,
+mirroring the Ansible configuration. Prior to the reorganization of
+the Ansible layout, the tests are rather tightly coupled to hosts. The
+layout of config tests is therefore subject to change.
+
+Config testing strategy
+-----------------------
+
+The config tests currently emphasize testing implementation rather than
+functionality. This is a temporary measure to increase the current testing
+baseline for validating the Ansible provisioning flow, to aid in migrating
+to a current version of Ansible (v2+). After the Ansible version is current,
+the config tests can be improved to validate behavior, such as confirming
+ports are blocked via external network calls, rather than simply checking
+that the iptables rules are formatted as expected.
diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst
new file mode 100644
--- /dev/null
+++ b/docs/development/testing_continuous_integration.rst
@@ -0,0 +1,118 @@
+.. _ci_tests:
+
+Testing: CI
+===========
+
+The SecureDrop project uses multiple automated third-party solutions
+for running automated test suites on code changes:
+
+ * Travis_
+ * CircleCI_
+
+.. _Travis: https://travis-ci.org/freedomofpress/securedrop/
+.. _CircleCI: http://circleci.com/gh/freedomofpress/securedrop/
+
+Travis tests
+------------
+
+The Travis_ test suite provisions the development VM and runs the application
+test suite against the latest version of the code. It also performs basic
+linting and validation, e.g. checking for mistakes in the Sphinx documentation
+(see :doc:`documentation_guidelines`).
+
+CI test layout
+--------------
+
+The relevant files for configuring the CI tests are: ::
+
+ βββ .circleci <--- folder contains config for CircleCI
+ βββ devops
+ β βββ inventory <-- environment specific inventory
+ β βββ playbooks <-- playbooks to start CI boxes
+ β βββ scripts <-- shell wrapper scripts
+ β βββ templates <-- contains templates for ansible tasks
+ β βββ vars <-- environment specific variables
+ βββ .travis.yml <--- config for development tests on travis
+ βββ Makefile <-- defines make task shortcuts
+
+The files under ``devops/`` are used to create a minimized staging environment
+on AWS EC2. The CircleCI host is used as the Ansible controller to provision
+the machines and run the :ref:`config_tests` against them.
+
+Running the CI staging environment
+----------------------------------
+
+The staging environment tests will run automatically in CircleCI,
+when changes are submitted by Freedom of the Press Foundation staff
+(i.e. members of the ``freedomofpress`` GitHub organization).
+
+.. tip:: You will need an Amazon Web Services EC2 account to proceed.
+ See the `AWS Getting Started Guide`_ for detailed instructions.
+
+.. _AWS Getting Started Guide: https://aws.amazon.com/ec2/getting-started/
+
+In addition to an EC2 account, you will need a working `Docker installation`_ in
+order to run the container that builds the deb packages.
+
+You can verify that your Docker installation is working by running
+``docker run hello-world`` and confirming you see "Hello from Docker" in the
+output as shown below:
+
+.. code:: sh
+
+ $ docker run hello-world
+
+ Hello from Docker!
+ This message shows that your installation appears to be working correctly.
+ ...
+
+.. _Docker installation: https://www.docker.com/community-edition#/download
+
+Setup environment parameters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Source the setup script using the following command:
+
+.. code:: sh
+
+ $ source ./devops/scripts/local-setup.sh
+
+You will be prompted for the values of the required environment variables. There
+are some defaults set that you may want to change. You will need to determine
+the value of your VPC ID to use which is outside the scope of this guide.
+
+
+Use Makefile to provision hosts
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Run ``make help`` to see the full list of CI commands in the Makefile:
+
+.. code:: sh
+
+ $ make help
+ Makefile for developing and testing SecureDrop.
+ Subcommands:
+ docs: Build project documentation in live reload for editing.
+ docs-lint: Check documentation for common syntax errors.
+ ci-spinup: Creates AWS EC2 hosts for testing staging environment.
+ ci-teardown: Destroy AWS EC2 hosts for testing staging environment.
+ ci-run: Provisions AWS EC2 hosts for testing staging environment.
+ ci-test: Tests AWS EC2 hosts for testing staging environment.
+ ci-go: Creates, provisions, tests, and destroys AWS EC2 hosts
+ for testing staging environment.
+ ci-debug: Prevents automatic destruction of AWS EC2 hosts on error.
+
+To run the tests locally:
+
+.. code:: sh
+
+ make ci-debug # hosts will not be destroyed automatically
+ make ci-go
+
+You can use ``make ci-run`` to provision the remote hosts while making changes,
+including rebuilding the Debian packages used in the Staging environment.
+See :doc:`virtual_environments` for more information.
+
+Note that if you typed ``make ci-debug`` above, you will have to manually remove
+a blank file in ``${HOME}/.FPF_CI_DEBUG`` and then run ``make ci-teardown`` to
+bring down the CI environment. Otherwise, specifically for AWS, you will be
+charged hourly charges until those machines are terminated.
diff --git a/docs/development/testing_securedrop.rst b/docs/development/testing_securedrop.rst
new file mode 100644
--- /dev/null
+++ b/docs/development/testing_securedrop.rst
@@ -0,0 +1,14 @@
+Testing SecureDrop
+==================
+
+The SecureDrop project ships both application code for running on servers
+hosted on-site at news organizations, as well as configuration scripts
+for provisioning the servers to accept updates to the application code,
+and to harden the system state. Therefore testing for the project includes
+:ref:`Application Tests<app_tests>` for validating that the app code behaves
+as expected, and :ref:`Configuration Tests<config_tests>` to ensure that the
+servers are appropriately locked down, and able to accept updates to the app code.
+
+In addition, the :ref:`Continuous Integration<ci_tests>` automatically runs
+the above Application and Configuration tests against cloud hosts,
+to aid in PR review.
diff --git a/docs/images/printer_setup_guide/print_test_page.png b/docs/images/printer_setup_guide/print_test_page.png
old mode 100755
new mode 100644
diff --git a/docs/test_the_installation.rst b/docs/test_the_installation.rst
--- a/docs/test_the_installation.rst
+++ b/docs/test_the_installation.rst
@@ -7,18 +7,18 @@ Test connectivity
SSH to both servers over Tor
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-On the Admin Workstation, you should be able to SSH to the App
-Server and the Monitor Server. ::
+On the *Admin Workstation*, you should be able to SSH to the App
+Server and the *Monitor Server*. ::
- $ ssh app
- $ ssh mon
+ ssh app
+ ssh mon
The SSH aliases should have been configured automatically by running
-the ``install.sh`` script. If you're unable to connect via aliases,
+the ``./securedrop-admin tailsconfig`` tool. If you're unable to connect via aliases,
try using the verbose command format to troubleshoot: ::
- $ ssh <username>@<app .onion>
- $ ssh <username>@<mon .onion>
+ ssh <username>@<app .onion>
+ ssh <username>@<mon .onion>
.. tip:: You can find the Onion URLs for SSH in ``app-ssh-aths`` and
``mon-ssh-aths`` inside the ``install_files/ansible-base`` directory.
@@ -27,7 +27,7 @@ Log in to both servers via TTY
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
All access to the SecureDrop servers should be performed over SSH from the
-Admin Workstation. To aid in troubleshooting, physical logins via TTY are
+*Admin Workstation*. To aid in troubleshooting, physical logins via TTY are
supported, but require 2FA to be configured. See the :doc:`2FA setup guide
<google_authenticator>` for information how to enable console logins.
@@ -48,14 +48,17 @@ On each server:
#. Check that you can execute privileged commands by running ``sudo su``.
#. Verify that you are booted into a grsec kernel: run ``uname -r``
and verify that the name of the running kernel ends with ``-grsec``.
-#. Check the AppArmor status with ``sudo aa-status``. On a production
- instance all profiles should be in enforce mode.
#. Check the current applied iptables rules with ``iptables-save``. It
should output *approximately* 50 lines.
#. You should have received an email alert from OSSEC when it first
started. If not, review our :doc:`OSSEC Alerts
Guide <ossec_alerts>`.
+On the *Application Server*:
+
+#. Check the AppArmor status with ``sudo aa-status``. On a production
+ instance all profiles should be in enforce mode.
+
Test the web interfaces
-----------------------
@@ -69,14 +72,14 @@ Test the web interfaces
- Usage of the Source Interface is covered by our :doc:`Source User
Manual <source>`.
-#. Test that you can access the Document Interface, and that you can log
+#. Test that you can access the Journalist Interface, and that you can log
in as the admin user you just created.
- Open the Tor Browser and navigate to the onion URL from
- app-document-aths. Enter your password and two-factor
+ app-journalist-aths. Enter your password and two-factor
authentication code to log in.
- - If you have problems logging in to the Admin/Document Interface,
- SSH to the App Server and restart the ntp daemon to synchronize
+ - If you have problems logging in to the Admin/Journalist Interface,
+ SSH to the *Application Server* and restart the ntp daemon to synchronize
the time: ``sudo service ntp restart``. Also check that your
smartphone's time is accurate and set to network time in its
device settings.
@@ -85,7 +88,7 @@ Test the web interfaces
- While logged in as an admin, you can send a reply to the test
source submission you made earlier.
- - Usage of the Document Interface is covered by our :doc:`Journalist
+ - Usage of the Journalist Interface is covered by our :doc:`Journalist
User Manual <journalist>`.
#. Test that the source received the reply.
@@ -96,10 +99,10 @@ Test the web interfaces
is present.
#. We highly recommend that you create persistent bookmarks for the
- Source and Document Interface addresses within Tor Browser.
+ Source and Journalist Interface addresses within Tor Browser.
#. Remove the test submissions you made prior to putting SecureDrop to
- real use. On the main Document Interface page, select all sources and
+ real use. On the main Journalist Interface page, select all sources and
click 'Delete selected'.
Once you've tested the installation and verified that everything is
diff --git a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
--- a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
+++ b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
@@ -1,10 +1,17 @@
---
-- name: install pip dependencies for running the unit and functional tests
- pip: requirements="{{ test_pip_requirements }}"
+- name: Install pip dependencies for running the unit and functional tests.
+ pip:
+ requirements: "{{ test_pip_requirements }}"
+ tags:
+ - pip
-- name: install testing deb pkg dependencies
- apt: name="{{ item }}" state=latest
- with_items: test_apt_dependencies
+- name: Install testing package dependencies.
+ apt:
+ name: "{{ item }}"
+ state: present
+ with_items: "{{ test_apt_dependencies }}"
+ tags:
+ - apt
# Selenium 3 makes breaking changes with the 2.X API, and requires the
# installation of the Mozilla geckodriver. Since the Aaron Swartz Day Hackathon
@@ -15,12 +22,11 @@
# remove the following three tasks (as well as add firefox back to the
# `test_apt_dependencies` list).
- name: Download Firefox 46.0.1 for compatibility with Selenium 2.53.6.
- sudo: no
get_url:
# Since the whole tasklisk is run as root, the ansible_env.HOME fact is
# /root. Since this command doesn't need to be run as root and is part of a
- # crutch anyway, I've just hardcoded /home/vagrant.
- dest: /home/vagrant/
+ # crutch anyway, I've just hardcoded /tmp
+ dest: "/opt/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb"
url: https://launchpad.net/~ubuntu-mozilla-security/+archive/ubuntu/ppa/+build/9727836/+files/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb
sha256sum: 88d25053306d33658580973b063cd459a56e3596a3a298c1fb8ab1d52171d860
tags:
@@ -39,23 +45,48 @@
- name: Install Firefox 46.0.1 for compatibility with Selenium 2.53.6.
apt:
- deb: /home/vagrant/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb
+ deb: "/opt/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb"
tags:
- apt
+- name: Set apt hold on Firefox version (via apt).
+ command: apt-mark hold firefox
+ register: apt_hold_firefox_result
+ # apt-mark will return output to report changed status; subsequent runs
+ # will report "firefox was already set on hold."
+ changed_when: "'firefox set on hold' in apt_hold_firefox_result.stdout"
+
+- name: Set apt hold on Firefox version (via aptitude).
+ command: aptitude hold firefox
+ # `aptitude hold <package>` doesn't report meaningful changed status,
+ # so mark the task as not changed.
+ changed_when: false
+
- name: Copy xvfb init script.
copy:
src: xvfb
dest: /etc/init.d/xvfb
owner: root
- mode: '700'
+ mode: "0700"
tags:
- xvfb
- permissions
-- name: update rc.d to run xvfb at boot
- command: "update-rc.d xvfb defaults"
+- name: Update rc.d to run xvfb at boot.
+ command: update-rc.d xvfb defaults
+ register: xvfb_setup
+ changed_when: "'System start/stop links for /etc/init.d/xvfb already exist' not in xvfb_setup.stdout"
notify: start xvfb
+ tags:
+ - xvfb
-- name: set DISPLAY environment variable for xvfb on reboot
- copy: src=xvfb_display.sh dest=/etc/profile.d/xvfb_display.sh owner=root mode=444
+- name: Set DISPLAY environment variable for xvfb.
+ copy:
+ src: xvfb_display.sh
+ dest: /etc/profile.d/xvfb_display.sh
+ owner: root
+ mode: "0444"
+ tags:
+ - xvfb
+ - environment
+ - permissions
diff --git a/install_files/ansible-base/roles/ossec_server/files/test_admin_key.pub b/install_files/ansible-base/roles/ossec-server/files/test_admin_key.pub
similarity index 100%
rename from install_files/ansible-base/roles/ossec_server/files/test_admin_key.pub
rename to install_files/ansible-base/roles/ossec-server/files/test_admin_key.pub
diff --git a/install_files/ansible-base/roles/ossec_server/files/test_admin_key.sec b/install_files/ansible-base/roles/ossec-server/files/test_admin_key.sec
similarity index 100%
rename from install_files/ansible-base/roles/ossec_server/files/test_admin_key.sec
rename to install_files/ansible-base/roles/ossec-server/files/test_admin_key.sec
diff --git a/securedrop/test.sh b/securedrop/test.sh
deleted file mode 100755
--- a/securedrop/test.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-if [ $(which vagrant) ] ; then
- echo ""
- echo "*** You probably want to run tests from vagrant. Run 'vagrant ssh', then 'cd /vagrant/securedrop' and re-run this script***"
- echo ""
-fi
-
-export PYTHONPATH=./tests
-export SECUREDROP_ENV=test
-
-# -f makes unittest fail fast, so we can use && to avoid burying test failures
-python -m unittest -fv tests.functional.submit_and_retrieve_message && \
-python -m unittest -fv tests.functional.submit_and_retrieve_file && \
-python -m unittest -fv tests.functional.admin_interface
-
diff --git a/securedrop/tests/__init__.py b/securedrop/tests/__init__.py
--- a/securedrop/tests/__init__.py
+++ b/securedrop/tests/__init__.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+from os.path import abspath, dirname, join, realpath
+import sys
+
+# The tests directory should be adjacent to the securedrop directory. By adding
+# the securedrop directory to sys.path here, all test modules are able to
+# directly import modules in the securedrop directory.
+sys.path.append(abspath(join(dirname(realpath(__file__)), '..', 'securedrop')))
diff --git a/securedrop/tests/common.py b/securedrop/tests/common.py
deleted file mode 100644
--- a/securedrop/tests/common.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import os
-import shutil
-import uuid
-import subprocess
-
-import gnupg
-
-import config
-from db import init_db, db_session, Source, Submission
-import crypto_util
-
-
-def clean_root():
- shutil.rmtree(config.SECUREDROP_DATA_ROOT)
-
-
-def create_directories():
- # Create directories for the file store and the GPG keyring
- for d in (config.SECUREDROP_DATA_ROOT, config.STORE_DIR,
- config.GPG_KEY_DIR, config.TEMP_DIR):
- if not os.path.isdir(d):
- os.mkdir(d)
-
-
-def init_gpg():
- # Initialize the GPG keyring
- gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR)
- # Import the journalist key for testing (faster to import a pre-generated
- # key than to gen a new one every time)
- for keyfile in ("test_journalist_key.pub", "test_journalist_key.sec"):
- gpg.import_keys(open(keyfile).read())
- return gpg
-
-
-def setup_test_docs(sid, files):
- filenames = [os.path.join(config.STORE_DIR, sid, file) for file in files]
-
- for filename in filenames:
- dirname = os.path.dirname(filename)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
- with open(filename, 'w') as fp:
- fp.write(str(uuid.uuid4()))
-
- # Add Submission to the db
- source = Source.query.filter(Source.filesystem_id == sid).one()
- submission = Submission(source, os.path.basename(filename))
- db_session.add(submission)
- db_session.commit()
-
- return filenames
-
-
-def new_codename(client, session):
- """Helper function to go through the "generate codename" flow"""
- with client as c:
- rv = c.get('/generate')
- codename = session['codename']
- rv = c.post('/create')
- return codename
-
-
-def shared_setup():
- """Set up the file system, GPG, and database"""
- create_directories()
- init_gpg()
- init_db()
-
- # Do tests that should always run on app startup
- crypto_util.do_runtime_tests()
-
- # Start the Python-RQ worker if it's not already running
- if not os.path.exists(config.WORKER_PIDFILE):
- subprocess.Popen(["rqworker", "-P", config.SECUREDROP_ROOT,
- "--pid", config.WORKER_PIDFILE])
-
-
-def shared_teardown():
- clean_root()
-
-
-def logout(test_client):
- # See http://flask.pocoo.org/docs/testing/#accessing-and-modifying-sessions
- # This is necessary because SecureDrop doesn't have a logout button, so a
- # user is logged in until they close the browser, which clears the session.
- # For testing, this function simulates closing the browser at places
- # where a source is likely to do so (for instance, between submitting a
- # document and checking for a journalist reply).
- with test_client.session_transaction() as sess:
- sess.clear()
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/conftest.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+import os
+import shutil
+import signal
+import subprocess
+
+import psutil
+import pytest
+
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
+
+# TODO: the PID file for the redis worker is hard-coded below.
+# Ideally this constant would be provided by a test harness.
+# It has been intentionally omitted from `config.py.example`
+# in order to isolate the test vars from prod vars.
+TEST_WORKER_PIDFILE = '/tmp/securedrop_test_worker.pid'
+
+
[email protected](scope='session')
+def setUptearDown():
+ _start_test_rqworker(config)
+ yield
+ _stop_test_rqworker()
+ _cleanup_test_securedrop_dataroot(config)
+
+
+def _start_test_rqworker(config):
+ if not psutil.pid_exists(_get_pid_from_file(TEST_WORKER_PIDFILE)):
+ tmp_logfile = open('/tmp/test_rqworker.log', 'w')
+ subprocess.Popen(['rqworker', 'test',
+ '-P', config.SECUREDROP_ROOT,
+ '--pid', TEST_WORKER_PIDFILE],
+ stdout=tmp_logfile,
+ stderr=subprocess.STDOUT)
+
+
+def _stop_test_rqworker():
+ rqworker_pid = _get_pid_from_file(TEST_WORKER_PIDFILE)
+ if rqworker_pid:
+ os.kill(rqworker_pid, signal.SIGTERM)
+ try:
+ os.remove(TEST_WORKER_PIDFILE)
+ except OSError:
+ pass
+
+
+def _get_pid_from_file(pid_file_name):
+ try:
+ return int(open(pid_file_name).read())
+ except IOError:
+ return None
+
+
+def _cleanup_test_securedrop_dataroot(config):
+ # Keyboard interrupts or dropping to pdb after a test failure sometimes
+ # result in the temporary test SecureDrop data root not being deleted.
+ try:
+ shutil.rmtree(config.SECUREDROP_DATA_ROOT)
+ except OSError:
+ pass
diff --git a/securedrop/test_journalist_key.pub b/securedrop/tests/files/test_journalist_key.pub
similarity index 100%
rename from securedrop/test_journalist_key.pub
rename to securedrop/tests/files/test_journalist_key.pub
diff --git a/securedrop/test_journalist_key.sec b/securedrop/tests/files/test_journalist_key.sec
similarity index 100%
rename from securedrop/test_journalist_key.sec
rename to securedrop/tests/files/test_journalist_key.sec
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -1,31 +1,33 @@
-import unittest
-from selenium import webdriver
-from selenium.webdriver.firefox import firefox_binary
-from selenium.common.exceptions import WebDriverException
+# -*- coding: utf-8 -*-
+
+from datetime import datetime
+import mock
from multiprocessing import Process
-import socket
-import shutil
import os
-import gnupg
-import urllib2
+from os.path import abspath, dirname, join, realpath
+import shutil
+import signal
+import socket
import sys
-
-
-import config
-
-import source
-import journalist
-from tests import common
+import time
+import traceback
+import unittest
import urllib2
-import signal
-import traceback
-from datetime import datetime
-import time
-import mock
+from Crypto import Random
+import gnupg
+from selenium import webdriver
+from selenium.common.exceptions import WebDriverException
+from selenium.webdriver.firefox import firefox_binary
os.environ['SECUREDROP_ENV'] = 'test'
+import config
+import db
+import journalist
+import source
+import tests.utils.env as env
+LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log'))
class FunctionalTest():
@@ -37,8 +39,10 @@ def _unused_port(self):
return port
def _create_webdriver(self):
- log_file = open('tests/log/firefox.log', 'a')
- log_file.write('\n\n[%s] Running Functional Tests\n' % str(datetime.now()))
+ log_file = open(join(LOG_DIR, 'firefox.log'), 'a')
+ log_file.write(
+ '\n\n[%s] Running Functional Tests\n' % str(
+ datetime.now()))
log_file.flush()
firefox = firefox_binary.FirefoxBinary(log_file=log_file)
return webdriver.Firefox(firefox_binary=firefox)
@@ -52,9 +56,9 @@ def setUp(self):
signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s))
- common.create_directories()
- self.gpg = common.init_gpg()
- common.init_db()
+ env.create_directories()
+ self.gpg = env.init_gpg()
+ db.init_db()
source_port = self._unused_port()
journalist_port = self._unused_port()
@@ -63,10 +67,26 @@ def setUp(self):
self.journalist_location = "http://localhost:%d" % journalist_port
def start_source_server():
- source.app.run(port=source_port, debug=True, use_reloader=False)
+ # We call Random.atfork() here because we fork the source and
+ # journalist server from the main Python process we use to drive
+ # our browser with multiprocessing.Process() below. These child
+ # processes inherit the same RNG state as the parent process, which
+ # is a problem because they would produce identical output if we
+ # didn't re-seed them after forking.
+ Random.atfork()
+ source.app.run(
+ port=source_port,
+ debug=True,
+ use_reloader=False,
+ threaded=True)
def start_journalist_server():
- journalist.app.run(port=journalist_port, debug=True, use_reloader=False)
+ Random.atfork()
+ journalist.app.run(
+ port=journalist_port,
+ debug=True,
+ use_reloader=False,
+ threaded=True)
self.source_process = Process(target=start_source_server)
self.journalist_process = Process(target=start_journalist_server)
@@ -89,14 +109,15 @@ def start_journalist_server():
self.secret_message = 'blah blah blah'
def tearDown(self):
- common.clean_root()
+ env.teardown()
self.driver.quit()
self.source_process.terminate()
self.journalist_process.terminate()
def wait_for(self, function_with_assertion, timeout=5):
"""Polling wait for an arbitrary assertion."""
- # Thanks to http://chimera.labs.oreilly.com/books/1234000000754/ch20.html#_a_common_selenium_problem_race_conditions
+ # Thanks to
+ # http://chimera.labs.oreilly.com/books/1234000000754/ch20.html#_a_common_selenium_problem_race_conditions
start_time = time.time()
while time.time() - start_time < timeout:
try:
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -6,7 +6,8 @@
from selenium.common.exceptions import NoSuchElementException
-from db import db_session, Journalist
+import tests.utils.db_helper as db_helper
+from db import Journalist
class JournalistNavigationSteps():
@@ -26,54 +27,37 @@ def _get_submission_content(self, file_url, raw_content):
def _login_user(self, username, password, token):
self.driver.get(self.journalist_location + "/login")
- username_field = self.driver.find_element_by_css_selector('input[name="username"]')
+ username_field = self.driver.find_element_by_css_selector(
+ 'input[name="username"]')
username_field.send_keys(username)
- password_field = self.driver.find_element_by_css_selector('input[name="password"]')
+ password_field = self.driver.find_element_by_css_selector(
+ 'input[name="password"]')
password_field.send_keys(password)
- token_field = self.driver.find_element_by_css_selector('input[name="token"]')
+ token_field = self.driver.find_element_by_css_selector(
+ 'input[name="token"]')
token_field.send_keys(token)
- submit_button = self.driver.find_element_by_css_selector('button[type=submit]')
+ submit_button = self.driver.find_element_by_css_selector(
+ 'button[type=submit]')
submit_button.click()
# Successful login should redirect to the index
- self.assertEquals(self.driver.current_url, self.journalist_location + '/')
+ self.assertEquals(self.driver.current_url,
+ self.journalist_location + '/')
def _journalist_logs_in(self):
# Create a test user for logging in
- test_user_info = dict(
- username='test',
- password='test')
- test_user = Journalist(**test_user_info)
- db_session.add(test_user)
- db_session.commit()
-
- self._login_user(test_user_info['username'],
- test_user_info['password'],
- test_user.totp.now())
+ self.user, self.user_pw = db_helper.init_journalist()
+ self._login_user(self.user.username, self.user_pw, 'mocked')
headline = self.driver.find_element_by_css_selector('span.headline')
self.assertIn('Sources', headline.text)
def _admin_logs_in(self):
- # Create a test admin user for logging in
- admin_user_info = dict(
- username='admin',
- password='admin',
- is_admin=True)
- admin_user = Journalist(**admin_user_info)
- db_session.add(admin_user)
- db_session.commit()
-
- # Stash the admin user on self so we can use it in later tests
- self.admin_user = admin_user_info
- self.admin_user['orm_obj'] = admin_user
-
- self._login_user(admin_user_info['username'],
- admin_user_info['password'],
- admin_user.totp.now())
+ self.admin, self.admin_pw = db_helper.init_journalist(is_admin=True)
+ self._login_user(self.admin.username, self.admin_pw, 'mocked')
# Admin user should log in to the same interface as a normal user,
# since there may be users who wish to be both journalists and admins.
@@ -88,36 +72,38 @@ def _admin_visits_admin_interface(self):
admin_interface_link = self.driver.find_element_by_link_text('Admin')
admin_interface_link.click()
- h2s = self.driver.find_elements_by_tag_name('h2')
- self.assertIn("Admin Interface", [el.text for el in h2s])
-
- users_table_rows = self.driver.find_elements_by_css_selector('table#users tr.user')
- self.assertEquals(len(users_table_rows), 1)
+ h1s = self.driver.find_elements_by_tag_name('h1')
+ self.assertIn("Admin Interface", [el.text for el in h1s])
def _add_user(self, username, password, is_admin=False):
- username_field = self.driver.find_element_by_css_selector('input[name="username"]')
+ username_field = self.driver.find_element_by_css_selector(
+ 'input[name="username"]')
username_field.send_keys(username)
- password_field = self.driver.find_element_by_css_selector('input[name="password"]')
+ password_field = self.driver.find_element_by_css_selector(
+ 'input[name="password"]')
password_field.send_keys(password)
- password_again_field = self.driver.find_element_by_css_selector('input[name="password_again"]')
+ password_again_field = self.driver.find_element_by_css_selector(
+ 'input[name="password_again"]')
password_again_field.send_keys(password)
if is_admin:
# TODO implement (checkbox is unchecked by default)
pass
- submit_button = self.driver.find_element_by_css_selector('button[type=submit]')
+ submit_button = self.driver.find_element_by_css_selector(
+ 'button[type=submit]')
submit_button.click()
def _admin_adds_a_user(self):
- add_user_btn = self.driver.find_element_by_css_selector('button#add-user')
+ add_user_btn = self.driver.find_element_by_css_selector(
+ 'button#add-user')
add_user_btn.click()
- # The add user page has a form with an "Add user" button
+ # The add user page has a form with an "ADD USER" button
btns = self.driver.find_elements_by_tag_name('button')
- self.assertIn('Add user', [el.text for el in btns])
+ self.assertIn('ADD USER', [el.text for el in btns])
self.new_user = dict(
username='dellsberg',
@@ -135,16 +121,20 @@ def _admin_adds_a_user(self):
self.new_user['orm_obj'] = Journalist.query.filter(
Journalist.username == self.new_user['username']).one()
- # Verify the two factor authentication
- token_field = self.driver.find_element_by_css_selector('input[name="token"]')
- token_field.send_keys(self.new_user['orm_obj'].totp.now())
- submit_button = self.driver.find_element_by_css_selector('button[type=submit]')
+ # Verify the two-factor authentication
+ token_field = self.driver.find_element_by_css_selector(
+ 'input[name="token"]')
+ token_field.send_keys('mocked')
+ submit_button = self.driver.find_element_by_css_selector(
+ 'button[type=submit]')
submit_button.click()
# Successfully verifying the code should redirect to the admin
# interface, and flash a message indicating success
- flashed_msgs = self.driver.find_elements_by_css_selector('p.flash')
- self.assertIn("Two factor token successfully verified for user {}!".format(self.new_user['username']), [el.text for el in flashed_msgs])
+ flashed_msgs = self.driver.find_elements_by_css_selector('.flash')
+ self.assertIn(("Two-factor token successfully verified for user"
+ " {}!").format(self.new_user['username']),
+ [el.text for el in flashed_msgs])
def _logout(self):
# Click the logout link
@@ -159,23 +149,11 @@ def _logout(self):
def _check_login_with_otp(self, otp):
self._logout()
- self._login_user(self.new_user['username'], self.new_user['password'], otp)
+ self._login_user(self.new_user['username'],
+ self.new_user['password'], otp)
# Test that the new user was logged in successfully
self.assertIn('Sources', self.driver.page_source)
- def _check_login_with_skewed_otp(self):
- interval = 30
-
- # Client is behind server
- otp = self.new_user['orm_obj'].totp.at(
- datetime.datetime.now() - datetime.timedelta(seconds=interval))
- self._check_login_with_otp(otp)
-
- # Client is ahead of server
- otp = self.new_user['orm_obj'].totp.at(
- datetime.datetime.now() + datetime.timedelta(seconds=interval))
- self._check_login_with_otp(otp)
-
def _new_user_can_log_in(self):
# Log the admin user out
self._logout()
@@ -183,7 +161,7 @@ def _new_user_can_log_in(self):
# Log the new user in
self._login_user(self.new_user['username'],
self.new_user['password'],
- self.new_user['orm_obj'].totp.now())
+ 'mocked')
# Test that the new user was logged in successfully
self.assertIn('Sources', self.driver.page_source)
@@ -194,23 +172,82 @@ def _new_user_can_log_in(self):
self.driver.find_element_by_link_text,
'Admin')
- # Check that the user can log in with slightly skewed OTP tokens
- self._check_login_with_skewed_otp()
+ def _edit_account(self):
+ edit_account_link = self.driver.find_element_by_link_text(
+ 'Edit Account')
+ edit_account_link.click()
+
+ # The header says "Edit your account"
+ h1s = self.driver.find_elements_by_tag_name('h1')[0]
+ self.assertEqual('Edit your account', h1s.text)
+ # There's no link back to the admin interface.
+ with self.assertRaises(NoSuchElementException):
+ self.driver.find_element_by_partial_link_text('Back to admin interface')
+ # There's no field to change your username.
+ with self.assertRaises(NoSuchElementException):
+ self.driver.find_element_by_css_selector('#username')
+ # There's no checkbox to change the administrator status of your
+ # account.
+ with self.assertRaises(NoSuchElementException):
+ username_field = self.driver.find_element_by_css_selector('#is_admin')
+ # 2FA reset buttons at the bottom point to the user URLs for reset.
+ totp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-totp')[0]
+ self.assertRegexpMatches(totp_reset_button.get_attribute('action'),
+ '/account/reset-2fa-totp')
+ hotp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-hotp')[0]
+ self.assertRegexpMatches(hotp_reset_button.get_attribute('action'),
+ '/account/reset-2fa-hotp')
def _edit_user(self, username):
+ user = Journalist.query.filter_by(username=username).one()
+
new_user_edit_links = filter(
lambda el: el.get_attribute('data-username') == username,
self.driver.find_elements_by_tag_name('a'))
self.assertEquals(len(new_user_edit_links), 1)
new_user_edit_links[0].click()
+ # The header says "Edit user "username"".
+ h1s = self.driver.find_elements_by_tag_name('h1')[0]
+ self.assertEqual('Edit user "{}"'.format(username), h1s.text)
+ # There's a convenient link back to the admin interface.
+ admin_interface_link = self.driver.find_element_by_partial_link_text(
+ 'Back to admin interface')
+ self.assertRegexpMatches(admin_interface_link.get_attribute('href'),
+ '/admin$')
+ # There's a field to change the user's username and it's already filled
+ # out with the user's username.
+ username_field = self.driver.find_element_by_css_selector('#username')
+ self.assertEqual(username_field.get_attribute('placeholder'), username)
+ # There's a checkbox to change the administrator status of the user and
+ # it's already checked appropriately to reflect the current status of
+ # our user.
+ username_field = self.driver.find_element_by_css_selector('#is_admin')
+ self.assertEqual(bool(username_field.get_attribute('checked')),
+ user.is_admin)
+ # 2FA reset buttons at the bottom point to the admin URLs for
+    # resetting 2FA and include the correct user id in the hidden uid.
+ totp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-totp')[0]
+ self.assertRegexpMatches(totp_reset_button.get_attribute('action'),
+ '/admin/reset-2fa-totp')
+ totp_reset_uid = totp_reset_button.find_element_by_name('uid')
+ self.assertEqual(int(totp_reset_uid.get_attribute('value')), user.id)
+ self.assertFalse(totp_reset_uid.is_displayed())
+ hotp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-hotp')[0]
+ self.assertRegexpMatches(hotp_reset_button.get_attribute('action'),
+ '/admin/reset-2fa-hotp')
+ hotp_reset_uid = hotp_reset_button.find_element_by_name('uid')
+ self.assertEqual(int(hotp_reset_uid.get_attribute('value')), user.id)
+ self.assertFalse(hotp_reset_uid.is_displayed())
def _admin_can_edit_new_user(self):
# Log the new user out
self._logout()
- self._login_user(self.admin_user['username'],
- self.admin_user['password'],
- self.admin_user['orm_obj'].totp.now())
+ self._login_user(self.admin.username, self.admin_pw, 'mocked')
# Go to the admin interface
admin_interface_link = self.driver.find_element_by_link_text('Admin')
@@ -231,9 +268,11 @@ def _admin_can_edit_new_user(self):
new_username = self.new_user['username'] + "2"
- username_field = self.driver.find_element_by_css_selector('input[name="username"]')
+ username_field = self.driver.find_element_by_css_selector(
+ 'input[name="username"]')
username_field.send_keys(new_username)
- update_user_btn = self.driver.find_element_by_css_selector('button[type=submit]')
+ update_user_btn = self.driver.find_element_by_css_selector(
+ 'button[type=submit]')
update_user_btn.click()
self.wait_for(
@@ -248,16 +287,14 @@ def _admin_can_edit_new_user(self):
self._logout()
self._login_user(self.new_user['username'],
self.new_user['password'],
- self.new_user['orm_obj'].totp.now())
+ 'mocked')
self.wait_for(
lambda: self.assertIn('Sources', self.driver.page_source)
)
# Log the admin user back in
self._logout()
- self._login_user(self.admin_user['username'],
- self.admin_user['password'],
- self.admin_user['orm_obj'].totp.now())
+ self._login_user(self.admin.username, self.admin_pw, 'mocked')
# Go to the admin interface
admin_interface_link = self.driver.find_element_by_link_text('Admin')
@@ -266,11 +303,14 @@ def _admin_can_edit_new_user(self):
self._edit_user(self.new_user['username'])
new_password = self.new_user['password'] + "2"
- password_field = self.driver.find_element_by_css_selector('input[name="password"]')
+ password_field = self.driver.find_element_by_css_selector(
+ 'input[name="password"]')
password_field.send_keys(new_password)
- password_again_field = self.driver.find_element_by_css_selector('input[name="password_again"]')
+ password_again_field = self.driver.find_element_by_css_selector(
+ 'input[name="password_again"]')
password_again_field.send_keys(new_password)
- update_user_btn = self.driver.find_element_by_css_selector('button#update-user')
+ update_user_btn = self.driver.find_element_by_css_selector(
+ 'button#update')
update_user_btn.click()
# Wait until page refreshes to avoid causing a broken pipe error (#623)
@@ -287,7 +327,7 @@ def _admin_can_edit_new_user(self):
self._logout()
self._login_user(self.new_user['username'],
self.new_user['password'],
- self.new_user['orm_obj'].totp.now())
+ 'mocked')
self.wait_for(
lambda: self.assertIn('Sources', self.driver.page_source)
)
@@ -303,10 +343,38 @@ def _journalist_checks_messages(self):
unread_span = self.driver.find_element_by_css_selector('span.unread')
self.assertIn("1 unread", unread_span.text)
+ def _journalist_stars_and_unstars_single_message(self):
+ # Message begins unstarred
+ with self.assertRaises(NoSuchElementException):
+ self.driver.find_element_by_id('starred-source-link-1')
+
+ # Journalist stars the message
+ self.driver.find_element_by_class_name('button-star').click()
+ starred = self.driver.find_elements_by_id('starred-source-link-1')
+ self.assertEquals(1, len(starred))
+
+ # Journalist unstars the message
+ self.driver.find_element_by_class_name('button-star').click()
+ with self.assertRaises(NoSuchElementException):
+ self.driver.find_element_by_id('starred-source-link-1')
+
+ def _journalist_selects_all_sources_then_selects_none(self):
+ self.driver.find_element_by_id('select_all').click()
+ checkboxes = self.driver.find_elements_by_id('checkbox')
+ for checkbox in checkboxes:
+ self.assertTrue(checkbox.is_selected())
+
+ self.driver.find_element_by_id('select_none').click()
+ checkboxes = self.driver.find_elements_by_id('checkbox')
+ for checkbox in checkboxes:
+ self.assertFalse(checkbox.is_selected())
+
def _journalist_downloads_message(self):
- self.driver.find_element_by_css_selector('#un-starred-source-link-1').click()
+ self.driver.find_element_by_css_selector(
+ '#un-starred-source-link-1').click()
- submissions = self.driver.find_elements_by_css_selector('#submissions a')
+ submissions = self.driver.find_elements_by_css_selector(
+ '#submissions a')
self.assertEqual(1, len(submissions))
file_url = submissions[0].get_attribute('href')
@@ -323,10 +391,21 @@ def cookie_string_from_selenium_cookies(cookies):
return ' '.join(cookie_strs)
submission_req = urllib2.Request(file_url)
- submission_req.add_header('Cookie',
- cookie_string_from_selenium_cookies(self.driver.get_cookies()))
+ submission_req.add_header(
+ 'Cookie',
+ cookie_string_from_selenium_cookies(
+ self.driver.get_cookies()))
raw_content = urllib2.urlopen(submission_req).read()
decrypted_submission = self.gpg.decrypt(raw_content)
- submission = self._get_submission_content(file_url, decrypted_submission)
+ submission = self._get_submission_content(file_url,
+ decrypted_submission)
self.assertEqual(self.secret_message, submission)
+
+ def _journalist_sends_reply_to_source(self):
+ self.driver.find_element_by_id('reply-text-field').send_keys('Nice docs')
+
+ self.driver.find_element_by_id('reply-button').click()
+
+ self.assertIn("Thanks! Your reply has been stored.",
+ self.driver.page_source)
diff --git a/securedrop/tests/functional/make_account_changes.py b/securedrop/tests/functional/make_account_changes.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/make_account_changes.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+from unittest import TestCase
+
+from functional_test import FunctionalTest
+from journalist_navigation_steps import JournalistNavigationSteps
+
+class MakeAccountChanges(FunctionalTest, JournalistNavigationSteps, TestCase):
+ def test_admin_edit_account_html_template_rendering(self):
+ """The edit_account.html template is used both when an admin is editing
+ a user's account, and when a user is editing their own account. While
+ there is no security risk in doing so, we do want to ensure the UX is
+ as expected: that only the elements that belong in a particular view
+ are exposed there."""
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ # Admin view of admin user
+ self._edit_user('admin')
+ self._admin_visits_admin_interface()
+ self._admin_adds_a_user()
+ # Admin view of non-admin user
+ self._edit_user('dellsberg')
+ # User view of self
+ self._edit_account()
+ self._logout()
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -1,13 +1,16 @@
import tempfile
+
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
+
class SourceNavigationSteps():
def _source_visits_source_homepage(self):
self.driver.get(self.source_location)
- self.assertEqual("SecureDrop | Protecting Journalists and Sources", self.driver.title)
+ self.assertEqual("SecureDrop | Protecting Journalists and Sources",
+ self.driver.title)
def _source_chooses_to_submit_documents(self):
# First move the cursor to a known position in case it happens to
@@ -41,6 +44,37 @@ def _source_chooses_to_submit_documents(self):
self.assertTrue(len(codename.text) > 0)
self.source_name = codename.text
+ def _source_chooses_to_login(self):
+ self.driver.find_element_by_id('login-button').click()
+
+ logins = self.driver.find_elements_by_id('login-with-existing-codename')
+
+ self.assertTrue(len(logins) > 0)
+
+ def _source_hits_cancel_at_login_page(self):
+ self.driver.find_element_by_id('cancel').click()
+
+ self.driver.get(self.source_location)
+
+ self.assertEqual("SecureDrop | Protecting Journalists and Sources",
+ self.driver.title)
+
+ def _source_proceeds_to_login(self):
+ codename_input = self.driver.find_element_by_id('login-with-existing-codename')
+ codename_input.send_keys(self.source_name)
+
+ continue_button = self.driver.find_element_by_id('login')
+ continue_button.click()
+
+ self.assertEqual("SecureDrop | Protecting Journalists and Sources",
+ self.driver.title)
+
+ def _source_hits_cancel_at_submit_page(self):
+ self.driver.find_element_by_id('cancel').click()
+
+ headline = self.driver.find_element_by_class_name('headline')
+ self.assertEqual('Submit Materials', headline.text)
+
def _source_continues_to_submit_page(self):
continue_button = self.driver.find_element_by_id('continue-button')
@@ -61,7 +95,7 @@ def _source_continues_to_submit_page(self):
continue_button.click()
headline = self.driver.find_element_by_class_name('headline')
- self.assertEqual('Submit documents and messages', headline.text)
+ self.assertEqual('Submit Materials', headline.text)
def _source_submits_a_file(self):
with tempfile.NamedTemporaryFile() as file:
@@ -71,7 +105,8 @@ def _source_submits_a_file(self):
filename = file.name
filebasename = filename.split('/')[-1]
- file_upload_box = self.driver.find_element_by_css_selector('[name=fh]')
+ file_upload_box = self.driver.find_element_by_css_selector(
+ '[name=fh]')
file_upload_box.send_keys(filename)
submit_button = self.driver.find_element_by_id('submit-doc-button')
@@ -84,25 +119,46 @@ def _source_submits_a_file(self):
submit_button.click()
- notification = self.driver.find_element_by_css_selector('p.notification')
- expected_notification = 'Thanks for submitting something to SecureDrop! Please check back later for replies.'
+ notification = self.driver.find_element_by_css_selector(
+ '.success')
+ expected_notification = 'Thank you for sending this information to us'
self.assertIn(expected_notification, notification.text)
def _source_submits_a_message(self):
text_box = self.driver.find_element_by_css_selector('[name=msg]')
-
text_box.send_keys(self.secret_message) # send_keys = type into text box
submit_button = self.driver.find_element_by_id('submit-doc-button')
submit_button.click()
notification = self.driver.find_element_by_css_selector(
- 'p.notification')
- self.assertIn('Thanks for submitting something to SecureDrop!'
- ' Please check back later for replies.',
+ '.success')
+ self.assertIn('Thank you for sending this information to us',
notification.text)
+ def _source_deletes_a_journalist_reply(self):
+ # Get the reply filename so we can use IDs to select the delete buttons
+ reply_filename_element = self.driver.find_element_by_name('reply_filename')
+ reply_filename = reply_filename_element.get_attribute('value')
+
+ delete_button_id = 'delete-reply-{}'.format(reply_filename)
+ delete_button = self.driver.find_element_by_id(delete_button_id)
+ delete_button.click()
+
+ confirm_button_id = 'confirm-delete-reply-button-{}'.format(reply_filename)
+ confirm_button = self.driver.find_element_by_id(confirm_button_id)
+ self.assertTrue(confirm_button.is_displayed())
+ confirm_button.click()
+
+ notification = self.driver.find_element_by_class_name('notification')
+ self.assertIn('Reply deleted', notification.text)
+
def _source_logs_out(self):
logout_button = self.driver.find_element_by_id('logout').click()
- notification = self.driver.find_element_by_css_selector('p.error')
- self.assertIn('Thank you for logging out!', notification.text)
+ notification = self.driver.find_element_by_css_selector('.important')
+ self.assertIn('Thank you for exiting your session!', notification.text)
+
+ def _source_not_found(self):
+ self.driver.get(self.source_location + "/unlikely")
+ message = self.driver.find_element_by_id('page_not_found')
+ self.assertTrue(message.is_displayed())
diff --git a/securedrop/tests/functional/submission_not_in_memory.py b/securedrop/tests/functional/submission_not_in_memory.py
--- a/securedrop/tests/functional/submission_not_in_memory.py
+++ b/securedrop/tests/functional/submission_not_in_memory.py
@@ -8,7 +8,8 @@
import re
-class SubmissionNotInMemoryTest(TestCase, FunctionalTest, SourceNavigationSteps):
+class SubmissionNotInMemoryTest(TestCase, FunctionalTest,
+ SourceNavigationSteps):
def setUp(self):
self.devnull = open('/dev/null', 'r')
@@ -21,8 +22,11 @@ def _memory_dump(self, pid):
core_dump_base_name = '/tmp/core_dump'
core_dump_file_name = core_dump_base_name + '.' + pid
try:
- subprocess.call(["sudo", "gcore", "-o", core_dump_base_name, pid], stdout=self.devnull, stderr=self.devnull)
- subprocess.call(["sudo", "chown", getpass.getuser(), core_dump_file_name])
+ subprocess.call(["sudo", "gcore", "-o",
+ core_dump_base_name, pid], stdout=self.devnull,
+ stderr=self.devnull)
+ subprocess.call(["sudo", "chown", getpass.getuser(),
+ core_dump_file_name])
with open(core_dump_file_name, 'r') as fp:
return fp.read()
finally:
@@ -40,7 +44,8 @@ def test_message_is_not_retained_in_memory(self):
source_server_pid = str(self.source_process.pid)
memory_dump = self._memory_dump(source_server_pid)
- secrets_in_memory = self._num_strings_in(self.secret_message, memory_dump)
+ secrets_in_memory = self._num_strings_in(self.secret_message,
+ memory_dump)
self.assertLess(secrets_in_memory, 1)
@@ -53,6 +58,7 @@ def test_file_upload_is_not_retained_in_memory(self):
source_server_pid = str(self.source_process.pid)
memory_dump = self._memory_dump(source_server_pid)
- secrets_in_memory = self._num_strings_in(self.secret_message, memory_dump)
+ secrets_in_memory = self._num_strings_in(self.secret_message,
+ memory_dump)
self.assertLess(secrets_in_memory, 1)
diff --git a/securedrop/tests/functional/admin_interface.py b/securedrop/tests/functional/test_admin_interface.py
similarity index 91%
rename from securedrop/tests/functional/admin_interface.py
rename to securedrop/tests/functional/test_admin_interface.py
--- a/securedrop/tests/functional/admin_interface.py
+++ b/securedrop/tests/functional/test_admin_interface.py
@@ -20,6 +20,3 @@ def test_admin_interface(self):
self._admin_adds_a_user()
self._new_user_can_log_in()
self._admin_can_edit_new_user()
-
-if __name__ == "__main__":
- unittest.main(verbosity=2)
diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source_notfound.py
@@ -0,0 +1,19 @@
+import unittest
+
+import source_navigation_steps
+import functional_test
+
+
+class SourceInterfaceBannerWarnings(
+ unittest.TestCase,
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def setUp(self):
+ functional_test.FunctionalTest.setUp(self)
+
+ def tearDown(self):
+ functional_test.FunctionalTest.tearDown(self)
+
+ def test_not_found(self):
+ self._source_not_found()
diff --git a/securedrop/tests/functional/test_source_warnings.py b/securedrop/tests/functional/test_source_warnings.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source_warnings.py
@@ -0,0 +1,25 @@
+from selenium import webdriver
+import unittest
+
+import source_navigation_steps
+import functional_test
+
+
+class SourceInterfaceBannerWarnings(
+ unittest.TestCase,
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def setUp(self):
+ functional_test.FunctionalTest.setUp(self)
+
+ def tearDown(self):
+ functional_test.FunctionalTest.tearDown(self)
+
+ def test_warning_appears_if_tor_browser_not_in_use(self):
+ self.driver.get(self.source_location)
+
+ warning_banner = self.driver.find_element_by_class_name('use-tor-browser')
+
+ self.assertIn("We recommend using Tor Browser to access SecureDrop",
+ warning_banner.text)
diff --git a/securedrop/tests/functional/submit_and_retrieve_file.py b/securedrop/tests/functional/test_submit_and_retrieve_file.py
similarity index 52%
rename from securedrop/tests/functional/submit_and_retrieve_file.py
rename to securedrop/tests/functional/test_submit_and_retrieve_file.py
--- a/securedrop/tests/functional/submit_and_retrieve_file.py
+++ b/securedrop/tests/functional/test_submit_and_retrieve_file.py
@@ -24,4 +24,22 @@ def test_submit_and_retrieve_happy_path(self):
self._source_logs_out()
self._journalist_logs_in()
self._journalist_checks_messages()
+ self._journalist_stars_and_unstars_single_message()
+ self._journalist_selects_all_sources_then_selects_none()
self._journalist_downloads_message()
+ self._journalist_sends_reply_to_source()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_proceeds_to_login()
+ self._source_deletes_a_journalist_reply()
+
+ def test_source_cancels_at_login_page(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_hits_cancel_at_login_page()
+
+ def test_source_cancels_at_submit_page(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_hits_cancel_at_submit_page()
diff --git a/securedrop/tests/functional/submit_and_retrieve_message.py b/securedrop/tests/functional/test_submit_and_retrieve_message.py
similarity index 93%
rename from securedrop/tests/functional/submit_and_retrieve_message.py
rename to securedrop/tests/functional/test_submit_and_retrieve_message.py
--- a/securedrop/tests/functional/submit_and_retrieve_message.py
+++ b/securedrop/tests/functional/test_submit_and_retrieve_message.py
@@ -26,7 +26,3 @@ def test_submit_and_retrieve_happy_path(self):
self._journalist_logs_in()
self._journalist_checks_messages()
self._journalist_downloads_message()
-
-
-if __name__ == "__main__":
- unittest.main(verbosity=2)
diff --git a/securedrop/tests/pytest.ini b/securedrop/tests/pytest.ini
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+testpaths = . functional
+usefixtures = setUptearDown
+addopts = --cov=../securedrop/
diff --git a/securedrop/tests/test_2fa.py b/securedrop/tests/test_2fa.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_2fa.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+import os
+
+from flask import url_for
+import flask_testing
+
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+from db import Journalist, BadTokenException
+import journalist
+import utils
+
+
+class TestJournalist2FA(flask_testing.TestCase):
+ def create_app(self):
+ return journalist.app
+
+ def setUp(self):
+ utils.env.setup()
+ self.admin, self.admin_pw = utils.db_helper.init_journalist(
+ is_admin=True)
+ self.user, self.user_pw = utils.db_helper.init_journalist()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ def _login_admin(self, token=None):
+ """Login to the Journalist Interface as an admin user with the
+ Werkzeug client.
+
+ Args:
+ token (str): The TOTP token to attempt login with. Defaults
+ to the correct token for the current time window.
+ """
+ if token is None:
+ token = self.admin.totp.now()
+ self.client.post(url_for('login'),
+ data=dict(username=self.admin.username,
+ password=self.admin_pw,
+ token=token))
+
+ def _login_user(self, token=None):
+        """Analogous to `_login_admin()` except for a non-admin user.
+ """
+ if token is None:
+ token = self.user.totp.now()
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token=token))
+ return resp
+
+ def test_totp_reuse_protections(self):
+ """Ensure that logging in twice with the same TOTP token
+ fails.
+ """
+ token = self.user.totp.now()
+ resp = self._login_user(token)
+ self.assertRedirects(resp, url_for('index'))
+
+ resp = self._login_user(token)
+ self.assert200(resp)
+ self.assertIn("Login failed", resp.data)
+
+ def test_totp_reuse_protections2(self):
+        """More granular than the preceding test, we want to make sure
+ the right exception is being raised in the right place.
+ """
+ valid_token = self.user.totp.now()
+ Journalist.login(self.user.username, self.user_pw, valid_token)
+ with self.assertRaises(BadTokenException):
+ Journalist.login(self.user.username, self.user_pw, valid_token)
+
+ def test_bad_token_fails_to_verify_on_admin_new_user_two_factor_page(self):
+ # Regression test
+ # https://github.com/freedomofpress/securedrop/pull/1692
+ self._login_admin()
+
+ # Create and submit an invalid 2FA token
+ invalid_token = u'000000'
+ resp = self.client.post(url_for('admin_new_user_two_factor',
+ uid=self.admin.id),
+ data=dict(token=invalid_token))
+
+ self.assert200(resp)
+ self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+ # last_token should be set to the invalid token we just tried to use
+ self.assertEqual(self.admin.last_token, invalid_token)
+
+ # Submit the same invalid token again
+ resp = self.client.post(url_for('admin_new_user_two_factor',
+ uid=self.admin.id),
+ data=dict(token=invalid_token))
+
+ # A flashed message should appear
+ self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+
+ def test_bad_token_fails_to_verify_on_new_user_two_factor_page(self):
+ # Regression test
+ # https://github.com/freedomofpress/securedrop/pull/1692
+ self._login_user()
+
+ # Create and submit an invalid 2FA token
+ invalid_token = u'000000'
+ resp = self.client.post(url_for('account_new_two_factor'),
+ data=dict(token=invalid_token))
+
+ self.assert200(resp)
+ self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+ # last_token should be set to the invalid token we just tried to use
+ self.assertEqual(self.user.last_token, invalid_token)
+
+ # Submit the same invalid token again
+ resp = self.client.post(url_for('account_new_two_factor'),
+ data=dict(token=invalid_token))
+
+ # A flashed message should appear
+ self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+
+ @classmethod
+ def tearDownClass(cls):
+ # Reset the module variables that were changed to mocks so we don't
+ # break other tests
+ reload(journalist)
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_crypto_util.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+import os
+import unittest
+
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
+import crypto_util
+import db
+import store
+import utils
+
+
+class TestCryptoUtil(unittest.TestCase):
+
+ """The set of tests for crypto_util.py."""
+
+ def setUp(self):
+ utils.env.setup()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ def test_word_list_does_not_contain_empty_strings(self):
+ self.assertNotIn('', (crypto_util.words
+ + crypto_util.nouns
+ + crypto_util.adjectives))
+
+ def test_clean(self):
+ ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ'
+ 'KLMNOPQRSTUVWXYZ')
+ invalid_1 = 'foo bar`'
+ invalid_2 = 'bar baz~'
+
+ self.assertEqual(ok, crypto_util.clean(ok))
+ with self.assertRaisesRegexp(crypto_util.CryptoException,
+ 'invalid input: {}'.format(invalid_1)):
+ crypto_util.clean(invalid_1)
+ with self.assertRaisesRegexp(crypto_util.CryptoException,
+ 'invalid input: {}'.format(invalid_2)):
+ crypto_util.clean(invalid_2)
+
+ def test_encrypt_success(self):
+ source, _ = utils.db_helper.init_source()
+ message = str(os.urandom(1))
+ ciphertext = crypto_util.encrypt(
+ message,
+ [crypto_util.getkey(source.filesystem_id), config.JOURNALIST_KEY],
+ store.path(source.filesystem_id, 'somefile.gpg'))
+
+ self.assertIsInstance(ciphertext, str)
+ self.assertNotEqual(ciphertext, message)
+ self.assertGreater(len(ciphertext), 0)
+
+ def test_encrypt_failure(self):
+ source, _ = utils.db_helper.init_source()
+ with self.assertRaisesRegexp(crypto_util.CryptoException,
+ 'no terminal at all requested'):
+ crypto_util.encrypt(
+ str(os.urandom(1)),
+ [],
+ store.path(source.filesystem_id, 'other.gpg'))
+
+ def test_encrypt_without_output(self):
+ """We simply do not specify the option output keyword argument
+ to crypto_util.encrypt() here in order to confirm encryption
+ works when it defaults to `None`.
+ """
+ source, codename = utils.db_helper.init_source()
+ message = str(os.urandom(1))
+ ciphertext = crypto_util.encrypt(
+ message,
+ [crypto_util.getkey(source.filesystem_id), config.JOURNALIST_KEY])
+ plaintext = crypto_util.decrypt(codename, ciphertext)
+
+ self.assertEqual(message, plaintext)
+
+ def test_encrypt_binary_stream(self):
+ """Generally, we pass unicode strings (the type form data is
+ returned as) as plaintext to crypto_util.encrypt(). These have
+ to be converted to "binary stream" types (such as `file`) before
+ we can actually call gnupg.GPG.encrypt() on them. This is done
+ in crypto_util.encrypt() with an `if` branch that uses
+ `gnupg._util._is_stream(plaintext)` as the predicate, and calls
+ `gnupg._util._make_binary_stream(plaintext)` if necessary. This
+ test ensures our encrypt function works even if we provide
+ inputs such that this `if` branch is skipped (i.e., the object
+ passed for `plaintext` is one such that
+ `gnupg._util._is_stream(plaintext)` returns `True`).
+ """
+ source, codename = utils.db_helper.init_source()
+ with open(os.path.realpath(__file__)) as fh:
+ ciphertext = crypto_util.encrypt(
+ fh,
+ [crypto_util.getkey(source.filesystem_id),
+ config.JOURNALIST_KEY],
+ store.path(source.filesystem_id, 'somefile.gpg'))
+ plaintext = crypto_util.decrypt(codename, ciphertext)
+
+ with open(os.path.realpath(__file__)) as fh:
+ self.assertEqual(fh.read(), plaintext)
+
+ def test_encrypt_fingerprints_not_a_list_or_tuple(self):
+ """If passed a single fingerprint as a string, encrypt should
+ correctly place that string in a list, and encryption/
+ decryption should work as intended."""
+ source, codename = utils.db_helper.init_source()
+ message = str(os.urandom(1))
+ ciphertext = crypto_util.encrypt(
+ message,
+ crypto_util.getkey(source.filesystem_id),
+ store.path(source.filesystem_id, 'somefile.gpg'))
+ plaintext = crypto_util.decrypt(codename, ciphertext)
+
+ self.assertEqual(message, plaintext)
+
+ def test_basic_encrypt_then_decrypt_multiple_recipients(self):
+ source, codename = utils.db_helper.init_source()
+ message = str(os.urandom(1))
+ ciphertext = crypto_util.encrypt(
+ message,
+ [crypto_util.getkey(source.filesystem_id),
+ config.JOURNALIST_KEY],
+ store.path(source.filesystem_id, 'somefile.gpg'))
+ plaintext = crypto_util.decrypt(codename, ciphertext)
+
+ self.assertEqual(message, plaintext)
+
+ # Since there's no way to specify which key to use for
+ # decryption to python-gnupg, we delete the `source`'s key and
+ # ensure we can decrypt with the `config.JOURNALIST_KEY`.
+ crypto_util.delete_reply_keypair(source.filesystem_id)
+ plaintext_ = crypto_util.gpg.decrypt(ciphertext).data
+
+ self.assertEqual(message, plaintext_)
+
+ def test_genrandomid(self):
+ id = crypto_util.genrandomid()
+ id_words = id.split()
+
+ self.assertEqual(id, crypto_util.clean(id))
+ self.assertEqual(len(id_words), crypto_util.DEFAULT_WORDS_IN_RANDOM_ID)
+ for word in id_words:
+ self.assertIn(word, crypto_util.words)
+
+ def test_display_id(self):
+ id = crypto_util.display_id()
+ id_words = id.split()
+
+ self.assertEqual(len(id_words), 2)
+ self.assertIn(id_words[0], crypto_util.adjectives)
+ self.assertIn(id_words[1], crypto_util.nouns)
+
+ def test_hash_codename(self):
+ codename = crypto_util.genrandomid()
+ hashed_codename = crypto_util.hash_codename(codename)
+
+ self.assertRegexpMatches(hashed_codename, '^[2-7A-Z]{103}=$')
+
+ def test_genkeypair(self):
+ codename = crypto_util.genrandomid()
+ filesystem_id = crypto_util.hash_codename(codename)
+ journalist_filename = crypto_util.display_id()
+ source = db.Source(filesystem_id, journalist_filename)
+ db.db_session.add(source)
+ db.db_session.commit()
+ crypto_util.genkeypair(source.filesystem_id, codename)
+
+ self.assertIsNotNone(crypto_util.getkey(filesystem_id))
+
+ def test_delete_reply_keypair(self):
+ source, _ = utils.db_helper.init_source()
+ crypto_util.delete_reply_keypair(source.filesystem_id)
+
+ self.assertIsNone(crypto_util.getkey(source.filesystem_id))
+
+ def test_delete_reply_keypair_no_key(self):
+ """No exceptions should be raised when provided a filesystem id that
+ does not exist.
+ """
+ crypto_util.delete_reply_keypair('Reality Winner')
+
+ def test_getkey(self):
+ source, _ = utils.db_helper.init_source()
+
+ self.assertIsNotNone(crypto_util.getkey(source.filesystem_id))
diff --git a/securedrop/tests/test_db.py b/securedrop/tests/test_db.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_db.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+from flask_testing import TestCase
+import mock
+
+import journalist
+from utils import db_helper, env
+from db import (Journalist, Submission, Reply, get_one_or_else,
+ LoginThrottledException)
+
+
+class TestDatabase(TestCase):
+
+ def create_app(self):
+ return journalist.app
+
+ def setUp(self):
+ env.setup()
+
+ def tearDown(self):
+ env.teardown()
+
+ @mock.patch('flask.abort')
+ def test_get_one_or_else_returns_one(self, mock):
+ new_journo, _ = db_helper.init_journalist()
+
+ query = Journalist.query.filter(
+ Journalist.username == new_journo.username)
+ with mock.patch('logger') as mock_logger:
+ selected_journo = get_one_or_else(query, mock_logger, mock)
+ self.assertEqual(new_journo, selected_journo)
+
+ @mock.patch('flask.abort')
+ def test_get_one_or_else_multiple_results(self, mock):
+ journo_1, _ = db_helper.init_journalist()
+ journo_2, _ = db_helper.init_journalist()
+
+ with mock.patch('logger') as mock_logger:
+ get_one_or_else(Journalist.query, mock_logger, mock)
+ mock_logger.error.assert_called() # Not specifying very long log line
+ mock.assert_called_with(500)
+
+ @mock.patch('flask.abort')
+ def test_get_one_or_else_no_result_found(self, mock):
+ query = Journalist.query.filter(Journalist.username == "alice")
+
+ with mock.patch('logger') as mock_logger:
+ get_one_or_else(query, mock_logger, mock)
+ log_line = ('Found none when one was expected: '
+ 'No row was found for one()')
+ mock_logger.error.assert_called_with(log_line)
+ mock.assert_called_with(404)
+
+ # Check __repr__ do not throw exceptions
+
+ def test_submission_string_representation(self):
+ source, _ = db_helper.init_source()
+ db_helper.submit(source, 2)
+
+ test_submission = Submission.query.first()
+ test_submission.__repr__()
+
+ def test_reply_string_representation(self):
+ journalist, _ = db_helper.init_journalist()
+ source, _ = db_helper.init_source()
+ db_helper.reply(journalist, source, 2)
+ test_reply = Reply.query.first()
+ test_reply.__repr__()
+
+ def test_journalist_string_representation(self):
+ test_journalist, _ = db_helper.init_journalist()
+ test_journalist.__repr__()
+
+ def test_source_string_representation(self):
+ test_source, _ = db_helper.init_source()
+ test_source.__repr__()
+
+ def test_throttle_login(self):
+ journalist, _ = db_helper.init_journalist()
+ for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD):
+ Journalist.throttle_login(journalist)
+ with self.assertRaises(LoginThrottledException):
+ Journalist.throttle_login(journalist)
diff --git a/securedrop/tests/test_unit_integration.py b/securedrop/tests/test_integration.py
similarity index 58%
rename from securedrop/tests/test_unit_integration.py
rename to securedrop/tests/test_integration.py
--- a/securedrop/tests/test_unit_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -1,38 +1,26 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
-import os
-import unittest
-import re
from cStringIO import StringIO
-import zipfile
-from time import sleep
-import tempfile
-import shutil
-import time
import gzip
-
import mock
+import os
+import re
+import shutil
+import tempfile
+import unittest
+import zipfile
-import gnupg
-from flask import session, g, escape
from bs4 import BeautifulSoup
+from flask import session, g, escape
+import gnupg
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
-import source
-import journalist
-import common
from db import db_session, Journalist
+import journalist
+import source
import store
-
-# Set environment variable so config.py uses a test environment
-os.environ['SECUREDROP_ENV'] = 'test'
-
-
-def _block_on_reply_keypair_gen(codename):
- sid = crypto_util.hash_codename(codename)
- while not crypto_util.getkey(sid):
- sleep(0.1)
+import utils
class TestIntegration(unittest.TestCase):
@@ -41,23 +29,11 @@ def _login_user(self):
self.journalist_app.post('/login', data=dict(
username=self.user.username,
password=self.user_pw,
- token=self.user.totp.now()),
+ token='mocked'),
follow_redirects=True)
- def _wait_for(self, function_with_assertion, timeout=5):
- """Polling wait for an arbitrary assertion."""
- # Thanks to http://chimera.labs.oreilly.com/books/1234000000754/ch20.html#_a_common_selenium_problem_race_conditions
- start_time = time.time()
- while time.time() - start_time < timeout:
- try:
- return function_with_assertion()
- except AssertionError:
- time.sleep(0.1)
- # one more try, which will raise any errors if they are outstanding
- return function_with_assertion()
-
def setUp(self):
- common.shared_setup()
+ utils.env.setup()
self.source_app = source.app.test_client()
self.journalist_app = journalist.app.test_client()
@@ -72,7 +48,7 @@ def setUp(self):
# Add a test user to the journalist interface and log them in
# print Journalist.query.all()
- self.user_pw = "bar"
+ self.user_pw = "longpassword"
self.user = Journalist(username="foo",
password=self.user_pw)
db_session.add(self.user)
@@ -80,30 +56,30 @@ def setUp(self):
self._login_user()
def tearDown(self):
- common.shared_teardown()
+ utils.env.teardown()
def test_submit_message(self):
- """When a source creates an account, test that a new entry appears in the journalist interface"""
+ """When a source creates an account, test that a new entry appears
+ in the journalist interface"""
test_msg = "This is a test message."
with self.source_app as source_app:
- rv = source_app.get('/generate')
- rv = source_app.post('/create', follow_redirects=True)
- codename = session['codename']
+ resp = source_app.get('/generate')
+ resp = source_app.post('/create', follow_redirects=True)
sid = g.sid
# redirected to submission form
- rv = self.source_app.post('/submit', data=dict(
+ resp = self.source_app.post('/submit', data=dict(
msg=test_msg,
fh=(StringIO(''), ''),
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- common.logout(source_app)
+ self.assertEqual(resp.status_code, 200)
+ source_app.get('/logout')
- # Request the Document Interface index
+ # Request the Journalist Interface index
rv = self.journalist_app.get('/')
self.assertEqual(rv.status_code, 200)
self.assertIn("Sources", rv.data)
- soup = BeautifulSoup(rv.data)
+ soup = BeautifulSoup(rv.data, 'html.parser')
# The source should have a "download unread" link that says "1 unread"
col = soup.select('ul#cols > li')[0]
@@ -111,83 +87,83 @@ def test_submit_message(self):
self.assertIn("1 unread", unread_span.get_text())
col_url = soup.select('ul#cols > li a')[0]['href']
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
submission_url = soup.select('ul#submissions li a')[0]['href']
self.assertIn("-msg", submission_url)
span = soup.select('ul#submissions li span.info span')[0]
self.assertRegexpMatches(span['title'], "\d+ bytes")
- rv = self.journalist_app.get(submission_url)
- self.assertEqual(rv.status_code, 200)
- decrypted_data = self.gpg.decrypt(rv.data)
+ resp = self.journalist_app.get(submission_url)
+ self.assertEqual(resp.status_code, 200)
+ decrypted_data = self.gpg.decrypt(resp.data)
self.assertTrue(decrypted_data.ok)
self.assertEqual(decrypted_data.data, test_msg)
# delete submission
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
doc_name = soup.select(
'ul > li > input[name="doc_names_selected"]')[0]['value']
- rv = self.journalist_app.post('/bulk', data=dict(
+ resp = self.journalist_app.post('/bulk', data=dict(
action='confirm_delete',
sid=sid,
doc_names_selected=doc_name
))
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
- self.assertIn("The following file has been selected for", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("The following file has been selected for", resp.data)
# confirm delete submission
doc_name = soup.select
doc_name = soup.select(
'ul > li > input[name="doc_names_selected"]')[0]['value']
- rv = self.journalist_app.post('/bulk', data=dict(
+ resp = self.journalist_app.post('/bulk', data=dict(
action='delete',
sid=sid,
doc_names_selected=doc_name,
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
- self.assertIn("Submission deleted.", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("Submission deleted.", resp.data)
# confirm that submission deleted and absent in list of submissions
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("No documents to display.", rv.data)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("No documents to display.", resp.data)
# the file should be deleted from the filesystem
# since file deletion is handled by a polling worker, this test needs
# to wait for the worker to get the job and execute it
- self._wait_for(
+ utils.async.wait_for_assertion(
lambda: self.assertFalse(os.path.exists(store.path(sid, doc_name)))
)
def test_submit_file(self):
- """When a source creates an account, test that a new entry appears in the journalist interface"""
+ """When a source creates an account, test that a new entry appears
+ in the journalist interface"""
test_file_contents = "This is a test file."
test_filename = "test.txt"
with self.source_app as source_app:
- rv = source_app.get('/generate')
- rv = source_app.post('/create', follow_redirects=True)
- codename = session['codename']
+ resp = source_app.get('/generate')
+ resp = source_app.post('/create', follow_redirects=True)
sid = g.sid
# redirected to submission form
- rv = self.source_app.post('/submit', data=dict(
+ resp = self.source_app.post('/submit', data=dict(
msg="",
fh=(StringIO(test_file_contents), test_filename),
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- common.logout(source_app)
+ self.assertEqual(resp.status_code, 200)
+ source_app.get('/logout')
- rv = self.journalist_app.get('/')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Sources", rv.data)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get('/')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Sources", resp.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
# The source should have a "download unread" link that says "1 unread"
col = soup.select('ul#cols > li')[0]
@@ -195,17 +171,17 @@ def test_submit_file(self):
self.assertIn("1 unread", unread_span.get_text())
col_url = soup.select('ul#cols > li a')[0]['href']
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
submission_url = soup.select('ul#submissions li a')[0]['href']
self.assertIn("-doc", submission_url)
span = soup.select('ul#submissions li span.info span')[0]
self.assertRegexpMatches(span['title'], "\d+ bytes")
- rv = self.journalist_app.get(submission_url)
- self.assertEqual(rv.status_code, 200)
- decrypted_data = self.gpg.decrypt(rv.data)
+ resp = self.journalist_app.get(submission_url)
+ self.assertEqual(resp.status_code, 200)
+ decrypted_data = self.gpg.decrypt(resp.data)
self.assertTrue(decrypted_data.ok)
sio = StringIO(decrypted_data.data)
@@ -214,43 +190,43 @@ def test_submit_file(self):
self.assertEqual(unzipped_decrypted_data, test_file_contents)
# delete submission
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
doc_name = soup.select(
'ul > li > input[name="doc_names_selected"]')[0]['value']
- rv = self.journalist_app.post('/bulk', data=dict(
+ resp = self.journalist_app.post('/bulk', data=dict(
action='confirm_delete',
sid=sid,
doc_names_selected=doc_name
))
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
- self.assertIn("The following file has been selected for", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("The following file has been selected for", resp.data)
# confirm delete submission
doc_name = soup.select
doc_name = soup.select(
'ul > li > input[name="doc_names_selected"]')[0]['value']
- rv = self.journalist_app.post('/bulk', data=dict(
+ resp = self.journalist_app.post('/bulk', data=dict(
action='delete',
sid=sid,
doc_names_selected=doc_name,
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
- self.assertIn("Submission deleted.", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("Submission deleted.", resp.data)
# confirm that submission deleted and absent in list of submissions
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("No documents to display.", rv.data)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("No documents to display.", resp.data)
# the file should be deleted from the filesystem
# since file deletion is handled by a polling worker, this test needs
# to wait for the worker to get the job and execute it
- self._wait_for(
+ utils.async.wait_for_assertion(
lambda: self.assertFalse(os.path.exists(store.path(sid, doc_name)))
)
@@ -295,11 +271,14 @@ def _can_decrypt_with_key(self, msg, key_fpr, passphrase=None):
# Attempt decryption with the given key
if passphrase:
- passphrase = crypto_util.hash_codename(passphrase,
- salt=crypto_util.SCRYPT_GPG_PEPPER)
+ passphrase = crypto_util.hash_codename(
+ passphrase,
+ salt=crypto_util.SCRYPT_GPG_PEPPER)
decrypted_data = gpg.decrypt(msg, passphrase=passphrase)
- self.assertTrue(decrypted_data.ok,
- "Could not decrypt msg with key, gpg says: {}".format(decrypted_data.stderr))
+ self.assertTrue(
+ decrypted_data.ok,
+ "Could not decrypt msg with key, gpg says: {}".format(
+ decrypted_data.stderr))
# We have to clean up the temporary GPG dir
shutil.rmtree(gpg_tmp_dir)
@@ -308,121 +287,125 @@ def helper_test_reply(self, test_reply, expected_success=True):
test_msg = "This is a test message."
with self.source_app as source_app:
- rv = source_app.get('/generate')
- rv = source_app.post('/create', follow_redirects=True)
+ resp = source_app.get('/generate')
+ resp = source_app.post('/create', follow_redirects=True)
codename = session['codename']
sid = g.sid
# redirected to submission form
- rv = source_app.post('/submit', data=dict(
+ resp = source_app.post('/submit', data=dict(
msg=test_msg,
fh=(StringIO(''), ''),
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertFalse(g.source.flagged)
- common.logout(source_app)
+ source_app.get('/logout')
- rv = self.journalist_app.get('/')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Sources", rv.data)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get('/')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Sources", resp.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
col_url = soup.select('ul#cols > li a')[0]['href']
- rv = self.journalist_app.get(col_url)
- self.assertEqual(rv.status_code, 200)
+ resp = self.journalist_app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
with self.source_app as source_app:
- rv = source_app.post('/login', data=dict(
+ resp = source_app.post('/login', data=dict(
codename=codename), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertFalse(g.source.flagged)
- common.logout(source_app)
+ source_app.get('/logout')
with self.journalist_app as journalist_app:
- rv = journalist_app.post('/flag', data=dict(
+ resp = journalist_app.post('/flag', data=dict(
sid=sid))
- self.assertEqual(rv.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
with self.source_app as source_app:
- rv = source_app.post('/login', data=dict(
+ resp = source_app.post('/login', data=dict(
codename=codename), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTrue(g.source.flagged)
source_app.get('/lookup')
self.assertTrue(g.source.flagged)
- common.logout(source_app)
+ source_app.get('/logout')
- # Block until the reply keypair has been generated, so we can test
- # sending a reply
- _block_on_reply_keypair_gen(codename)
+ # Block up to 15s for the reply keypair, so we can test sending a reply
+ utils.async.wait_for_assertion(
+ lambda: self.assertNotEqual(crypto_util.getkey(sid), None), 15)
# Create 2 replies to test deleting on journalist and source interface
for i in range(2):
- rv = self.journalist_app.post('/reply', data=dict(
+ resp = self.journalist_app.post('/reply', data=dict(
sid=sid,
msg=test_reply
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
if not expected_success:
pass
else:
- self.assertIn("Thanks! Your reply has been stored.", rv.data)
+ self.assertIn("Thanks! Your reply has been stored.", resp.data)
with self.journalist_app as journalist_app:
- rv = journalist_app.get(col_url)
- self.assertIn("reply-", rv.data)
+ resp = journalist_app.get(col_url)
+ self.assertIn("reply-", resp.data)
- soup = BeautifulSoup(rv.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
# Download the reply and verify that it can be decrypted with the
# journalist's key as well as the source's reply key
sid = soup.select('input[name="sid"]')[0]['value']
- checkbox_values = [soup.select('input[name="doc_names_selected"]')[1]['value']]
- rv = self.journalist_app.post('/bulk', data=dict(
+ checkbox_values = [
+ soup.select('input[name="doc_names_selected"]')[1]['value']]
+ resp = self.journalist_app.post('/bulk', data=dict(
sid=sid,
action='download',
doc_names_selected=checkbox_values
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
- zf = zipfile.ZipFile(StringIO(rv.data), 'r')
+ zf = zipfile.ZipFile(StringIO(resp.data), 'r')
data = zf.read(zf.namelist()[0])
self._can_decrypt_with_key(data, config.JOURNALIST_KEY)
self._can_decrypt_with_key(data, crypto_util.getkey(sid), codename)
# Test deleting reply on the journalist interface
- last_reply_number = len(soup.select('input[name="doc_names_selected"]')) - 1
+ last_reply_number = len(
+ soup.select('input[name="doc_names_selected"]')) - 1
self.helper_filenames_delete(soup, last_reply_number)
with self.source_app as source_app:
- rv = source_app.post('/login', data=dict(codename=codename),
- follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- rv = source_app.get('/lookup')
- self.assertEqual(rv.status_code, 200)
+ resp = source_app.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ resp = source_app.get('/lookup')
+ self.assertEqual(resp.status_code, 200)
if not expected_success:
# there should be no reply
- self.assertNotIn("You have received a reply.", rv.data)
+ self.assertNotIn("You have received a reply.", resp.data)
else:
self.assertIn(
- "You have received a reply. For your security, please delete all replies when you're done with them.", rv.data)
- self.assertIn(test_reply, rv.data)
- soup = BeautifulSoup(rv.data)
- msgid = soup.select('form.message > input[name="reply_filename"]')[0]['value']
- rv = source_app.post('/delete', data=dict(
+ "You have received a reply. To protect your identity",
+ resp.data)
+ self.assertIn(test_reply, resp.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ msgid = soup.select(
+ 'form.message > input[name="reply_filename"]')[0]['value']
+ resp = source_app.post('/delete', data=dict(
sid=sid,
reply_filename=msgid
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Reply deleted", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Reply deleted", resp.data)
# Make sure the reply is deleted from the filesystem
- self._wait_for(
- lambda: self.assertFalse(os.path.exists(store.path(sid, msgid)))
- )
+ utils.async.wait_for_assertion(
+ lambda: self.assertFalse(os.path.exists(
+ store.path(sid, msgid))))
- common.logout(source_app)
+ source_app.get('/logout')
def test_delete_collection(self):
"""Test the "delete collection" button on each collection page"""
@@ -434,27 +417,28 @@ def test_delete_collection(self):
fh=(StringIO(''), ''),
), follow_redirects=True)
- rv = self.journalist_app.get('/')
+ resp = self.journalist_app.get('/')
# navigate to the collection page
- soup = BeautifulSoup(rv.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
first_col_url = soup.select('ul#cols > li a')[0]['href']
- rv = self.journalist_app.get(first_col_url)
- self.assertEqual(rv.status_code, 200)
+ resp = self.journalist_app.get(first_col_url)
+ self.assertEqual(resp.status_code, 200)
# find the delete form and extract the post parameters
- soup = BeautifulSoup(rv.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
delete_form_inputs = soup.select('form#delete_collection')[0]('input')
sid = delete_form_inputs[1]['value']
col_name = delete_form_inputs[2]['value']
- rv = self.journalist_app.post('/col/delete/' + sid,
- follow_redirects=True)
- self.assertEquals(rv.status_code, 200)
+ resp = self.journalist_app.post('/col/delete/' + sid,
+ follow_redirects=True)
+ self.assertEquals(resp.status_code, 200)
- self.assertIn(escape("%s's collection deleted" % (col_name,)), rv.data)
- self.assertIn("No documents have been submitted!", rv.data)
+ self.assertIn(escape("%s's collection deleted" % (col_name,)),
+ resp.data)
+ self.assertIn("No documents have been submitted!", resp.data)
# Make sure the collection is deleted from the filesystem
- self._wait_for(
+ utils.async.wait_for_assertion(
lambda: self.assertFalse(os.path.exists(store.path(sid)))
)
@@ -470,43 +454,44 @@ def test_delete_collections(self):
msg="This is a test " + str(i) + ".",
fh=(StringIO(''), ''),
), follow_redirects=True)
- common.logout(self.source_app)
+ self.source_app.get('/logout')
- rv = self.journalist_app.get('/')
+ resp = self.journalist_app.get('/')
# get all the checkbox values
- soup = BeautifulSoup(rv.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
checkbox_values = [checkbox['value'] for checkbox in
soup.select('input[name="cols_selected"]')]
- rv = self.journalist_app.post('/col/process', data=dict(
+ resp = self.journalist_app.post('/col/process', data=dict(
action='delete',
cols_selected=checkbox_values
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("%s collections deleted" % (num_sources,), rv.data)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("%s collections deleted" % (num_sources,), resp.data)
# Make sure the collections are deleted from the filesystem
- self._wait_for(
- lambda: self.assertFalse(any([os.path.exists(store.path(sid)) for sid in checkbox_values]))
- )
+ utils.async.wait_for_assertion(lambda: self.assertFalse(
+ any([os.path.exists(store.path(sid)) for sid in checkbox_values])))
def test_filenames(self):
- """Test pretty, sequential filenames when source uploads messages and files"""
+ """Test pretty, sequential filenames when source uploads messages
+ and files"""
# add a source and submit stuff
self.source_app.get('/generate')
self.source_app.post('/create')
self.helper_filenames_submit()
# navigate to the collection page
- rv = self.journalist_app.get('/')
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get('/')
+ soup = BeautifulSoup(resp.data, 'html.parser')
first_col_url = soup.select('ul#cols > li a')[0]['href']
- rv = self.journalist_app.get(first_col_url)
- self.assertEqual(rv.status_code, 200)
+ resp = self.journalist_app.get(first_col_url)
+ self.assertEqual(resp.status_code, 200)
# test filenames and sort order
- soup = BeautifulSoup(rv.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$'
- for i, submission_link in enumerate(soup.select('ul#submissions li a .filename')):
+ for i, submission_link in enumerate(
+ soup.select('ul#submissions li a .filename')):
filename = str(submission_link.contents[0])
self.assertTrue(re.match(submission_filename_re.format(i + 1),
filename))
@@ -519,27 +504,74 @@ def test_filenames_delete(self):
self.helper_filenames_submit()
# navigate to the collection page
- rv = self.journalist_app.get('/')
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get('/')
+ soup = BeautifulSoup(resp.data, 'html.parser')
first_col_url = soup.select('ul#cols > li a')[0]['href']
- rv = self.journalist_app.get(first_col_url)
- self.assertEqual(rv.status_code, 200)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get(first_col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
# delete file #2
self.helper_filenames_delete(soup, 1)
- rv = self.journalist_app.get(first_col_url)
- soup = BeautifulSoup(rv.data)
+ resp = self.journalist_app.get(first_col_url)
+ soup = BeautifulSoup(resp.data, 'html.parser')
# test filenames and sort order
submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$'
- filename = str(soup.select('ul#submissions li a .filename')[0].contents[0])
+ filename = str(
+ soup.select('ul#submissions li a .filename')[0].contents[0])
self.assertTrue(re.match(submission_filename_re.format(1), filename))
- filename = str(soup.select('ul#submissions li a .filename')[1].contents[0])
+ filename = str(
+ soup.select('ul#submissions li a .filename')[1].contents[0])
self.assertTrue(re.match(submission_filename_re.format(3), filename))
- filename = str(soup.select('ul#submissions li a .filename')[2].contents[0])
+ filename = str(
+ soup.select('ul#submissions li a .filename')[2].contents[0])
self.assertTrue(re.match(submission_filename_re.format(4), filename))
+ def test_user_change_password(self):
+ """Test that a journalist can successfully login after changing
+ their password"""
+
+ # change password
+ self.journalist_app.post('/account', data=dict(
+ password='newlongpassword',
+ password_again='newlongpassword'
+ ))
+
+ # logout
+ self.journalist_app.get('/logout')
+
+ # login with new credentials should redirect to index page
+ resp = self.journalist_app.post('/login', data=dict(
+ username=self.user.username,
+ password='newlongpassword',
+ token='mocked',
+ follow_redirects=True))
+ self.assertEqual(resp.status_code, 302)
+
+ def test_login_after_regenerate_hotp(self):
+ """Test that journalists can login after resetting their HOTP 2fa"""
+
+ # edit hotp
+ self.journalist_app.post('/account/reset-2fa-hotp', data=dict(
+ otp_secret=123456))
+
+ # successful verification should redirect to /account
+ resp = self.journalist_app.post('/account/2fa', data=dict(
+ token=self.user.hotp))
+ self.assertEqual(resp.status_code, 302)
+
+ # log out
+ self.journalist_app.get('/logout')
+
+ # login with new 2fa secret should redirect to index page
+ resp = self.journalist_app.post('/login', data=dict(
+ username=self.user.username,
+ password=self.user_pw,
+ token=self.user.hotp,
+ follow_redirects=True))
+ self.assertEqual(resp.status_code, 302)
+
def helper_filenames_submit(self):
self.source_app.post('/submit', data=dict(
msg="This is a test.",
@@ -556,30 +588,31 @@ def helper_filenames_submit(self):
def helper_filenames_delete(self, soup, i):
sid = soup.select('input[name="sid"]')[0]['value']
- checkbox_values = [soup.select('input[name="doc_names_selected"]')[i]['value']]
+ checkbox_values = [
+ soup.select('input[name="doc_names_selected"]')[i]['value']]
# delete
- rv = self.journalist_app.post('/bulk', data=dict(
+ resp = self.journalist_app.post('/bulk', data=dict(
sid=sid,
action='confirm_delete',
doc_names_selected=checkbox_values
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("The following file has been selected for <strong>permanent deletion</strong>", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn(
+ "The following file has been selected for"
+ " <strong>permanent deletion</strong>",
+ resp.data)
# confirm delete
- rv = self.journalist_app.post('/bulk', data=dict(
+ resp = self.journalist_app.post('/bulk', data=dict(
sid=sid,
action='delete',
doc_names_selected=checkbox_values
), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Submission deleted.", rv.data)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Submission deleted.", resp.data)
# Make sure the files were deleted from the filesystem
- self._wait_for(
- lambda: self.assertFalse(any([os.path.exists(store.path(sid, doc_name)) for doc_name in checkbox_values]))
- )
-
-if __name__ == "__main__":
- unittest.main(verbosity=2)
+ utils.async.wait_for_assertion(lambda: self.assertFalse(
+ any([os.path.exists(store.path(sid, doc_name))
+ for doc_name in checkbox_values])))
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -1,12 +1,907 @@
+# -*- coding: utf-8 -*-
+from cStringIO import StringIO
+import os
+import random
import unittest
+import zipfile
+from flask import url_for, escape
+from flask_testing import TestCase
from mock import patch, ANY, MagicMock
+from sqlalchemy.orm.exc import StaleDataError
+from sqlalchemy.exc import IntegrityError
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
+import crypto_util
+from db import (db_session, InvalidPasswordLength, Journalist, Reply, Source,
+ Submission)
+import db
import journalist
-import common
-from db import Journalist, InvalidPasswordLength, db_session
+import utils
-class TestJournalist(unittest.TestCase):
+# Smugly seed the RNG for deterministic testing
+random.seed('¯\_(ツ)_/¯')
+
+
+class TestJournalistApp(TestCase):
+
+ # A method required by flask_testing.TestCase
+ def create_app(self):
+ return journalist.app
+
+ def setUp(self):
+ utils.env.setup()
+
+ # Patch the two-factor verification to avoid intermittent errors
+ utils.db_helper.mock_verify_token(self)
+
+ # Setup test users: user & admin
+ self.user, self.user_pw = utils.db_helper.init_journalist()
+ self.admin, self.admin_pw = utils.db_helper.init_journalist(
+ is_admin=True)
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ @patch('journalist.app.logger.error')
+ def test_reply_error_logging(self, mocked_error_logger):
+ source, _ = utils.db_helper.init_source()
+ sid = source.filesystem_id
+ self._login_user()
+
+ exception_class = StaleDataError
+ exception_msg = 'Potentially sensitive content!'
+
+ with patch('db.db_session.commit',
+ side_effect=exception_class(exception_msg)):
+ self.client.post(url_for('reply'), data={'sid': sid, 'msg': '_'})
+
+ # Notice the "potentially sensitive" exception_msg is not present in
+ # the log event.
+ mocked_error_logger.assert_called_once_with(
+ "Reply from '{}' (id {}) failed: {}!".format(self.user.username,
+ self.user.id,
+ exception_class))
+
+ def test_reply_error_flashed_message(self):
+ source, _ = utils.db_helper.init_source()
+ sid = source.filesystem_id
+ self._login_user()
+
+ exception_class = StaleDataError
+
+ with patch('db.db_session.commit', side_effect=exception_class()):
+ self.client.post(url_for('reply'), data={'sid': sid, 'msg': '_'})
+
+ self.assertMessageFlashed(
+ 'An unexpected error occurred! Please check '
+ 'the application logs or inform your adminstrator.', 'error')
+
+ def test_empty_replies_are_rejected(self):
+ source, _ = utils.db_helper.init_source()
+ sid = source.filesystem_id
+ self._login_user()
+
+ resp = self.client.post(url_for('reply'),
+ data={'sid': sid, 'msg': ''},
+ follow_redirects=True)
+
+ self.assertIn("You cannot send an empty reply!", resp.data)
+
+ def test_nonempty_replies_are_accepted(self):
+ source, _ = utils.db_helper.init_source()
+ sid = source.filesystem_id
+ self._login_user()
+
+ resp = self.client.post(url_for('reply'),
+ data={'sid': sid, 'msg': '_'},
+ follow_redirects=True)
+
+ self.assertNotIn("You cannot send an empty reply!", resp.data)
+
+ def test_unauthorized_access_redirects_to_login(self):
+ resp = self.client.get(url_for('index'))
+ self.assertRedirects(resp, url_for('login'))
+
+ def test_login_throttle(self):
+ db.LOGIN_HARDENING = True
+ try:
+ for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password='invalid',
+ token='mocked'))
+ self.assert200(resp)
+ self.assertIn("Login failed", resp.data)
+
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password='invalid',
+ token='mocked'))
+ self.assert200(resp)
+ self.assertIn("Please wait at least {} seconds".format(
+ Journalist._LOGIN_ATTEMPT_PERIOD), resp.data)
+ finally:
+ db.LOGIN_HARDENING = False
+
+ def test_login_invalid_credentials(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password='invalid',
+ token='mocked'))
+ self.assert200(resp)
+ self.assertIn("Login failed", resp.data)
+
+ def test_login_valid_credentials(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token='mocked'),
+ follow_redirects=True)
+ self.assert200(resp) # successful login redirects to index
+ self.assertIn("Sources", resp.data)
+ self.assertIn("No documents have been submitted!", resp.data)
+
+ def test_admin_login_redirects_to_index(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.admin.username,
+ password=self.admin_pw,
+ token='mocked'))
+ self.assertRedirects(resp, url_for('index'))
+
+ def test_user_login_redirects_to_index(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token='mocked'))
+ self.assertRedirects(resp, url_for('index'))
+
+ def test_admin_has_link_to_edit_account_page_in_index_page(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.admin.username,
+ password=self.admin_pw,
+ token='mocked'),
+ follow_redirects=True)
+ edit_account_link = '<a href="{}">{}</a>'.format(
+ url_for('edit_account'), "Edit Account")
+ self.assertIn(edit_account_link, resp.data)
+
+ def test_user_has_link_to_edit_account_page_in_index_page(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token='mocked'),
+ follow_redirects=True)
+ edit_account_link = '<a href="{}">{}</a>'.format(
+ url_for('edit_account'), "Edit Account")
+ self.assertIn(edit_account_link, resp.data)
+
+ def test_admin_has_link_to_admin_index_page_in_index_page(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.admin.username,
+ password=self.admin_pw,
+ token='mocked'),
+ follow_redirects=True)
+ admin_link = '<a href="{}">{}</a>'.format(
+ url_for('admin_index'), "Admin")
+ self.assertIn(admin_link, resp.data)
+
+ def test_user_lacks_link_to_admin_index_page_in_index_page(self):
+ resp = self.client.post(url_for('login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token='mocked'),
+ follow_redirects=True)
+ admin_link = '<a href="{}">{}</a>'.format(
+ url_for('admin_index'), "Admin")
+ self.assertNotIn(admin_link, resp.data)
+
+ # WARNING: we are purposely doing something that would not work in
+ # production in the _login_user and _login_admin methods. This is done as a
+ # reminder to the test developer that the flask_testing.TestCase only uses
+ # one request context per method (see
+ # https://github.com/freedomofpress/securedrop/issues/1444). By explicitly
+ # making a point of this, we hope to avoid the introduction of new tests,
+ # that do not truly prove their result because of this disconnect between
+ # request context in Flask Testing and production.
+ #
+ # TODO: either ditch Flask Testing or subclass it as discussed in the
+ # aforementioned issue to fix the described problem.
+ def _login_admin(self):
+ self._ctx.g.user = self.admin
+
+ def _login_user(self):
+ self._ctx.g.user = self.user
+
+ def test_admin_logout_redirects_to_index(self):
+ self._login_admin()
+ resp = self.client.get(url_for('logout'))
+ self.assertRedirects(resp, url_for('index'))
+
+ def test_user_logout_redirects_to_index(self):
+ self._login_user()
+ resp = self.client.get(url_for('logout'))
+ self.assertRedirects(resp, url_for('index'))
+
+ def test_admin_index(self):
+ self._login_admin()
+ resp = self.client.get(url_for('admin_index'))
+ self.assert200(resp)
+ self.assertIn("Admin Interface", resp.data)
+
+ def test_admin_delete_user(self):
+ # Verify journalist is in the database
+ self.assertNotEqual(Journalist.query.get(self.user.id), None)
+
+ self._login_admin()
+ resp = self.client.post(url_for('admin_delete_user',
+ user_id=self.user.id),
+ follow_redirects=True)
+
+ # Assert correct interface behavior
+ self.assert200(resp)
+ self.assertIn(escape("Deleted user '{}'".format(self.user.username)),
+ resp.data)
+ # Verify journalist is no longer in the database
+ self.assertEqual(Journalist.query.get(self.user.id), None)
+
+ def test_admin_deletes_invalid_user_404(self):
+ self._login_admin()
+ invalid_user_pk = max([user.id for user in Journalist.query.all()]) + 1
+ resp = self.client.post(url_for('admin_delete_user',
+ user_id=invalid_user_pk))
+ self.assert404(resp)
+
+ def test_admin_edits_user_password_success_response(self):
+ self._login_admin()
+
+ self.client.post(
+ url_for('admin_edit_user', user_id=self.user.id),
+ data=dict(username=self.user.username, is_admin=False,
+ password='validlongpassword',
+ password_again='validlongpassword'))
+
+ self.assertMessageFlashed("Account successfully updated!", 'success')
+
+ def test_user_edits_password_success_reponse(self):
+ self._login_user()
+ self.client.post(url_for('edit_account'),
+ data=dict(password='validlongpassword',
+ password_again='validlongpassword'))
+ self.assertMessageFlashed("Account successfully updated!", 'success')
+
+ def test_admin_edits_user_password_mismatch_warning(self):
+ self._login_admin()
+
+ self.client.post(
+ url_for('admin_edit_user', user_id=self.user.id),
+ data=dict(username=self.user.username, is_admin=False,
+ password='not', password_again='thesame'),
+ follow_redirects=True)
+
+ self.assertMessageFlashed("Passwords didn't match!", "error")
+
+ def test_user_edits_password_mismatch_redirect(self):
+ self._login_user()
+ resp = self.client.post(url_for('edit_account'), data=dict(
+ password='not',
+ password_again='thesame'))
+ self.assertRedirects(resp, url_for('edit_account'))
+
+ def test_admin_add_user_password_mismatch_warning(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username='dellsberg',
+ password='not',
+ password_again='thesame',
+ is_admin=False))
+ self.assertIn('Passwords didn', resp.data)
+
+ def test_admin_add_user_when_username_already_in_use(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username=self.admin.username,
+ password='testtesttest',
+ password_again='testtesttest',
+ is_admin=False))
+ self.assertIn('That username is already in use', resp.data)
+
+ def test_max_password_length(self):
+ """Creating a Journalist with a password that is greater than the
+ maximum password length should raise an exception"""
+ overly_long_password = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
+ with self.assertRaises(InvalidPasswordLength):
+ Journalist(username="My Password is Too Big!",
+ password=overly_long_password)
+
+ def test_min_password_length(self):
+ """Creating a Journalist with a password that is smaller than the
+ minimum password length should raise an exception"""
+ with self.assertRaises(InvalidPasswordLength):
+ Journalist(username="My Password is Too Small!",
+ password='tiny')
+
+ def test_admin_edits_user_password_too_long_warning(self):
+ self._login_admin()
+ overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
+
+ self.client.post(
+ url_for('admin_edit_user', user_id=self.user.id),
+ data=dict(username=self.user.username, is_admin=False,
+ password=overly_long_password,
+ password_again=overly_long_password),
+ follow_redirects=True)
+
+ self.assertMessageFlashed('Your password must be between {} and {} '
+ 'characters.'.format(
+ Journalist.MIN_PASSWORD_LEN,
+ Journalist.MAX_PASSWORD_LEN), 'error')
+
+ def test_user_edits_password_too_long_warning(self):
+ self._login_user()
+ overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
+
+ self.client.post(url_for('edit_account'),
+ data=dict(password=overly_long_password,
+ password_again=overly_long_password),
+ follow_redirects=True)
+
+ self.assertMessageFlashed('Your password must be between {} and {} '
+ 'characters.'.format(
+ Journalist.MIN_PASSWORD_LEN,
+ Journalist.MAX_PASSWORD_LEN), 'error')
+
+ def test_admin_add_user_password_too_long_warning(self):
+ self._login_admin()
+
+ overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
+ resp = self.client.post(
+ url_for('admin_add_user'),
+ data=dict(username='dellsberg', password=overly_long_password,
+ password_again=overly_long_password, is_admin=False))
+
+ self.assertIn('Your password must be between', resp.data)
+
+ def test_admin_edits_user_invalid_username(self):
+ """Test expected error message when admin attempts to change a user's
+ username to a username that is taken by another user."""
+ self._login_admin()
+ new_username = self.admin.username
+
+ self.client.post(
+ url_for('admin_edit_user', user_id=self.user.id),
+ data=dict(username=new_username, is_admin=False,
+ password='', password_again=''))
+
+ self.assertMessageFlashed('Username "{}" is already taken!'.format(
+ new_username), 'error')
+
+ def test_admin_resets_user_hotp(self):
+ self._login_admin()
+ old_hotp = self.user.hotp
+
+ resp = self.client.post(url_for('admin_reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret=123456))
+ new_hotp = self.user.hotp
+
+ # check that hotp is different
+ self.assertNotEqual(old_hotp.secret, new_hotp.secret)
+ # Redirect to admin 2FA view
+ self.assertRedirects(
+ resp,
+ url_for('admin_new_user_two_factor', uid=self.user.id))
+
+ def test_admin_resets_user_hotp_format_non_hexa(self):
+ self._login_admin()
+ old_hotp = self.user.hotp.secret
+
+ self.client.post(url_for('admin_reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret='ZZ'))
+ new_hotp = self.user.hotp.secret
+
+ self.assertEqual(old_hotp, new_hotp)
+ self.assertMessageFlashed(
+ "Invalid secret format: "
+ "please only submit letters A-F and numbers 0-9.", "error")
+
+ def test_admin_resets_user_hotp_format_odd(self):
+ self._login_admin()
+ old_hotp = self.user.hotp.secret
+
+ self.client.post(url_for('admin_reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret='Z'))
+ new_hotp = self.user.hotp.secret
+
+ self.assertEqual(old_hotp, new_hotp)
+ self.assertMessageFlashed(
+ "Invalid secret format: "
+ "odd-length secret. Did you mistype the secret?", "error")
+
+ @patch('db.Journalist.set_hotp_secret')
+ @patch('journalist.app.logger.error')
+ def test_admin_resets_user_hotp_error(self,
+ mocked_error_logger,
+ mock_set_hotp_secret):
+ self._login_admin()
+ old_hotp = self.user.hotp.secret
+
+ error_message = 'SOMETHING WRONG!'
+ mock_set_hotp_secret.side_effect = TypeError(error_message)
+
+ otp_secret = '1234'
+ self.client.post(url_for('admin_reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret=otp_secret))
+ new_hotp = self.user.hotp.secret
+
+ self.assertEqual(old_hotp, new_hotp)
+ self.assertMessageFlashed("An unexpected error occurred! "
+ "Please check the application "
+ "logs or inform your adminstrator.", "error")
+ mocked_error_logger.assert_called_once_with(
+ "set_hotp_secret '{}' (id {}) failed: {}".format(
+ otp_secret, self.user.id, error_message))
+
+ def test_user_resets_hotp(self):
+ self._login_user()
+ old_hotp = self.user.hotp
+
+ resp = self.client.post(url_for('account_reset_two_factor_hotp'),
+ data=dict(otp_secret=123456))
+ new_hotp = self.user.hotp
+
+ # check that hotp is different
+ self.assertNotEqual(old_hotp.secret, new_hotp.secret)
+ # should redirect to verification page
+ self.assertRedirects(resp, url_for('account_new_two_factor'))
+
+ def test_admin_resets_user_totp(self):
+ self._login_admin()
+ old_totp = self.user.totp
+
+ resp = self.client.post(
+ url_for('admin_reset_two_factor_totp'),
+ data=dict(uid=self.user.id))
+ new_totp = self.user.totp
+
+ self.assertNotEqual(old_totp.secret, new_totp.secret)
+
+ self.assertRedirects(
+ resp,
+ url_for('admin_new_user_two_factor', uid=self.user.id))
+
+ def test_user_resets_totp(self):
+ self._login_user()
+ old_totp = self.user.totp
+
+ resp = self.client.post(url_for('account_reset_two_factor_totp'))
+ new_totp = self.user.totp
+
+ # check that totp is different
+ self.assertNotEqual(old_totp.secret, new_totp.secret)
+
+ # should redirect to verification page
+ self.assertRedirects(resp, url_for('account_new_two_factor'))
+
+ def test_admin_resets_hotp_with_missing_otp_secret_key(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin_reset_two_factor_hotp'),
+ data=dict(uid=self.user.id))
+
+ self.assertIn('Change Secret', resp.data)
+
+ def test_admin_new_user_2fa_redirect(self):
+ self._login_admin()
+ resp = self.client.post(
+ url_for('admin_new_user_two_factor', uid=self.user.id),
+ data=dict(token='mocked'))
+ self.assertRedirects(resp, url_for('admin_index'))
+
+ def test_http_get_on_admin_new_user_two_factor_page(self):
+ self._login_admin()
+ resp = self.client.get(url_for('admin_new_user_two_factor',
+ uid=self.user.id))
+ # any GET req should take a user to the admin_new_user_two_factor page
+ self.assertIn('Authenticator', resp.data)
+
+ def test_http_get_on_admin_add_user_page(self):
+ self._login_admin()
+ resp = self.client.get(url_for('admin_add_user'))
+ # any GET req should take a user to the admin_add_user page
+ self.assertIn('ADD USER', resp.data)
+
+ def test_admin_add_user(self):
+ self._login_admin()
+ max_journalist_pk = max([user.id for user in Journalist.query.all()])
+
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username='dellsberg',
+ password='pentagonpapers',
+ password_again='pentagonpapers',
+ is_admin=False))
+
+ self.assertRedirects(resp, url_for('admin_new_user_two_factor',
+ uid=max_journalist_pk+1))
+
+ def test_admin_add_user_without_username(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username='',
+ password='pentagonpapers',
+ password_again='pentagonpapers',
+ is_admin=False))
+ self.assertIn('Missing username', resp.data)
+
+ @patch('journalist.app.logger.error')
+ @patch('journalist.Journalist',
+ side_effect=IntegrityError('STATEMENT', 'PARAMETERS', None))
+ def test_admin_add_user_integrity_error(self,
+ mock_journalist,
+ mocked_error_logger):
+ self._login_admin()
+
+ self.client.post(url_for('admin_add_user'),
+ data=dict(username='username',
+ password='pentagonpapers',
+ password_again='pentagonpapers',
+ is_admin=False))
+
+ mocked_error_logger.assert_called_once_with(
+ "Adding user 'username' failed: (__builtin__.NoneType) "
+ "None [SQL: 'STATEMENT'] [parameters: 'PARAMETERS']")
+ self.assertMessageFlashed(
+ "An error occurred saving this user to the database."
+ " Please check the application logs.",
+ "error")
+
+ def test_admin_page_restriction_http_gets(self):
+ admin_urls = [url_for('admin_index'), url_for('admin_add_user'),
+ url_for('admin_edit_user', user_id=self.user.id)]
+
+ self._login_user()
+ for admin_url in admin_urls:
+ resp = self.client.get(admin_url)
+ self.assertStatus(resp, 302)
+
+ def test_admin_page_restriction_http_posts(self):
+ admin_urls = [url_for('admin_reset_two_factor_totp'),
+ url_for('admin_reset_two_factor_hotp'),
+ url_for('admin_add_user', user_id=self.user.id),
+ url_for('admin_new_user_two_factor'),
+ url_for('admin_reset_two_factor_totp'),
+ url_for('admin_reset_two_factor_hotp'),
+ url_for('admin_edit_user', user_id=self.user.id),
+ url_for('admin_delete_user', user_id=self.user.id)]
+ self._login_user()
+ for admin_url in admin_urls:
+ resp = self.client.post(admin_url)
+ self.assertStatus(resp, 302)
+
+ def test_user_authorization_for_gets(self):
+ urls = [url_for('index'), url_for('col', sid='1'),
+ url_for('download_single_submission', sid='1', fn='1'),
+ url_for('edit_account')]
+
+ for url in urls:
+ resp = self.client.get(url)
+ self.assertStatus(resp, 302)
+
+ def test_user_authorization_for_posts(self):
+ urls = [url_for('add_star', sid='1'), url_for('remove_star', sid='1'),
+ url_for('col_process'), url_for('col_delete_single', sid='1'),
+ url_for('reply'), url_for('generate_code'), url_for('bulk'),
+ url_for('account_new_two_factor'),
+ url_for('account_reset_two_factor_totp'),
+ url_for('account_reset_two_factor_hotp')]
+ for url in urls:
+ res = self.client.post(url)
+ self.assertStatus(res, 302)
+
+ def test_invalid_user_password_change(self):
+ self._login_user()
+ res = self.client.post(url_for('edit_account'), data=dict(
+ password='not',
+ password_again='thesame'))
+ self.assertRedirects(res, url_for('edit_account'))
+
+ def test_too_long_user_password_change(self):
+ self._login_user()
+ overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
+
+ self.client.post(url_for('edit_account'), data=dict(
+ password=overly_long_password,
+ password_again=overly_long_password),
+ follow_redirects=True)
+
+ self.assertMessageFlashed('Your password must be between {} and {} '
+ 'characters.'.format(
+ Journalist.MIN_PASSWORD_LEN,
+ Journalist.MAX_PASSWORD_LEN), 'error')
+
+ def test_valid_user_password_change(self):
+ self._login_user()
+ self.client.post(url_for('edit_account'), data=dict(
+ password='validlongpassword',
+ password_again='validlongpassword'))
+ self.assertMessageFlashed("Account successfully updated!", 'success')
+
+ def test_regenerate_totp(self):
+ self._login_user()
+ old_totp = self.user.totp
+
+ res = self.client.post(url_for('account_reset_two_factor_totp'))
+ new_totp = self.user.totp
+
+ # check that totp is different
+ self.assertNotEqual(old_totp.secret, new_totp.secret)
+
+ # should redirect to verification page
+ self.assertRedirects(res, url_for('account_new_two_factor'))
+
+ def test_edit_hotp(self):
+ self._login_user()
+ old_hotp = self.user.hotp
+
+ res = self.client.post(
+ url_for('account_reset_two_factor_hotp'),
+ data=dict(otp_secret=123456)
+ )
+ new_hotp = self.user.hotp
+
+ # check that hotp is different
+ self.assertNotEqual(old_hotp.secret, new_hotp.secret)
+
+ # should redirect to verification page
+ self.assertRedirects(res, url_for('account_new_two_factor'))
+
+ def test_delete_source_deletes_submissions(self):
+ """Verify that when a source is deleted, the submissions that
+ correspond to them are also deleted."""
+
+ self._delete_collection_setup()
+ journalist.delete_collection(self.source.filesystem_id)
+
+ # Source should be gone
+ results = db_session.query(Source).filter(
+ Source.id == self.source.id).all()
+ self.assertEqual(results, [])
+
+ def _delete_collection_setup(self):
+ self.source, _ = utils.db_helper.init_source()
+ utils.db_helper.submit(self.source, 2)
+ utils.db_helper.reply(self.user, self.source, 2)
+
+ def test_delete_collection_updates_db(self):
+ """Verify that when a source is deleted, their Source identity
+ record, as well as Reply & Submission records associated with
+ that record are purged from the database."""
+ self._delete_collection_setup()
+ journalist.delete_collection(self.source.filesystem_id)
+ results = Source.query.filter(Source.id == self.source.id).all()
+ self.assertEqual(results, [])
+ results = db_session.query(
+ Submission.source_id == self.source.id).all()
+ self.assertEqual(results, [])
+ results = db_session.query(Reply.source_id == self.source.id).all()
+ self.assertEqual(results, [])
+
+ def test_delete_source_deletes_source_key(self):
+ """Verify that when a source is deleted, the PGP key that corresponds
+ to them is also deleted."""
+ self._delete_collection_setup()
+
+ # Source key exists
+ source_key = crypto_util.getkey(self.source.filesystem_id)
+ self.assertNotEqual(source_key, None)
+
+ journalist.delete_collection(self.source.filesystem_id)
+
+ # Source key no longer exists
+ source_key = crypto_util.getkey(self.source.filesystem_id)
+ self.assertEqual(source_key, None)
+
+ def test_delete_source_deletes_docs_on_disk(self):
+ """Verify that when a source is deleted, the encrypted documents that
+ exist on disk is also deleted."""
+ self._delete_collection_setup()
+
+ # Encrypted documents exists
+ dir_source_docs = os.path.join(config.STORE_DIR,
+ self.source.filesystem_id)
+ self.assertTrue(os.path.exists(dir_source_docs))
+
+ job = journalist.delete_collection(self.source.filesystem_id)
+
+ # Wait up to 5s to wait for Redis worker `srm` operation to complete
+ utils.async.wait_for_redis_worker(job)
+
+ # Encrypted documents no longer exist
+ self.assertFalse(os.path.exists(dir_source_docs))
+
+ def test_download_selected_submissions_from_source(self):
+ source, _ = utils.db_helper.init_source()
+ submissions = utils.db_helper.submit(source, 4)
+ selected_submissions = random.sample(submissions, 2)
+ selected_fnames = [submission.filename
+ for submission in selected_submissions]
+ selected_fnames.sort()
+
+ self._login_user()
+ resp = self.client.post(
+ '/bulk', data=dict(action='download',
+ sid=source.filesystem_id,
+ doc_names_selected=selected_fnames))
+
+        # The download request was successful, and the app returned a zipfile
+ self.assertEqual(resp.status_code, 200)
+ self.assertEqual(resp.content_type, 'application/zip')
+ self.assertTrue(zipfile.is_zipfile(StringIO(resp.data)))
+
+ # The submissions selected are in the zipfile
+ for filename in selected_fnames:
+ self.assertTrue(
+ # Check that the expected filename is in the zip file
+ zipfile.ZipFile(StringIO(resp.data)).getinfo(
+ os.path.join(
+ source.journalist_filename,
+ source.journalist_designation,
+ "%s_%s" % (filename.split('-')[0],
+ source.last_updated.date()),
+ filename
+ ))
+ )
+
+ # The submissions not selected are absent from the zipfile
+ not_selected_submissions = set(submissions).difference(
+ selected_submissions)
+ not_selected_fnames = [submission.filename
+ for submission in not_selected_submissions]
+
+ for filename in not_selected_fnames:
+ with self.assertRaises(KeyError):
+ zipfile.ZipFile(StringIO(resp.data)).getinfo(
+ os.path.join(
+ source.journalist_filename,
+ source.journalist_designation,
+ "%s_%s" % (filename.split('-')[0],
+ source.last_updated.date()),
+ filename
+ ))
+
+ def _bulk_download_setup(self):
+ """Create a couple sources, make some submissions on their behalf,
+ mark some of them as downloaded, and then perform *action* on all
+ sources."""
+ self.source0, _ = utils.db_helper.init_source()
+ self.source1, _ = utils.db_helper.init_source()
+ self.journo0, _ = utils.db_helper.init_journalist()
+ self.submissions0 = utils.db_helper.submit(self.source0, 2)
+ self.submissions1 = utils.db_helper.submit(self.source1, 3)
+ self.downloaded0 = random.sample(self.submissions0, 1)
+ utils.db_helper.mark_downloaded(*self.downloaded0)
+ self.not_downloaded0 = set(self.submissions0).difference(
+ self.downloaded0)
+ self.downloaded1 = random.sample(self.submissions1, 2)
+ utils.db_helper.mark_downloaded(*self.downloaded1)
+ self.not_downloaded1 = set(self.submissions1).difference(
+ self.downloaded1)
+
+ def test_download_unread_all_sources(self):
+ self._bulk_download_setup()
+ self._login_user()
+
+ # Download all unread messages from all sources
+ self.resp = self.client.post(
+ '/col/process',
+ data=dict(action='download-unread',
+ cols_selected=[self.source0.filesystem_id,
+ self.source1.filesystem_id]))
+
+        # The download request was successful, and the app returned a zipfile
+ self.assertEqual(self.resp.status_code, 200)
+ self.assertEqual(self.resp.content_type, 'application/zip')
+ self.assertTrue(zipfile.is_zipfile(StringIO(self.resp.data)))
+
+        # All the not downloaded submissions are in the zipfile
+ for submission in self.not_downloaded0:
+ self.assertTrue(
+ zipfile.ZipFile(StringIO(self.resp.data)).getinfo(
+ os.path.join(
+ "unread",
+ self.source0.journalist_designation,
+ "%s_%s" % (submission.filename.split('-')[0],
+ self.source0.last_updated.date()),
+ submission.filename
+ ))
+ )
+ for submission in self.not_downloaded1:
+ self.assertTrue(
+ zipfile.ZipFile(StringIO(self.resp.data)).getinfo(
+ os.path.join(
+ "unread",
+ self.source1.journalist_designation,
+ "%s_%s" % (submission.filename.split('-')[0],
+ self.source1.last_updated.date()),
+ submission.filename
+ ))
+ )
+
+ # All the downloaded submissions are absent from the zipfile
+ for submission in self.downloaded0:
+ with self.assertRaises(KeyError):
+ zipfile.ZipFile(StringIO(self.resp.data)).getinfo(
+ os.path.join(
+ "unread",
+ self.source0.journalist_designation,
+ "%s_%s" % (submission.filename.split('-')[0],
+ self.source0.last_updated.date()),
+ submission.filename
+ ))
+
+ for submission in self.downloaded1:
+ with self.assertRaises(KeyError):
+ zipfile.ZipFile(StringIO(self.resp.data)).getinfo(
+ os.path.join(
+ "unread",
+ self.source1.journalist_designation,
+ "%s_%s" % (submission.filename.split('-')[0],
+ self.source1.last_updated.date()),
+ submission.filename
+ ))
+
+ def test_download_all_selected_sources(self):
+ self._bulk_download_setup()
+ self._login_user()
+
+        # Download all messages from self.source1
+ self.resp = self.client.post(
+ '/col/process',
+ data=dict(action='download-all',
+ cols_selected=[self.source1.filesystem_id]))
+
+ resp = self.client.post(
+ '/col/process',
+ data=dict(action='download-all',
+ cols_selected=[self.source1.filesystem_id]))
+
+        # The download request was successful, and the app returned a zipfile
+ self.assertEqual(resp.status_code, 200)
+ self.assertEqual(resp.content_type, 'application/zip')
+ self.assertTrue(zipfile.is_zipfile(StringIO(resp.data)))
+
+ # All messages from self.source1 are in the zipfile
+ for submission in self.submissions1:
+ self.assertTrue(
+ zipfile.ZipFile(StringIO(resp.data)).getinfo(
+ os.path.join(
+ "all",
+ self.source1.journalist_designation,
+ "%s_%s" % (submission.filename.split('-')[0],
+ self.source1.last_updated.date()),
+ submission.filename)
+ )
+ )
+
+ # All messages from self.source0 are absent from the zipfile
+ for submission in self.submissions0:
+ with self.assertRaises(KeyError):
+ zipfile.ZipFile(StringIO(resp.data)).getinfo(
+ os.path.join(
+ "all",
+ self.source0.journalist_designation,
+ "%s_%s" % (submission.filename.split('-')[0],
+ self.source0.last_updated.date()),
+ submission.filename)
+ )
+
+ def test_add_star_redirects_to_index(self):
+ source, _ = utils.db_helper.init_source()
+ self._login_user()
+ resp = self.client.post(url_for('add_star', sid=source.filesystem_id))
+ self.assertRedirects(resp, url_for('index'))
+
+
+class TestJournalistAppTwo(unittest.TestCase):
def setUp(self):
journalist.logged_in = MagicMock()
@@ -18,10 +913,15 @@ def setUp(self):
journalist.get_docs = MagicMock()
journalist.get_or_else = MagicMock()
+ def _set_up_request(self, cols_selected, action):
+ journalist.request.form.__contains__.return_value = True
+ journalist.request.form.getlist = MagicMock(return_value=cols_selected)
+ journalist.request.form.__getitem__.return_value = action
+
@patch("journalist.col_delete")
def test_col_process_delegates_to_col_delete(self, col_delete):
cols_selected = ['source_id']
- self.set_up_request(cols_selected, 'delete')
+ self._set_up_request(cols_selected, 'delete')
journalist.col_process()
@@ -30,7 +930,7 @@ def test_col_process_delegates_to_col_delete(self, col_delete):
@patch("journalist.col_star")
def test_col_process_delegates_to_col_star(self, col_star):
cols_selected = ['source_id']
- self.set_up_request(cols_selected, 'star')
+ self._set_up_request(cols_selected, 'star')
journalist.col_process()
@@ -39,7 +939,7 @@ def test_col_process_delegates_to_col_star(self, col_star):
@patch("journalist.col_un_star")
def test_col_process_delegates_to_col_un_star(self, col_un_star):
cols_selected = ['source_id']
- self.set_up_request(cols_selected, 'un-star')
+ self._set_up_request(cols_selected, 'un-star')
journalist.col_process()
@@ -48,7 +948,7 @@ def test_col_process_delegates_to_col_un_star(self, col_un_star):
@patch("journalist.abort")
def test_col_process_returns_404_with_bad_action(self, abort):
cols_selected = ['source_id']
- self.set_up_request(cols_selected, 'something-random')
+ self._set_up_request(cols_selected, 'something-random')
journalist.col_process()
@@ -67,11 +967,6 @@ def test_col_un_star_call_db(self, db_session):
db_session.commit.assert_called_with()
- def set_up_request(self, cols_selected, action):
- journalist.request.form.__contains__.return_value = True
- journalist.request.form.getlist = MagicMock(return_value=cols_selected)
- journalist.request.form.__getitem__.return_value = action
-
@classmethod
def tearDownClass(cls):
# Reset the module variables that were changed to mocks so we don't
@@ -82,29 +977,15 @@ def tearDownClass(cls):
class TestJournalistLogin(unittest.TestCase):
def setUp(self):
- common.shared_setup()
+ utils.env.setup()
# Patch the two-factor verification so it always succeeds
- patcher = patch('db.Journalist.verify_token')
- self.addCleanup(patcher.stop)
- self.mock_journalist_verify_token = patcher.start()
- self.mock_journalist_verify_token.return_value = True
-
- self.username = "test user"
- self.password = "test password"
- self.user = Journalist(
- username=self.username,
- password=self.password)
- db_session.add(self.user)
- db_session.commit()
-
- # Use a patched login function to avoid dealing with two-factor tokens
- # (which are being ignored here anyway)
- self.login = lambda username, password: \
- Journalist.login(username, password, "")
+ utils.db_helper.mock_verify_token(self)
+
+ self.user, self.user_pw = utils.db_helper.init_journalist()
def tearDown(self):
- common.shared_teardown()
+ utils.env.teardown()
# TODO: figure out why this is necessary here, but unnecessary in all
# of the tests in `tests/test_unit_*.py`. Without this, the session
# continues to return values even if the underlying database is deleted
@@ -113,21 +994,23 @@ def tearDown(self):
@patch('db.Journalist._scrypt_hash')
@patch('db.Journalist.valid_password', return_value=True)
- def test_login_with_valid_length_password_calls_scrypt(
- self, mock_scrypt_hash, mock_valid_password):
- self.login(self.username, self.password)
- self.assertTrue(mock_scrypt_hash.called,
- "Failed to call _scrypt_hash for password w/ valid length")
+ def test_valid_login_calls_scrypt(self,
+ mock_scrypt_hash,
+ mock_valid_password):
+ Journalist.login(self.user.username, self.user_pw, 'mocked')
+ self.assertTrue(
+ mock_scrypt_hash.called,
+ "Failed to call _scrypt_hash for password w/ valid length")
@patch('db.Journalist._scrypt_hash')
- def test_login_with_invalid_length_password_doesnt_call_scrypt(
- self, mock_scrypt_hash):
- print "test_login_with_invalid_length_password_calls_scrypt"
+ def test_login_with_invalid_password_doesnt_call_scrypt(self,
+ mock_scrypt_hash):
invalid_pw = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
with self.assertRaises(InvalidPasswordLength):
- self.login(self.username, invalid_pw)
- self.assertFalse(mock_scrypt_hash.called,
- "Called _scrypt_hash for password w/ invalid length")
+ Journalist.login(self.user.username, invalid_pw, 'mocked')
+ self.assertFalse(
+ mock_scrypt_hash.called,
+ "Called _scrypt_hash for password w/ invalid length")
@classmethod
def tearDownClass(cls):
@@ -135,3 +1018,61 @@ def tearDownClass(cls):
# break other tests
reload(journalist)
+
+class TestJournalist(unittest.TestCase):
+
+ def setUp(self):
+ journalist.logged_in = MagicMock()
+ journalist.make_star_true = MagicMock()
+ journalist.db_session = MagicMock()
+ journalist.url_for = MagicMock()
+ journalist.redirect = MagicMock()
+ journalist.get_one_or_else = MagicMock()
+
+ @patch('journalist.url_for')
+ @patch('journalist.redirect')
+ def test_add_star_renders_template(self, redirect, url_for):
+ redirect_template = journalist.add_star('sid')
+
+ self.assertEqual(redirect_template, redirect(url_for('index')))
+
+ @patch('journalist.db_session')
+ def test_add_star_makes_commits(self, db_session):
+ journalist.add_star('sid')
+
+ db_session.commit.assert_called_with()
+
+ @patch('journalist.make_star_true')
+ def test_single_delegates_to_make_star_true(self, make_star_true):
+ sid = 'sid'
+
+ journalist.add_star(sid)
+
+ make_star_true.assert_called_with(sid)
+
+ @patch('journalist.url_for')
+ @patch('journalist.redirect')
+ def test_remove_star_renders_template(self, redirect, url_for):
+ redirect_template = journalist.remove_star('sid')
+
+ self.assertEqual(redirect_template, redirect(url_for('index')))
+
+ @patch('journalist.db_session')
+ def test_remove_star_makes_commits(self, db_session):
+ journalist.remove_star('sid')
+
+ db_session.commit.assert_called_with()
+
+ @patch('journalist.make_star_false')
+ def test_remove_star_delegates_to_make_star_false(self, make_star_false):
+ sid = 'sid'
+
+ journalist.remove_star(sid)
+
+ make_star_false.assert_called_with(sid)
+
+ @classmethod
+ def tearDownClass(cls):
+ # Reset the module variables that were changed to mocks so we don't
+ # break other tests
+ reload(journalist)
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_manage.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+import manage
+import mock
+from StringIO import StringIO
+import sys
+import unittest
+
+import utils
+
+
+class TestManagePy(unittest.TestCase):
+ def test_parse_args(self):
+ # just test that the arg parser is stable
+ manage.get_args()
+
+
+class TestManagementCommand(unittest.TestCase):
+ def setUp(self):
+ utils.env.setup()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ @mock.patch("__builtin__.raw_input", return_value='N')
+ @mock.patch("manage.getpass", return_value='testtesttest')
+ @mock.patch("sys.stdout", new_callable=StringIO)
+ def test_exception_handling_when_duplicate_username(self, mock_raw_input,
+ mock_getpass,
+ mock_stdout):
+ """Regression test for duplicate username logic in manage.py"""
+
+ # Inserting the user for the first time should succeed
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 0)
+ self.assertIn('successfully added', sys.stdout.getvalue())
+
+ # Inserting the user for a second time should fail
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 1)
+ self.assertIn('ERROR: That username is already taken!',
+ sys.stdout.getvalue())
diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_secure_tempfile.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+import os
+import unittest
+
+from gnupg._util import _is_stream
+
+os.environ['SECUREDROP_ENV'] = 'test'
+import config
+import secure_tempfile
+import utils
+
+
+class TestSecureTempfile(unittest.TestCase):
+ def setUp(self):
+ utils.env.setup()
+ self.f = secure_tempfile.SecureTemporaryFile(config.STORE_DIR)
+ self.msg = '410,757,864,530'
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ def test_write_then_read_twice(self):
+ self.f.write(self.msg)
+ self.f.read()
+
+ self.assertEqual(self.f.read(), '')
+
+ def test_read_before_writing(self):
+ with self.assertRaisesRegexp(AssertionError,
+ 'You must write before reading!'):
+ self.f.read()
+
+ def test_write_then_read_once(self):
+ self.f.write(self.msg)
+
+ self.assertEqual(self.f.read(), self.msg)
+
+ def test_write_twice_then_read_once(self):
+ self.f.write(self.msg)
+ self.f.write(self.msg)
+
+ self.assertEqual(self.f.read(), self.msg*2)
+
+ def test_write_then_read_twice(self):
+ self.f.write(self.msg)
+
+ self.assertEqual(self.f.read(), self.msg)
+ self.assertEqual(self.f.read(), '')
+
+ def test_write_then_read_then_write(self):
+ self.f.write(self.msg)
+ self.f.read()
+
+ with self.assertRaisesRegexp(AssertionError,
+ 'You cannot write after reading!'):
+ self.f.write('BORN TO DIE')
+
+ def test_read_write_unicode(self):
+        unicode_msg = u'鬼神 Kill Em All 1989'
+ self.f.write(unicode_msg)
+
+ self.assertEqual(self.f.read().decode('utf-8'), unicode_msg)
+
+ def test_file_seems_encrypted(self):
+ self.f.write(self.msg)
+ with open(self.f.filepath, 'rb') as fh:
+ contents = fh.read().decode()
+
+ self.assertNotIn(self.msg, contents)
+
+ def test_file_is_removed_from_disk(self):
+ fp = self.f.filepath
+ self.f.write(self.msg)
+ self.f.read()
+
+ self.assertTrue(os.path.exists(fp))
+
+ self.f.close()
+
+ self.assertFalse(os.path.exists(fp))
+
+ def test_SecureTemporaryFile_is_a_STREAMLIKE_TYPE(self):
+ self.assertTrue(_is_stream(secure_tempfile.SecureTemporaryFile('/tmp')))
+
+ def test_buffered_read(self):
+ msg = self.msg * 1000
+ self.f.write(msg)
+ str = ''
+ while True:
+ char = self.f.read(1024)
+ if char:
+ str += char
+ else:
+ break
+
+ self.assertEqual(str, msg)
+
+ def test_tmp_file_id_omits_invalid_chars(self):
+ """The `SecureTempFile.tmp_file_id` instance attribute is used as the filename
+ for the secure temporary file. This attribute should not contain
+ invalid characters such as '/' and '\0' (null)."""
+ self.assertNotIn('/', self.f.tmp_file_id)
+ self.assertNotIn('\0', self.f.tmp_file_id)
diff --git a/securedrop/tests/test_single_star.py b/securedrop/tests/test_single_star.py
deleted file mode 100644
--- a/securedrop/tests/test_single_star.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import unittest
-import journalist
-from mock import patch, ANY, MagicMock
-
-
-class TestJournalist(unittest.TestCase):
-
- def setUp(self):
- journalist.logged_in = MagicMock()
- journalist.make_star_true = MagicMock()
- journalist.db_session = MagicMock()
- journalist.url_for = MagicMock()
- journalist.redirect = MagicMock()
- journalist.get_one_or_else = MagicMock()
-
- @patch('journalist.url_for')
- @patch('journalist.redirect')
- def test_add_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.add_star('sid')
-
- self.assertEqual(redirect_template, redirect(url_for('index')))
-
- @patch('journalist.db_session')
- def test_add_star_makes_commits(self, db_session):
- journalist.add_star('sid')
-
- db_session.commit.assert_called_with()
-
- @patch('journalist.make_star_true')
- def test_single_delegates_to_make_star_true(self, make_star_true):
- sid = 'sid'
-
- journalist.add_star(sid)
-
- make_star_true.assert_called_with(sid)
-
- @patch('journalist.url_for')
- @patch('journalist.redirect')
- def test_remove_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.remove_star('sid')
-
- self.assertEqual(redirect_template, redirect(url_for('index')))
-
- @patch('journalist.db_session')
- def test_remove_star_makes_commits(self, db_session):
- journalist.remove_star('sid')
-
- db_session.commit.assert_called_with()
-
- @patch('journalist.make_star_false')
- def test_remove_star_delegates_to_make_star_false(self, make_star_false):
- sid = 'sid'
-
- journalist.remove_star(sid)
-
- make_star_false.assert_called_with(sid)
-
- @classmethod
- def tearDownClass(cls):
- # Reset the module variables that were changed to mocks so we don't
- # break other tests
- reload(journalist)
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_source.py
@@ -0,0 +1,304 @@
+# -*- coding: utf-8 -*-
+from cStringIO import StringIO
+import gzip
+from mock import patch, ANY
+import os
+import re
+import unittest
+
+from bs4 import BeautifulSoup
+from flask import session, escape
+from flask_testing import TestCase
+
+from db import Source
+import source
+import version
+import utils
+import json
+import config
+
+
+class TestSourceApp(TestCase):
+
+ def create_app(self):
+ return source.app
+
+ def setUp(self):
+ utils.env.setup()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ def test_page_not_found(self):
+ """Verify the page not found condition returns the intended template"""
+ response = self.client.get('/UNKNOWN')
+ self.assert404(response)
+ self.assertTemplateUsed('notfound.html')
+
+ def test_index(self):
+ """Test that the landing page loads and looks how we expect"""
+ response = self.client.get('/')
+ self.assertEqual(response.status_code, 200)
+ self.assertIn("Submit documents for the first time", response.data)
+ self.assertIn("Already submitted something?", response.data)
+
+ def _find_codename(self, html):
+ """Find a source codename (diceware passphrase) in HTML"""
+ # Codenames may contain HTML escape characters, and the wordlist
+ # contains various symbols.
+ codename_re = r'<p [^>]*id="codename"[^>]*>(?P<codename>[a-z0-9 &#;?:=@_.*+()\'"$%!-]+)</p>'
+ codename_match = re.search(codename_re, html)
+ self.assertIsNotNone(codename_match)
+ return codename_match.group('codename')
+
+ def test_generate(self):
+ with self.client as c:
+ resp = c.get('/generate')
+ self.assertEqual(resp.status_code, 200)
+ session_codename = session['codename']
+ self.assertIn("This codename is what you will use in future visits", resp.data)
+ codename = self._find_codename(resp.data)
+ self.assertEqual(len(codename.split()), Source.NUM_WORDS)
+ # codename is also stored in the session - make sure it matches the
+ # codename displayed to the source
+ self.assertEqual(codename, escape(session_codename))
+
+ def test_generate_has_login_link(self):
+ """The generate page should have a link to remind people to login
+ if they already have a codename, rather than create a new one.
+ """
+ resp = self.client.get('/generate')
+ self.assertIn("ALREADY HAVE A CODENAME?", resp.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ already_have_codename_link = soup.select('a#already-have-codename')[0]
+ self.assertEqual(already_have_codename_link['href'], '/login')
+
+ def test_generate_already_logged_in(self):
+ self._new_codename()
+ # Make sure it redirects to /lookup when logged in
+ resp = self.client.get('/generate')
+ self.assertEqual(resp.status_code, 302)
+ # Make sure it flashes the message on the lookup page
+ resp = self.client.get('/generate', follow_redirects=True)
+ # Should redirect to /lookup
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("because you are already logged in.", resp.data)
+
+ def test_create(self):
+ with self.client as c:
+ resp = c.get('/generate')
+ codename = session['codename']
+ resp = c.post('/create', follow_redirects=True)
+ self.assertTrue(session['logged_in'])
+ # should be redirected to /lookup
+ self.assertIn("Submit Materials", resp.data)
+
+ def _new_codename(self):
+ return utils.db_helper.new_codename(self.client, session)
+
+ def test_lookup(self):
+ """Test various elements on the /lookup page."""
+ codename = self._new_codename()
+ resp = self.client.post('login', data=dict(codename=codename),
+ follow_redirects=True)
+ # redirects to /lookup
+ self.assertIn("public key", resp.data)
+ # download the public key
+ resp = self.client.get('journalist-key')
+ self.assertIn("BEGIN PGP PUBLIC KEY BLOCK", resp.data)
+
+ def test_login_and_logout(self):
+ resp = self.client.get('/login')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Enter Codename", resp.data)
+
+ codename = self._new_codename()
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Submit Materials", resp.data)
+ self.assertTrue(session['logged_in'])
+ resp = c.get('/logout', follow_redirects=True)
+
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename='invalid'),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn('Sorry, that is not a recognized codename.', resp.data)
+ self.assertNotIn('logged_in', session)
+
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertTrue(session['logged_in'])
+ resp = c.get('/logout', follow_redirects=True)
+ self.assertTrue(not session)
+ self.assertIn('Thank you for exiting your session!', resp.data)
+
+ def test_login_with_whitespace(self):
+ """Test that codenames with leading or trailing whitespace still work"""
+ def login_test(codename):
+ resp = self.client.get('/login')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Enter Codename", resp.data)
+
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Submit Materials", resp.data)
+ self.assertTrue(session['logged_in'])
+ resp = c.get('/logout', follow_redirects=True)
+
+ codename = self._new_codename()
+ login_test(codename + ' ')
+ login_test(' ' + codename + ' ')
+ login_test(' ' + codename)
+
+ def _dummy_submission(self):
+ """
+ Helper to make a submission (content unimportant), mostly useful in
+ testing notification behavior for a source's first vs. their
+ subsequent submissions
+ """
+ return self.client.post('/submit', data=dict(
+ msg="Pay no attention to the man behind the curtain.",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+
+ def test_initial_submission_notification(self):
+ """
+ Regardless of the type of submission (message, file, or both), the
+ first submission is always greeted with a notification
+ reminding sources to check back later for replies.
+ """
+ self._new_codename()
+ resp = self._dummy_submission()
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn(
+ "Thank you for sending this information to us.",
+ resp.data)
+
+ def test_submit_message(self):
+ self._new_codename()
+ self._dummy_submission()
+ resp = self.client.post('/submit', data=dict(
+ msg="This is a test.",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message", resp.data)
+
+ def test_submit_empty_message(self):
+ self._new_codename()
+ resp = self.client.post('/submit', data=dict(
+ msg="",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertIn("You must enter a message or choose a file to submit.",
+ resp.data)
+
+ def test_submit_big_message(self):
+ '''
+ When the message is larger than 512KB it's written to disk instead of
+ just residing in memory. Make sure the different return type of
+ SecureTemporaryFile is handled as well as BytesIO.
+ '''
+ self._new_codename()
+ self._dummy_submission()
+ resp = self.client.post('/submit', data=dict(
+ msg="AA" * (1024 * 512),
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message", resp.data)
+
+ def test_submit_file(self):
+ self._new_codename()
+ self._dummy_submission()
+ resp = self.client.post('/submit', data=dict(
+ msg="",
+ fh=(StringIO('This is a test'), 'test.txt'),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn('Thanks! We received your document', resp.data)
+
+ def test_submit_both(self):
+ self._new_codename()
+ self._dummy_submission()
+ resp = self.client.post('/submit', data=dict(
+ msg="This is a test",
+ fh=(StringIO('This is a test'), 'test.txt'),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message and document",
+ resp.data)
+
+ def test_delete_all(self):
+ journalist, _ = utils.db_helper.init_journalist()
+ source, codename = utils.db_helper.init_source()
+ replies = utils.db_helper.reply(journalist, source, 1)
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ resp = c.post('/delete-all', follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("All replies have been deleted", resp.data)
+
+ @patch('gzip.GzipFile', wraps=gzip.GzipFile)
+ def test_submit_sanitizes_filename(self, gzipfile):
+ """Test that upload file name is sanitized"""
+ insecure_filename = '../../bin/gpg'
+ sanitized_filename = 'bin_gpg'
+
+ self._new_codename()
+ self.client.post('/submit', data=dict(
+ msg="",
+ fh=(StringIO('This is a test'), insecure_filename),
+ ), follow_redirects=True)
+ gzipfile.assert_called_with(filename=sanitized_filename,
+ mode=ANY,
+ fileobj=ANY)
+
+ def test_tor2web_warning_headers(self):
+ resp = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("You appear to be using Tor2Web.", resp.data)
+
+ def test_tor2web_warning(self):
+ resp = self.client.get('/tor2web-warning')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Why is there a warning about Tor2Web?", resp.data)
+
+ def test_why_use_tor_browser(self):
+ resp = self.client.get('/use-tor')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("You Should Use Tor Browser", resp.data)
+
+ def test_why_journalist_key(self):
+ resp = self.client.get('/why-journalist-key')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Why download the journalist's public key?", resp.data)
+
+ def test_metadata_route(self):
+ resp = self.client.get('/metadata')
+ self.assertEqual(resp.status_code, 200)
+ self.assertEqual(resp.headers.get('Content-Type'), 'application/json')
+ self.assertEqual(json.loads(resp.data.decode('utf-8')).get('sd_version'), version.__version__)
+
+ @patch('crypto_util.hash_codename')
+ def test_login_with_overly_long_codename(self, mock_hash_codename):
+ """Attempting to login with an overly long codename should result in
+ an error, and scrypt should not be called to avoid DoS."""
+ overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1)
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=overly_long_codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Sorry, that is not a recognized codename.", resp.data)
+ self.assertFalse(mock_hash_codename.called,
+ "Called hash_codename for codename w/ invalid "
+ "length")
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_store.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+import os
+import unittest
+import zipfile
+
+import crypto_util
+os.environ['SECUREDROP_ENV'] = 'test'
+import config
+from db import db_session, Source
+import mock
+import store
+import utils
+
+
+class TestStore(unittest.TestCase):
+
+ """The set of tests for store.py."""
+
+ def setUp(self):
+ utils.env.setup()
+
+ def tearDown(self):
+ utils.env.teardown()
+ db_session.remove()
+
+ def test_verify_path_not_absolute(self):
+ with self.assertRaises(store.PathException):
+ store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))
+
+ def test_verify_in_store_dir(self):
+ with self.assertRaisesRegexp(store.PathException, 'Invalid directory'):
+ store.verify(config.STORE_DIR + "_backup")
+
+ def test_verify_store_dir_not_absolute(self):
+ STORE_DIR = config.STORE_DIR
+ try:
+ with self.assertRaisesRegexp(store.PathException,
+ 'config.STORE_DIR\(\S*\) is not absolute'):
+ config.STORE_DIR = '.'
+ store.verify('something')
+ finally:
+ config.STORE_DIR = STORE_DIR
+
+ def test_get_zip(self):
+ source, _ = utils.db_helper.init_source()
+ submissions = utils.db_helper.submit(source, 2)
+ filenames = [os.path.join(config.STORE_DIR,
+ source.filesystem_id,
+ submission.filename)
+ for submission in submissions]
+
+ archive = zipfile.ZipFile(store.get_bulk_archive(submissions))
+ archivefile_contents = archive.namelist()
+
+ for archived_file, actual_file in zip(archivefile_contents, filenames):
+ actual_file_content = open(actual_file).read()
+ zipped_file_content = archive.read(archived_file)
+ self.assertEquals(zipped_file_content, actual_file_content)
+
+ def test_rename_valid_submission(self):
+ source, _ = utils.db_helper.init_source()
+ old_journalist_filename = source.journalist_filename
+ old_filename = utils.db_helper.submit(source, 1)[0].filename
+ new_journalist_filename = 'nestor_makhno'
+ expected_filename = old_filename.replace(old_journalist_filename,
+ new_journalist_filename)
+ actual_filename = store.rename_submission(source.filesystem_id, old_filename,
+ new_journalist_filename)
+ self.assertEquals(actual_filename, expected_filename)
+
+ @mock.patch('store.subprocess.check_call')
+ def test_secure_unlink(self, mock_check_call):
+ path = os.path.join(config.STORE_DIR, 'FILENAME')
+ self.assertEqual(store.secure_unlink(path), "success")
+ mock_check_call.assert_called_with(['srm', path])
+
+ @mock.patch('store.subprocess.check_call')
+ def test_delete_source_directory(self, mock_check_call):
+ path = os.path.join(config.STORE_DIR, 'DIRNAME')
+ self.assertEqual(store.delete_source_directory('DIRNAME'), "success")
+ mock_check_call.assert_called_with(['srm', '-r', path])
diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_template_filters.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+from datetime import datetime, timedelta
+import os
+import unittest
+
+import template_filters
+
+class TestTemplateFilters(unittest.TestCase):
+
+ def test_datetimeformat_default_fmt(self):
+ result = template_filters.datetimeformat(datetime(2016, 1, 1, 1, 1, 1))
+ self.assertEquals("Jan 01, 2016 01:01 AM", result)
+
+ def test_datetimeformat_unusual_fmt(self):
+ result = template_filters.datetimeformat(datetime(2016, 1, 1, 1, 1, 1),
+ fmt="%b %d %Y")
+ self.assertEquals("Jan 01 2016", result)
+
+ def test_relative_timestamp_seconds(self):
+ test_time = datetime.utcnow() - timedelta(seconds=5)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertIn("seconds", result)
+
+ def test_relative_timestamp_one_minute(self):
+ test_time = datetime.utcnow() - timedelta(minutes=1)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals("a minute", result)
+
+ def test_relative_timestamp_minutes(self):
+ test_time = datetime.utcnow() - timedelta(minutes=10)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals("10 minutes", result)
+
+ def test_relative_timestamp_one_hour(self):
+ test_time = datetime.utcnow() - timedelta(hours=1)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals("an hour", result)
+
+ def test_relative_timestamp_hours(self):
+ test_time = datetime.utcnow() - timedelta(hours=10)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals("10 hours", result)
+
+ def test_relative_timestamp_one_day(self):
+ test_time = datetime.utcnow() - timedelta(days=1)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals("a day", result)
+
+ def test_relative_timestamp_days(self):
+ test_time = datetime.utcnow() - timedelta(days=4)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals("4 days", result)
+
+ def test_relative_timestamp_none(self):
+ test_time = datetime.utcnow() - timedelta(days=999)
+ result = template_filters._relative_timestamp(test_time)
+ self.assertEquals(None, result)
diff --git a/securedrop/tests/test_unit_journalist.py b/securedrop/tests/test_unit_journalist.py
deleted file mode 100644
--- a/securedrop/tests/test_unit_journalist.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-import os
-from cStringIO import StringIO
-import unittest
-import zipfile
-
-import mock
-
-from flask import url_for
-from flask.ext.testing import TestCase
-
-import crypto_util
-import journalist
-import common
-from db import db_session, Source, Journalist, InvalidPasswordLength
-
-# Set environment variable so config.py uses a test environment
-os.environ['SECUREDROP_ENV'] = 'test'
-
-
-class TestJournalist(TestCase):
-
- def create_app(self):
- return journalist.app
-
- def setUp(self):
- common.shared_setup()
-
- # Patch the two-factor verification to avoid intermittent errors
- patcher = mock.patch('db.Journalist.verify_token')
- self.addCleanup(patcher.stop)
- self.mock_journalist_verify_token = patcher.start()
- self.mock_journalist_verify_token.return_value = True
-
- # Set up test users
- self.user_pw = "bar"
- self.user = Journalist(username="foo",
- password=self.user_pw)
- self.admin_user_pw = "admin"
- self.admin_user = Journalist(username="admin",
- password=self.admin_user_pw,
- is_admin=True)
- db_session.add(self.user)
- db_session.add(self.admin_user)
- db_session.commit()
-
- def tearDown(self):
- common.shared_teardown()
-
- def test_index_should_redirect_to_login(self):
- res = self.client.get(url_for('index'))
- self.assert_redirects(res, url_for('login'))
-
- def test_invalid_user_login_should_fail(self):
- res = self.client.post(url_for('login'), data=dict(
- username='invalid',
- password='invalid',
- token='123456'))
- self.assert200(res)
- self.assertIn("Login failed", res.data)
-
- def test_valid_user_login_should_succeed(self):
- res = self.client.post(url_for('login'), data=dict(
- username=self.user.username,
- password=self.user_pw,
- token=self.user.totp.now()),
- follow_redirects=True)
-
- self.assert200(res) # successful login redirects to index
- self.assertIn("Sources", res.data)
- self.assertIn("No documents have been submitted!", res.data)
-
- def test_normal_and_admin_user_login_should_redirect_to_index(self):
- """Normal users and admin users should both redirect to the index page after logging in successfully"""
- res = self.client.post(url_for('login'), data=dict(
- username=self.user.username,
- password=self.user_pw,
- token=self.user.totp.now()))
- self.assert_redirects(res, url_for('index'))
-
- res = self.client.post(url_for('login'), data=dict(
- username=self.admin_user.username,
- password=self.admin_user_pw,
- token=self.admin_user.totp.now()))
- self.assert_redirects(res, url_for('index'))
-
- def test_admin_user_has_admin_link_in_index(self):
- res = self.client.post(url_for('login'), data=dict(
- username=self.admin_user.username,
- password=self.admin_user_pw,
- token=self.admin_user.totp.now()),
- follow_redirects=True)
- admin_link = '<a href="{}">{}</a>'.format(url_for('admin_index'), "Admin")
- self.assertIn(admin_link, res.data)
-
- def _login_user(self):
- self.client.post(url_for('login'), data=dict(
- username=self.user.username,
- password=self.user_pw,
- token=self.user.totp.now()),
- follow_redirects=True)
-
- def _login_admin(self):
- self.client.post(url_for('login'), data=dict(
- username=self.admin_user.username,
- password=self.admin_user_pw,
- token=self.admin_user.totp.now()),
- follow_redirects=True)
-
- def test_admin_index(self):
- self._login_admin()
- res = self.client.get(url_for('admin_index'))
- self.assert200(res)
- self.assertIn("Admin Interface", res.data)
-
- def test_admin_authorization_for_gets(self):
- admin_urls = [url_for('admin_index'), url_for('admin_add_user'),
- url_for('admin_edit_user', user_id=1)]
-
- self._login_user()
- for admin_url in admin_urls:
- res = self.client.get(admin_url)
- self.assert_status(res, 302)
-
- def test_admin_authorization_for_posts(self):
- admin_urls = [url_for('admin_reset_two_factor_totp'),
- url_for('admin_reset_two_factor_hotp'), url_for('admin_add_user', user_id=1),
- url_for('admin_new_user_two_factor'), url_for('admin_reset_two_factor_totp'),
- url_for('admin_reset_two_factor_hotp'), url_for('admin_edit_user', user_id=1),
- url_for('admin_delete_user', user_id=1)]
- self._login_user()
- for admin_url in admin_urls:
- res = self.client.post(admin_url)
- self.assert_status(res, 302)
-
- def test_user_authorization_for_gets(self):
- urls = [url_for('index'), url_for('col', sid='1'),
- url_for('doc', sid='1', fn='1')]
-
- for url in urls:
- res = self.client.get(url)
- self.assert_status(res, 302)
-
- def test_user_authorization_for_posts(self):
- urls = [url_for('add_star', sid='1'), url_for('remove_star', sid='1'),
- url_for('col_process'), url_for('col_delete_single', sid='1'),
- url_for('reply'), url_for('generate_code'), url_for('bulk')]
- for url in urls:
- res = self.client.post(url)
- self.assert_status(res, 302)
-
- # TODO: more tests for admin interface
-
- def test_bulk_download(self):
- sid = 'EQZGCJBRGISGOTC2NZVWG6LILJBHEV3CINNEWSCLLFTUWZJPKJFECLS2NZ4G4U3QOZCFKTTPNZMVIWDCJBBHMUDBGFHXCQ3R'
- source = Source(sid, crypto_util.display_id())
- db_session.add(source)
- db_session.commit()
- files = ['1-abc1-msg.gpg', '2-abc2-msg.gpg']
- filenames = common.setup_test_docs(sid, files)
-
- self._login_user()
- rv = self.client.post('/bulk', data=dict(
- action='download',
- sid=sid,
- doc_names_selected=files
- ))
-
- self.assertEqual(rv.status_code, 200)
- self.assertEqual(rv.content_type, 'application/zip')
- self.assertTrue(zipfile.is_zipfile(StringIO(rv.data)))
- self.assertTrue(zipfile.ZipFile(StringIO(rv.data)).getinfo(
- os.path.join(source.journalist_filename, files[0])
- ))
-
- def test_max_password_length(self):
- """Creating a Journalist with a password that is greater than the
- maximum password length should raise an exception"""
- overly_long_password = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
- with self.assertRaises(InvalidPasswordLength):
- temp_journalist = Journalist(
- username="My Password is Too Big!",
- password=overly_long_password)
-
-
-if __name__ == "__main__":
- unittest.main(verbosity=2)
diff --git a/securedrop/tests/test_unit_source.py b/securedrop/tests/test_unit_source.py
deleted file mode 100644
--- a/securedrop/tests/test_unit_source.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-import common
-import os
-import re
-import unittest
-from cStringIO import StringIO
-
-from bs4 import BeautifulSoup
-from flask.ext.testing import TestCase
-from flask import session, escape
-from mock import patch, ANY
-import source
-from db import Source
-os.environ['SECUREDROP_ENV'] = 'test'
-
-
-class TestSource(TestCase):
-
- def create_app(self):
- return source.app
-
- def setUp(self):
- common.shared_setup()
-
- def tearDown(self):
- common.shared_teardown()
-
- def test_index(self):
- """Test that the landing page loads and looks how we expect"""
- response = self.client.get('/')
- self.assertEqual(response.status_code, 200)
- self.assertIn("Submit documents for the first time", response.data)
- self.assertIn("Already submitted something?", response.data)
-
- def _find_codename(self, html):
- """Find a source codename (diceware passphrase) in HTML"""
- # Codenames may contain HTML escape characters, and the wordlist
- # contains various symbols.
- codename_re = r'<p id="codename">(?P<codename>[a-z0-9 &#;?:=@_.*+()\'"$%!-]+)</p>'
- codename_match = re.search(codename_re, html)
- self.assertIsNotNone(codename_match)
- return codename_match.group('codename')
-
- def test_generate(self):
- with self.client as c:
- rv = c.get('/generate')
- self.assertEqual(rv.status_code, 200)
- session_codename = session['codename']
- self.assertIn("Remember this codename and keep it secret", rv.data)
- self.assertIn(
- "To protect your identity, we're assigning you a unique codename.",
- rv.data)
- codename = self._find_codename(rv.data)
- # default codename length is 7 words
- self.assertEqual(len(codename.split()), 7)
- # codename is also stored in the session - make sure it matches the
- # codename displayed to the source
- self.assertEqual(codename, escape(session_codename))
-
- def test_generate_has_login_link(self):
- """The generate page should have a link to remind people to login
- if they already have a codename, rather than create a new one.
- """
- rv = self.client.get('/generate')
- self.assertIn("Already have a codename?", rv.data)
- soup = BeautifulSoup(rv.data)
- already_have_codename_link = soup.select('a#already-have-codename')[0]
- self.assertEqual(already_have_codename_link['href'], '/login')
-
- def test_generate_already_logged_in(self):
- self._new_codename()
- # Make sure it redirects to /lookup when logged in
- rv = self.client.get('/generate')
- self.assertEqual(rv.status_code, 302)
- # Make sure it flashes the message on the lookup page
- rv = self.client.get('/generate', follow_redirects=True)
- # Should redirect to /lookup
- self.assertEqual(rv.status_code, 200)
- self.assertIn("because you are already logged in.", rv.data)
-
- def test_create(self):
- with self.client as c:
- rv = c.get('/generate')
- codename = session['codename']
- rv = c.post('/create', follow_redirects=True)
- self.assertTrue(session['logged_in'])
- # should be redirected to /lookup
- self.assertIn("Submit documents and messages", rv.data)
-
- def _new_codename(self):
- return common.new_codename(self.client, session)
-
- def test_lookup(self):
- """Test various elements on the /lookup page"""
- codename = self._new_codename()
- rv = self.client.post('login', data=dict(codename=codename),
- follow_redirects=True)
- # redirects to /lookup
- self.assertIn("public key", rv.data)
- # download the public key
- rv = self.client.get('journalist-key')
- self.assertIn("BEGIN PGP PUBLIC KEY BLOCK", rv.data)
-
- def test_login_and_logout(self):
- rv = self.client.get('/login')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Login to check for responses", rv.data)
-
- codename = self._new_codename()
- with self.client as c:
- rv = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Submit documents and messages", rv.data)
- self.assertTrue(session['logged_in'])
- common.logout(c)
-
- with self.client as c:
- rv = self.client.post('/login', data=dict(codename='invalid'),
- follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn('Sorry, that is not a recognized codename.', rv.data)
- self.assertNotIn('logged_in', session)
-
- with self.client as c:
- rv = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertTrue(session['logged_in'])
- rv = c.get('/logout', follow_redirects=True)
- self.assertTrue(not session)
- self.assertIn('Thank you for logging out!', rv.data)
-
- def test_login_with_whitespace(self):
- """Test that codenames with leading or trailing whitespace still work"""
- def login_test(codename):
- rv = self.client.get('/login')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Login to check for responses", rv.data)
-
- with self.client as c:
- rv = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Submit documents and messages", rv.data)
- self.assertTrue(session['logged_in'])
- common.logout(c)
-
- codename = self._new_codename()
- login_test(codename + ' ')
- login_test(' ' + codename + ' ')
- login_test(' ' + codename)
-
- def _dummy_submission(self):
- """
- Helper to make a submission (content unimportant), mostly useful in
- testing notification behavior for a source's first vs. their
- subsequent submissions
- """
- return self.client.post('/submit', data=dict(
- msg="Pay no attention to the man behind the curtain.",
- fh=(StringIO(''), ''),
- ), follow_redirects=True)
-
- def test_initial_submission_notification(self):
- """
- Regardless of the type of submission (message, file, or both), the
- first submission is always greeted with a notification
- reminding sources to check back later for replies.
- """
- self._new_codename()
- rv = self._dummy_submission()
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Thanks for submitting something to SecureDrop! Please check back later for replies.", rv.data)
-
- def test_submit_message(self):
- self._new_codename()
- self._dummy_submission()
- rv = self.client.post('/submit', data=dict(
- msg="This is a test.",
- fh=(StringIO(''), ''),
- ), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Thanks! We received your message.", rv.data)
-
- def test_submit_file(self):
- self._new_codename()
- self._dummy_submission()
- rv = self.client.post('/submit', data=dict(
- msg="",
- fh=(StringIO('This is a test'), 'test.txt'),
- ), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn(escape('{} "{}"'.format("Thanks! We received your document", "test.txt")), rv.data)
-
- def test_submit_both(self):
- self._new_codename()
- self._dummy_submission()
- rv = self.client.post('/submit', data=dict(
- msg="This is a test",
- fh=(StringIO('This is a test'), 'test.txt'),
- ), follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Thanks! We received your message.", rv.data)
- self.assertIn(escape('{} "{}"'.format("Thanks! We received your document", 'test.txt')), rv.data)
-
- @patch('gzip.GzipFile')
- def test_submit_sanitizes_filename(self, gzipfile):
- """Test that upload file name is sanitized"""
- insecure_filename = '../../bin/gpg'
- sanitized_filename = 'bin_gpg'
-
- self._new_codename()
- self.client.post('/submit', data=dict(
- msg="",
- fh=(StringIO('This is a test'), insecure_filename),
- ), follow_redirects=True)
- gzipfile.assert_called_with(filename=sanitized_filename,
- mode=ANY,
- fileobj=ANY)
-
- def test_tor2web_warning_headers(self):
- rv = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
- self.assertEqual(rv.status_code, 200)
- self.assertIn("You appear to be using Tor2Web.", rv.data)
-
- def test_tor2web_warning(self):
- rv = self.client.get('/tor2web-warning')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Why is there a warning about Tor2Web?", rv.data)
-
- def test_why_journalist_key(self):
- rv = self.client.get('/why-journalist-key')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Why download the journalist's public key?", rv.data)
-
- def test_howto_disable_js(self):
- rv = self.client.get('/howto-disable-js')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Turn the Security Slider to High to Protect Your Anonymity", rv.data)
-
- @patch('crypto_util.hash_codename')
- def test_login_with_overly_long_codename(self, mock_hash_codename):
- """Attempting to login with an overly long codename should result in
- an error, and scrypt should not be called to avoid DoS."""
- overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1)
- with self.client as client:
- rv = client.post(
- '/login',
- data=dict(codename=overly_long_codename),
- follow_redirects=True)
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Sorry, that is not a recognized codename.", rv.data)
- self.assertFalse(mock_hash_codename.called,
- "Called hash_codename for codename w/ invalid length")
-
-
-if __name__ == "__main__":
- unittest.main(verbosity=2)
diff --git a/securedrop/tests/test_unit_store.py b/securedrop/tests/test_unit_store.py
deleted file mode 100644
--- a/securedrop/tests/test_unit_store.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-import os
-import unittest
-import zipfile
-import config
-import store
-import common
-from db import db_session, Source
-import crypto_util
-
-# Set environment variable so config.py uses a test environment
-os.environ['SECUREDROP_ENV'] = 'test'
-
-
-class TestStore(unittest.TestCase):
- """The set of tests for store.py."""
- def setUp(self):
- common.shared_setup()
-
- def tearDown(self):
- common.shared_teardown()
-
- def test_verify(self):
- with self.assertRaises(store.PathException):
- store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))
- with self.assertRaises(store.PathException):
- store.verify(config.STORE_DIR + "_backup")
-
- def test_get_zip(self):
- sid = 'EQZGCJBRGISGOTC2NZVWG6LILJBHEV3CINNEWSCLLFTUWZJPKJFECLS2NZ4G4U3QOZCFKTTPNZMVIWDCJBBHMUDBGFHXCQ3R'
- source = Source(sid, crypto_util.display_id())
- db_session.add(source)
- db_session.commit()
-
- files = ['1-abc1-msg.gpg', '2-abc2-msg.gpg']
- filenames = common.setup_test_docs(sid, files)
-
- archive = zipfile.ZipFile(store.get_bulk_archive(filenames))
-
- archivefile_contents = archive.namelist()
-
- for archived_file, actual_file in zip(archivefile_contents, filenames):
- actual_file_content = open(actual_file).read()
- zipped_file_content = archive.read(archived_file)
- self.assertEquals(zipped_file_content, actual_file_content)
-
-
-if __name__ == "__main__":
- unittest.main(verbosity=2)
diff --git a/securedrop/tests/utils/__init__.py b/securedrop/tests/utils/__init__.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/utils/__init__.py
@@ -0,0 +1,3 @@
+import async
+import db_helper
+import env
diff --git a/securedrop/tests/utils/async.py b/securedrop/tests/utils/async.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/utils/async.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+"""Testing utilities to block on and react to the success, failure, or
+timeout of asynchronous processes.
+"""
+import time
+
+REDIS_SUCCESS_RETURN_VALUE = 'success'
+
+
+def wait_for_redis_worker(job, timeout=5):
+ """Raise an error if the Redis job doesn't complete successfully
+ before a timeout.
+
+ :param rq.job.Job job: A Redis job to wait for.
+
+ :param int timeout: Seconds to wait for the job to finish.
+
+ :raises: An :exc:`AssertionError`.
+ """
+ start_time = time.time()
+ while time.time() - start_time < timeout:
+ if job.result == REDIS_SUCCESS_RETURN_VALUE:
+ return
+ elif job.result not in (None, REDIS_SUCCESS_RETURN_VALUE):
+ assert False, 'Redis worker failed!'
+ time.sleep(0.1)
+ assert False, 'Redis worker timed out!'
+
+
+def wait_for_assertion(assertion_expression, timeout=5):
+ """Calls an assertion_expression repeatedly, until the assertion
+ passes or a timeout is reached.
+
+ :param assertion_expression: An assertion expression. Generally
+ a call to a
+ :class:`unittest.TestCase` method.
+
+ :param int timeout: Seconds to wait for the function to return.
+ """
+ start_time = time.time()
+ while time.time() - start_time < timeout:
+ try:
+ return assertion_expression()
+ except AssertionError:
+ time.sleep(0.1)
+ # one more try, which will raise any errors if they are outstanding
+ return assertion_expression()
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/utils/db_helper.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+"""Testing utilities that involve database (and often related
+filesystem) interaction.
+"""
+import mock
+import os
+
+os.environ['SECUREDROP_ENV'] = 'test'
+import config
+import crypto_util
+import db
+import store
+
+## db.{Journalist, Reply}
+
+def init_journalist(is_admin=False):
+ """Initialize a journalist into the database. Return their
+ :class:`db.Journalist` object and password string.
+
+ :param bool is_admin: Whether the user is an admin.
+
+ :returns: A 2-tuple. The first entry, an :obj:`db.Journalist`
+ corresponding to the row just added to the database. The
+ second, their password string.
+ """
+ username = crypto_util.genrandomid()
+ user_pw = crypto_util.genrandomid()
+ user = db.Journalist(username, user_pw, is_admin)
+ db.db_session.add(user)
+ db.db_session.commit()
+ return user, user_pw
+
+
+def reply(journalist, source, num_replies):
+ """Generates and submits *num_replies* replies to *source*
+ from *journalist*. Returns reply objects as a list.
+
+ :param db.Journalist journalist: The journalist to write the
+ reply from.
+
+ :param db.Source source: The source to send the reply to.
+
+ :param int num_replies: Number of random-data replies to make.
+
+ :returns: A list of the :class:`db.Reply`s submitted.
+ """
+ assert num_replies >= 1
+ replies = []
+ for _ in range(num_replies):
+ source.interaction_count += 1
+ fname = "{}-{}-reply.gpg".format(source.interaction_count,
+ source.journalist_filename)
+ crypto_util.encrypt(str(os.urandom(1)),
+ [
+ crypto_util.getkey(source.filesystem_id),
+ config.JOURNALIST_KEY
+ ],
+ store.path(source.filesystem_id, fname))
+ reply = db.Reply(journalist, source, fname)
+ replies.append(reply)
+ db.db_session.add(reply)
+
+ db.db_session.commit()
+ return replies
+
+
+def mock_verify_token(testcase):
+ """Patch a :class:`unittest.TestCase` (or derivative class) so TOTP
+ token verification always succeeds.
+
+ :param unittest.TestCase testcase: The test case for which to patch
+ TOTP verification.
+ """
+ patcher = mock.patch('db.Journalist.verify_token')
+ testcase.addCleanup(patcher.stop)
+ testcase.mock_journalist_verify_token = patcher.start()
+ testcase.mock_journalist_verify_token.return_value = True
+
+
+def mark_downloaded(*submissions):
+ """Mark *submissions* as downloaded in the database.
+
+ :param db.Submission submissions: One or more submissions that
+ should be marked as downloaded.
+ """
+ for submission in submissions:
+ submission.downloaded = True
+ db.db_session.commit()
+
+
+## db.{Source,Submission}
+
+def init_source():
+ """Initialize a source: create their database record, the
+ filesystem directory that stores their submissions & replies,
+ and their GPG key encrypted with their codename. Return a source
+ object and their codename string.
+
+ :returns: A 2-tuple. The first entry, the :class:`db.Source`
+ initialized. The second, their codename string.
+ """
+ # Create source identity and database record
+ codename = crypto_util.genrandomid()
+ filesystem_id = crypto_util.hash_codename(codename)
+ journalist_filename = crypto_util.display_id()
+ source = db.Source(filesystem_id, journalist_filename)
+ db.db_session.add(source)
+ db.db_session.commit()
+ # Create the directory to store their submissions and replies
+ os.mkdir(store.path(source.filesystem_id))
+ # Generate their key, blocking for as long as necessary
+ crypto_util.genkeypair(source.filesystem_id, codename)
+
+ return source, codename
+
+
+def submit(source, num_submissions):
+ """Generates and submits *num_submissions*
+ :class:`db.Submission`s on behalf of a :class:`db.Source`
+ *source*.
+
+    :param db.Source source: The source on whose behalf to make
+ submissions.
+
+ :param int num_submissions: Number of random-data submissions
+ to make.
+
+ :returns: A list of the :class:`db.Submission`s submitted.
+ """
+ assert num_submissions >= 1
+ submissions = []
+ for _ in range(num_submissions):
+ source.interaction_count += 1
+ fpath = store.save_message_submission(source.filesystem_id,
+ source.interaction_count,
+ source.journalist_filename,
+ str(os.urandom(1)))
+ submission = db.Submission(source, fpath)
+ submissions.append(submission)
+ db.db_session.add(submission)
+
+ db.db_session.commit()
+ return submissions
+
+
+# NOTE: this method is potentially dangerous to rely on for now due
+# to the fact flask_testing.TestCase only uses one request context
+# per method (see
+# https://github.com/freedomofpress/securedrop/issues/1444).
+def new_codename(client, session):
+ """Helper function to go through the "generate codename" flow.
+ """
+ with client as c:
+ c.get('/generate')
+ codename = session['codename']
+ c.post('/create')
+ return codename
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/utils/env.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+"""Testing utilities related to setup and teardown of test environment.
+"""
+import os
+from os.path import abspath, dirname, exists, isdir, join, realpath
+import shutil
+import subprocess
+
+import gnupg
+
+os.environ['SECUREDROP_ENV'] = 'test'
+import config
+import crypto_util
+from db import init_db, db_session
+
+FILES_DIR = abspath(join(dirname(realpath(__file__)), '..', 'files'))
+
+# TODO: the PID file for the redis worker is hard-coded below. Ideally this
+# constant would be provided by a test harness. It has been intentionally
+# omitted from `config.py.example` in order to isolate the test vars from prod
+# vars. When refactoring the test suite, note that test_worker_pidfile
+# is also hard-coded in `manage.py`.
+TEST_WORKER_PIDFILE = "/tmp/securedrop_test_worker.pid"
+
+
+def create_directories():
+ """Create directories for the file store and the GPG keyring.
+ """
+ for d in (config.SECUREDROP_DATA_ROOT, config.STORE_DIR,
+ config.GPG_KEY_DIR, config.TEMP_DIR):
+ if not isdir(d):
+ os.mkdir(d)
+
+
+def init_gpg():
+ """Initialize the GPG keyring and import the journalist key for
+ testing.
+ """
+ gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR)
+ # Faster to import a pre-generated key than to gen a new one every time.
+ for keyfile in (join(FILES_DIR, "test_journalist_key.pub"),
+ join(FILES_DIR, "test_journalist_key.sec")):
+ gpg.import_keys(open(keyfile).read())
+ return gpg
+
+
+def setup():
+ """Set up the file system, GPG, and database."""
+ create_directories()
+ init_gpg()
+ init_db()
+ # Do tests that should always run on app startup
+ crypto_util.do_runtime_tests()
+ # Start the Python-RQ worker if it's not already running
+ if not exists(TEST_WORKER_PIDFILE):
+ subprocess.Popen(["rqworker",
+ "-P", config.SECUREDROP_ROOT,
+ "--pid", TEST_WORKER_PIDFILE])
+
+
+def teardown():
+ db_session.remove()
+ try:
+ shutil.rmtree(config.SECUREDROP_DATA_ROOT)
+ except OSError as exc:
+ if 'No such file or directory' not in exc:
+ raise
diff --git a/spec_tests/.bundle/config b/spec_tests/.bundle/config
deleted file mode 100644
--- a/spec_tests/.bundle/config
+++ /dev/null
@@ -1,3 +0,0 @@
----
-BUNDLE_PATH: ./gems/
-BUNDLE_DISABLE_SHARED_GEMS: '1'
diff --git a/spec_tests/.gemrc b/spec_tests/.gemrc
deleted file mode 100644
--- a/spec_tests/.gemrc
+++ /dev/null
@@ -1,8 +0,0 @@
-:backtrace: false
-:benchmark: false
-:bulk_threshold: 1000
-:sources:
-- http://rubygems.org/
-:update_sources: true
-:verbose: true
-gem: --no-ri --no-rdoc
diff --git a/spec_tests/Gemfile b/spec_tests/Gemfile
deleted file mode 100644
--- a/spec_tests/Gemfile
+++ /dev/null
@@ -1,5 +0,0 @@
-source 'https://rubygems.org'
-
-gem 'serverspec'
-gem 'rake'
-
diff --git a/spec_tests/Rakefile b/spec_tests/Rakefile
deleted file mode 100644
--- a/spec_tests/Rakefile
+++ /dev/null
@@ -1,8 +0,0 @@
-require 'rake'
-require 'rspec/core/rake_task'
-
-RSpec::Core::RakeTask.new(:spec) do |t|
- t.pattern = 'spec/*/*_spec.rb'
-end
-
-task :default => :spec
diff --git a/spec_tests/spec/localhost/apache_spec.rb b/spec_tests/spec/localhost/apache_spec.rb
deleted file mode 100644
--- a/spec_tests/spec/localhost/apache_spec.rb
+++ /dev/null
@@ -1,89 +0,0 @@
-require 'spec_helper'
-
-['apache2-mpm-worker', 'libapache2-mod-wsgi', 'libapache2-mod-xsendfile' ].each do |pkg|
- describe package(pkg) do
- it { should be_installed }
- end
-end
-
-# Are the apache config file there
-describe file('/etc/apache2/apache2.conf') do
- it { should be_file }
- it { should be_owned_by 'root' }
- it { should be_mode '644' }
- its(:content) { should match "ErrorLog /dev/null" }
- its(:content) { should match "LogLevel crit" }
- its(:content) { should match "ServerTokens Prod" }
- its(:content) { should match "ServerSignature Off" }
- its(:content) { should match "TraceEnable Off" }
-end
-
-describe file('/etc/apache2/ports.conf') do
- it { should be_file }
- it { should be_owned_by 'root' }
- it { should be_mode '644' }
- its(:content) { should match "Listen 0.0.0.0:8080" }
- its(:content) { should match "Listen 0.0.0.0:80" }
-end
-
-describe file('/etc/apache2/security') do
- it { should be_file }
- it { should be_owned_by 'root' }
- it { should be_mode '644' }
- its(:content) { should match "ServerTokens Prod" }
- its(:content) { should match "ServerSignature Off" }
- its(:content) { should match "TraceEnable Off" }
-end
-
-describe file('/etc/apache2/sites-available/document.conf') do
- it { should be_file }
- it { should be_owned_by 'root' }
- it { should be_mode '644' }
- its(:content) { should match "<VirtualHost 0.0.0.0:8080>" }
- its(:content) { should match "WSGIScriptAlias / /var/www/document.wsgi/" }
-end
-
-describe file('/etc/apache2/sites-available/source.conf') do
- it { should be_file }
- it { should be_owned_by 'root' }
- it { should be_mode '644' }
- its(:content) { should match "<VirtualHost 0.0.0.0:80>" }
- its(:content) { should match "WSGIScriptAlias / /var/www/source.wsgi/" }
- its(:content) { should match "ErrorLog /var/log/apache2/source-error.log" }
-end
-
-# are the correct apache modules enabled
-['access_compat','authn_core','alias','authz_core','authz_host','authz_user','deflate','filter','dir','headers','mime','mpm_event','negotiation','reqtimeout','rewrite','wsgi','xsendfile'].each do |enModules|
- describe command("a2query -m #{enModules}") do
- it { should return_stdout /enabled by/ }
- end
-end
-
-# are the correct apache modules disabled
-['auth_basic','authn_file','autoindex','env','setenvif','status'].each do |disModules|
- describe command("a2query -m #{disModules}") do
- it { should return_stderr /No module matches/ }
- end
-end
-
-# Are default sites disabled?
-['000-default'].each do |dissites|
- describe command("a2query -s #{dissites}") do
- it { should return_stderr /No site matches/ }
- end
-end
-
-# Are default html files removed?
-
-# Is apache running as user X
-
-# Is apache listening only on localhost:80 and 8080
-describe port(80) do
- it { should be_listening.with('tcp') }
-end
-describe port(8080) do
- it { should be_listening.with('tcp') }
-end
-
-# Is the sites-available linked to sites-enabled source.conf document.conf
-# Check firewall rule
diff --git a/spec_tests/spec/localhost/ossec.rb b/spec_tests/spec/localhost/ossec.rb
deleted file mode 100644
--- a/spec_tests/spec/localhost/ossec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-require 'spec_helper'
-
-['postfix', 'procmail'].each do |pkg|
- describe package(pkg) do
- it { should be_installed }
- end
-end
-
-describe file('/etc/postfix/main.cf') do
- it { should be_file }
- its(:content) { should match /^mailbox_command = \/usr\/bin\/procmail$/ }
-end
-
-describe file("/var/ossec/.gnupg") do
- it { should be_directory }
- it { should be_owned_by "ossec" }
- it { should be_mode '700' }
-end
-
-describe file("/var/ossec/.procmailrc") do
- its(:content) { should match "/var/ossec/send_encrypted_alarm.sh" }
-end
-
-describe file("/var/ossec/send_encrypted_alarm.sh") do
- it { should be_mode '0755' }
-end
-
-describe file("/var/log/procmail.log") do
- it { should be_owned_by "ossec" }
-end
-
diff --git a/spec_tests/spec/localhost/securedrop_app_spec.rb b/spec_tests/spec/localhost/securedrop_app_spec.rb
deleted file mode 100644
--- a/spec_tests/spec/localhost/securedrop_app_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-require 'spec_helper'
-
-['/var/www/securedrop'].each do |myDir|
- describe file(myDir) do
- it { should be_directory }
- it { should be_owned_by 'www-data' }
- it { should be_mode '700' }
- end
-end
-
-['/var/lib/securedrop','/var/lib/securedrop/store','/var/lib/securedrop/keys'].each do |myDir|
- describe file(myDir) do
- it { should be_directory }
- it { should be_owned_by 'www-data' }
- it { should be_mode '700' }
- end
-end
-
-describe file('/var/www/securedrop/config.py') do
- it { should be_file }
- it { should be_owned_by 'www-data' }
- it { should be_mode '600' }
-end
diff --git a/spec_tests/spec/localhost/tor_spec.rb b/spec_tests/spec/localhost/tor_spec.rb
deleted file mode 100644
--- a/spec_tests/spec/localhost/tor_spec.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-require 'spec_helper'
-
-describe package('tor') do
- it { should be_installed }
-end
-
-describe file('/etc/tor/torrc') do
- it { should be_file }
- its(:content) { should match "HiddenServiceAuthorizeClient stealth journalist" }
- its(:content) { should match "HiddenServiceAuthorizeClient stealth admin" }
-end
-
-describe command('sudo service tor status') do
- it { should return_exit_status 0 }
-end
diff --git a/spec_tests/spec/spec_helper.rb b/spec_tests/spec/spec_helper.rb
deleted file mode 100644
--- a/spec_tests/spec/spec_helper.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-require 'serverspec'
-require 'pathname'
-require 'net/ssh'
-
-include SpecInfra::Helper::Ssh
-include SpecInfra::Helper::DetectOS
-
-RSpec.configure do |c|
- if ENV['ASK_SUDO_PASSWORD']
- require 'highline/import'
- c.sudo_password = ask("Enter sudo password: ") { |q| q.echo = false }
- else
- c.sudo_password = ENV['SUDO_PASSWORD']
- end
- c.before :all do
- block = self.class.metadata[:example_group_block]
- if RUBY_VERSION.start_with?('1.8')
- file = block.to_s.match(/.*@(.*):[0-9]+>/)[1]
- else
- file = block.source_location.first
- end
- host = File.basename(Pathname.new(file).dirname)
- if c.host != host
- c.ssh.close if c.ssh
- c.host = host
- options = Net::SSH::Config.for(c.host)
- user = options[:user] || Etc.getlogin
- vagrant_up = `vagrant up app-staging`
- config = `vagrant ssh-config app-staging`
- if config != ''
- config.each_line do |line|
- if match = /HostName (.*)/.match(line)
- host = match[1]
- elsif match = /User (.*)/.match(line)
- user = match[1]
- elsif match = /IdentityFile (.*)/.match(line)
- options[:keys] = [match[1].gsub(/"/,'')]
- elsif match = /Port (.*)/.match(line)
- options[:port] = match[1]
- end
- end
- end
- c.ssh = Net::SSH.start(host, user, options)
- end
- end
-end
diff --git a/testinfra/ansible/test_validate_users.py b/testinfra/ansible/test_validate_users.py
new file mode 100644
--- /dev/null
+++ b/testinfra/ansible/test_validate_users.py
@@ -0,0 +1,19 @@
+import pytest
+import os
+
+
[email protected](reason="Validation not fully implemented yet")
[email protected]('username', [
+ 'root',
+ 'amnesia',
+])
+def test_validate_users(LocalCommand, username):
+ """
+ Check that Ansible halts execution of the playbook if the Admin
+ username is set to any disallowed value.
+ """
+ var_override = "--tags validate --extra-vars ssh_users={}".format(username)
+ os.environ['ANSIBLE_ARGS'] = var_override
+ c = LocalCommand("vagrant provision /staging/")
+
+ assert c.rc != 0
diff --git a/testinfra/app-code/test_haveged.py b/testinfra/app-code/test_haveged.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app-code/test_haveged.py
@@ -0,0 +1,34 @@
+def test_haveged_config(File):
+ """
+    Ensure haveged's low entropy watermark is sufficiently high.
+ """
+ f = File('/etc/default/haveged')
+ assert f.is_file
+ assert f.user == 'root'
+ assert f.group == 'root'
+ assert oct(f.mode) == '0644'
+ assert f.contains('^DAEMON_ARGS="-w 2400"$')
+
+
+def test_haveged_no_duplicate_lines(Command):
+ """
+ Regression test to check for duplicate entries. Earlier playbooks
+ for configuring the SD instances needlessly appended the `DAEMON_ARGS`
+ line everytime the playbook was run. Fortunately the duplicate lines don't
+ break the service, but it's still poor form.
+ """
+ c = Command("uniq --repeated /etc/default/haveged")
+ assert c.rc == 0
+ assert c.stdout == ""
+
+
+def test_haveged_is_running(Service, Sudo):
+ """
+ Ensure haveged service is running, to provide additional entropy.
+ """
+ # Sudo is necessary to read /proc when running under grsecurity,
+ # which the App hosts do. Not technically necessary under development.
+ with Sudo():
+ s = Service("haveged")
+ assert s.is_running
+ assert s.is_enabled
diff --git a/testinfra/app-code/test_redis_worker.py b/testinfra/app-code/test_redis_worker.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app-code/test_redis_worker.py
@@ -0,0 +1,44 @@
+import pytest
+import re
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
[email protected]('config_line', [
+ '[program:securedrop_worker]',
+ 'command=/usr/local/bin/rqworker',
+ "directory={}".format(securedrop_test_vars.securedrop_code),
+ 'autostart=true',
+ 'autorestart=true',
+ 'startretries=3',
+ 'stderr_logfile=/var/log/securedrop_worker/err.log',
+ 'stdout_logfile=/var/log/securedrop_worker/out.log',
+ "user={}".format(securedrop_test_vars.securedrop_user),
+ 'environment=HOME="/tmp/python-gnupg"',
+])
+def test_redis_worker_configuration(File, config_line):
+ """
+ Ensure SecureDrop Redis worker config for supervisor service
+ management is configured correctly.
+ """
+ f = File('/etc/supervisor/conf.d/securedrop_worker.conf')
+ # Config lines may have special characters such as [] which will
+ # throw off the regex matching, so let's escape those chars.
+ regex = re.escape(config_line)
+ assert f.contains('^{}$'.format(regex))
+
+
+def test_redis_worker_config_file(File):
+ """
+ Ensure SecureDrop Redis worker config for supervisor service
+ management has proper ownership and mode.
+
+ Using separate test so that the parametrization doesn't rerun
+ the file mode checks, which would be useless.
+ """
+ f = File('/etc/supervisor/conf.d/securedrop_worker.conf')
+ assert f.is_file
+ assert oct(f.mode) == '0644'
+ assert f.user == "root"
+ assert f.group == "root"
diff --git a/testinfra/app-code/test_securedrop_app_code.py b/testinfra/app-code/test_securedrop_app_code.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app-code/test_securedrop_app_code.py
@@ -0,0 +1,85 @@
+import os
+import pytest
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
+
+def test_apache_default_docroot_is_absent(File):
+ """
+ Ensure that the default docroot for Apache, containing static HTML
+ under Debian, has been removed. Leaving it in place can be a privacy
+ leak, as it displays version information by default.
+ """
+ assert not File('/var/www/html').exists
+
+
[email protected]('package', [
+ 'apparmor-utils',
+ 'gnupg2',
+ 'haveged',
+ 'python',
+ 'python-pip',
+ 'redis-server',
+ 'secure-delete',
+ 'sqlite',
+ 'supervisor',
+])
+def test_securedrop_application_apt_dependencies(Package, package):
+ """
+ Ensure apt dependencies required to install `securedrop-app-code`
+ are present. These should be pulled in automatically via apt,
+ due to specification in Depends in package control file.
+ """
+ assert Package(package).is_installed
+
+
+def test_securedrop_application_test_journalist_key(File, Sudo):
+ """
+ Ensure the SecureDrop Application GPG public key file is present.
+ This is a test-only pubkey provided in the repository strictly for testing.
+ """
+ pubkey_file = File("{}/test_journalist_key.pub".format(
+ securedrop_test_vars.securedrop_data))
+ # Sudo is only necessary when testing against app hosts, since the
+ # permissions are tighter. Let's elevate privileges so we're sure
+ # we can read the correct file attributes and test them.
+ with Sudo():
+ assert pubkey_file.is_file
+ assert pubkey_file.user == "root"
+ assert pubkey_file.group == "root"
+ assert oct(pubkey_file.mode) == "0644"
+
+ # Let's make sure the corresponding fingerprint is specified
+ # in the SecureDrop app configuration.
+ securedrop_config = File("{}/config.py".format(
+ securedrop_test_vars.securedrop_code))
+ with Sudo():
+ assert securedrop_config.is_file
+ # travis needs the config.py file owned by root; not sure why
+ # just saw this note in the travis.yml config
+ if hostenv == "travis":
+ assert securedrop_config.user == "root"
+ assert securedrop_config.group == "root"
+ else:
+ assert securedrop_config.user == securedrop_test_vars.securedrop_user
+ assert securedrop_config.group == securedrop_test_vars.securedrop_user
+ assert oct(securedrop_config.mode) == "0600"
+ assert securedrop_config.contains(
+ "^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$")
+
+
+def test_securedrop_application_sqlite_db(File, Sudo):
+ """
+ Ensure sqlite database exists for application. The database file should be
+ created by Ansible on first run.
+ """
+ # Sudo is necessary under the App hosts, which have restrictive file
+ # permissions on the doc root. Not technically necessary under dev host.
+ with Sudo():
+ f = File("{}/db.sqlite".format(securedrop_test_vars.securedrop_data))
+ assert f.is_file
+ assert f.user == securedrop_test_vars.securedrop_user
+ assert f.group == securedrop_test_vars.securedrop_user
+ assert oct(f.mode) == "0644"
diff --git a/testinfra/app/apache/test_apache_journalist_interface.py b/testinfra/app/apache/test_apache_journalist_interface.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/apache/test_apache_journalist_interface.py
@@ -0,0 +1,149 @@
+import pytest
+import re
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+# Setting once so it can be reused in multiple tests.
+wanted_apache_headers = [
+ 'Header edit Set-Cookie ^(.*)$ $1;HttpOnly',
+ 'Header always append X-Frame-Options: DENY',
+ 'Header set X-XSS-Protection: "1; mode=block"',
+ 'Header set X-Content-Type-Options: nosniff',
+ 'Header set X-Download-Options: noopen',
+ "Header set X-Content-Security-Policy: \"default-src 'self'\"",
+ "Header set Content-Security-Policy: \"default-src 'self'\"",
+ 'Header unset Etag',
+]
+
+# Test is not DRY; haven't figured out how to parametrize on
+# multiple inputs, so explicitly redeclaring test logic.
[email protected]("header", wanted_apache_headers)
+def test_apache_headers_journalist_interface(File, header):
+ """
+ Test for expected headers in Document Interface vhost config.
+ """
+ f = File("/etc/apache2/sites-available/journalist.conf")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+ header_regex = "^{}$".format(re.escape(header))
+ assert re.search(header_regex, f.content, re.M)
+
+# Block of directory declarations for Apache vhost is common
+# to both Source and Journalist interfaces. Hardcoding these values
+# across multiple test files to speed up development; they should be
+# written once and imported in a DRY manner.
+common_apache2_directory_declarations = """
+<Directory />
+ Options None
+ AllowOverride None
+ Order deny,allow
+ Deny from all
+</Directory>
+
+<Directory /var/www/>
+ Options None
+ AllowOverride None
+ <Limit GET POST HEAD>
+ Order allow,deny
+ allow from {apache_allow_from}
+ </Limit>
+ <LimitExcept GET POST HEAD>
+ Order deny,allow
+ Deny from all
+ </LimitExcept>
+</Directory>
+
+<Directory {securedrop_code}>
+ Options None
+ AllowOverride None
+ <Limit GET POST HEAD>
+ Order allow,deny
+ allow from {apache_allow_from}
+ </Limit>
+ <LimitExcept GET POST HEAD>
+ Order deny,allow
+ Deny from all
+ </LimitExcept>
+</Directory>
+""".lstrip().rstrip().format(
+ apache_allow_from=securedrop_test_vars.apache_allow_from,
+ securedrop_code=securedrop_test_vars.securedrop_code)
+
+
+# declare journalist-specific apache configs
[email protected]("apache_opt", [
+ "<VirtualHost {}:8080>".format(securedrop_test_vars.apache_listening_address),
+ "WSGIDaemonProcess journalist processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format(securedrop_test_vars.securedrop_code),
+ 'WSGIProcessGroup journalist',
+ 'WSGIScriptAlias / /var/www/journalist.wsgi',
+ 'Header set Cache-Control "no-store"',
+ "Alias /static {}/static".format(securedrop_test_vars.securedrop_code),
+ """
+<Directory {}/static>
+ Order allow,deny
+ Allow from all
+ # Cache static resources for 1 hour
+ Header set Cache-Control "max-age=3600"
+</Directory>
+""".strip('\n').format(securedrop_test_vars.securedrop_code),
+ 'XSendFile On',
+ 'LimitRequestBody 524288000',
+ 'XSendFilePath /var/lib/securedrop/store/',
+ 'XSendFilePath /var/lib/securedrop/tmp/',
+ 'ErrorLog /var/log/apache2/journalist-error.log',
+ 'CustomLog /var/log/apache2/journalist-access.log combined',
+])
+def test_apache_config_journalist_interface(File, apache_opt):
+ """
+ Ensure the necessary Apache settings for serving the application
+ are in place. Some values will change according to the host,
+ e.g. app-staging versus app-prod will have different listening
+ addresses, depending on whether Tor connections are forced.
+
+ These checks apply only to the Document Interface, used by Journalists.
+ """
+ f = File("/etc/apache2/sites-available/journalist.conf")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+ regex = "^{}$".format(re.escape(apache_opt))
+ assert re.search(regex, f.content, re.M)
+
+
+def test_apache_journalist_interface_vhost(File):
+ """
+ Ensure the document root is configured with correct access restrictions
+ for serving Journalist Interface application code.
+ """
+ f = File("/etc/apache2/sites-available/journalist.conf")
+ assert common_apache2_directory_declarations in f.content
+
+
+def test_apache_logging_journalist_interface(File, Command, Sudo):
+ """
+ Check that logging is configured correctly for the Journalist Interface.
+ The actions of Journalists are logged by the system, so that an Admin can
+ investigate incidents and track access.
+
+ Logs were broken for some period of time, logging only "combined" to the logfile,
+ rather than the combined LogFormat intended.
+ """
+ # Sudo is necessary because /var/log/apache2 is mode 0750.
+ with Sudo():
+ f = File("/var/log/apache2/journalist-access.log")
+ assert f.is_file
+ if f.size == 0:
+ # If the file is empty, the Journalist Interface hasn't been used
+ # yet, so make a quick GET request local to the host so we can
+ # validate the log entry.
+ Command.check_output("curl http://127.0.0.1:8080")
+
+ assert f.size > 0 # Make sure something was logged.
+ # LogFormat declaration was missing, so track regressions that log
+ # just the string "combined" and nothing else.
+ assert not f.contains("^combined$")
+ assert f.contains("GET")
diff --git a/testinfra/app/apache/test_apache_service.py b/testinfra/app/apache/test_apache_service.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/apache/test_apache_service.py
@@ -0,0 +1,69 @@
+import pytest
+import re
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
[email protected]("apache_site", [
+ "source",
+ "journalist",
+])
+def test_apache_enabled_sites(Command, Sudo, apache_site):
+ """
+ Ensure the Source and Journalist interfaces are enabled.
+ """
+ with Sudo():
+ c = Command("/usr/sbin/a2query -s {}".format(apache_site))
+ assert "{} (enabled".format(apache_site) in c.stdout
+ assert c.rc == 0
+
+
[email protected]("apache_site", [
+ "000-default",
+])
+def test_apache_disabled_sites(Command, apache_site):
+ """
+ Ensure the default HTML document root is disabled.
+ """
+ c = Command("a2query -s {}".format(apache_site))
+ assert "No site matches {} (disabled".format(apache_site) in c.stderr
+ assert c.rc == 32
+
+
+def test_apache_service(Service, Sudo):
+ """
+ Ensure Apache service is running.
+ """
+ # Sudo is necessary to run `service apache2 status`, otherwise
+ # the service is falsely reported as not running.
+ with Sudo():
+ s = Service("apache2")
+ assert s.is_running
+ assert s.is_enabled
+
+
+def test_apache_user(User):
+ """
+ Ensure user account for running application code is configured correctly.
+ """
+ u = User("www-data")
+ assert u.exists
+ assert u.home == "/var/www"
+ assert u.shell == "/usr/sbin/nologin"
+
+
[email protected]("port", [
+ "80",
+ "8080",
+])
+def test_apache_listening(Socket, Sudo, port):
+ """
+ Ensure Apache is listening on proper ports and interfaces.
+ In staging, expect the service to be bound to 0.0.0.0,
+ but in prod, it should be restricted to 127.0.0.1.
+ """
+ # Sudo is necessary to read from /proc/net/tcp.
+ with Sudo():
+ s = Socket("tcp://{}:{}".format(securedrop_test_vars.apache_listening_address, port))
+ assert s.is_listening
diff --git a/testinfra/app/apache/test_apache_source_interface.py b/testinfra/app/apache/test_apache_source_interface.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/apache/test_apache_source_interface.py
@@ -0,0 +1,61 @@
+import pytest
+import re
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
[email protected]("header", securedrop_test_vars.wanted_apache_headers)
+def test_apache_headers_source_interface(File, header):
+ """
+ Test for expected headers in Source Interface vhost config.
+ """
+ f = File("/etc/apache2/sites-available/source.conf")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+ header_regex = "^{}$".format(re.escape(header))
+ assert re.search(header_regex, f.content, re.M)
+
+
[email protected]("apache_opt", [
+ "<VirtualHost {}:80>".format(securedrop_test_vars.apache_listening_address),
+ "WSGIDaemonProcess source processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format(securedrop_test_vars.securedrop_code),
+ 'WSGIProcessGroup source',
+ 'WSGIScriptAlias / /var/www/source.wsgi',
+ 'Header set Cache-Control "no-store"',
+ "Alias /static {}/static".format(securedrop_test_vars.securedrop_code),
+ """
+<Directory {}/static>
+ Order allow,deny
+ Allow from all
+ # Cache static resources for 1 hour
+ Header set Cache-Control "max-age=3600"
+</Directory>
+""".strip('\n').format(securedrop_test_vars.securedrop_code),
+ 'XSendFile Off',
+ 'LimitRequestBody 524288000',
+ 'ErrorDocument 400 /notfound',
+ 'ErrorDocument 401 /notfound',
+ 'ErrorDocument 403 /notfound',
+ 'ErrorDocument 404 /notfound',
+ 'ErrorDocument 500 /notfound',
+ "ErrorLog {}".format(securedrop_test_vars.apache_source_log),
+])
+def test_apache_config_source_interface(File, apache_opt):
+ """
+ Ensure the necessary Apache settings for serving the application
+ are in place. Some values will change according to the host,
+ e.g. app-staging versus app-prod will have different listening
+ addresses, depending on whether Tor connections are forced.
+
+ These checks apply only to the Source Interface, used by Sources.
+ """
+ f = File("/etc/apache2/sites-available/source.conf")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+ regex = "^{}$".format(re.escape(apache_opt))
+ assert re.search(regex, f.content, re.M)
diff --git a/testinfra/app/apache/test_apache_system_config.py b/testinfra/app/apache/test_apache_system_config.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/apache/test_apache_system_config.py
@@ -0,0 +1,135 @@
+import pytest
+import re
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
[email protected]("package", [
+ "apache2-mpm-worker",
+ "libapache2-mod-wsgi",
+ "libapache2-mod-xsendfile",
+])
+def test_apache_apt_packages(Package, package):
+ """
+ Ensure required Apache packages are installed.
+ """
+ assert Package(package).is_installed
+
+
+def test_apache_security_config_deprecated(File):
+ """
+ Ensure that /etc/apache2/security is absent, since it was setting
+ redundant options already present in /etc/apache2/apache2.conf.
+ See #643 for discussion.
+ """
+ assert not File("/etc/apache2/security").exists
+
+
[email protected]("apache_opt", [
+ 'Mutex file:${APACHE_LOCK_DIR} default',
+ 'PidFile ${APACHE_PID_FILE}',
+ 'Timeout 60',
+ 'KeepAlive On',
+ 'MaxKeepAliveRequests 100',
+ 'KeepAliveTimeout 5',
+ 'User www-data',
+ 'Group www-data',
+ 'AddDefaultCharset UTF-8',
+ 'DefaultType None',
+ 'HostnameLookups Off',
+ 'ErrorLog /dev/null',
+ 'LogLevel crit',
+ 'IncludeOptional mods-enabled/*.load',
+ 'IncludeOptional mods-enabled/*.conf',
+ 'Include ports.conf',
+ 'IncludeOptional sites-enabled/*.conf',
+ 'ServerTokens Prod',
+ 'ServerSignature Off',
+ 'TraceEnable Off',
+])
+def test_apache_config_settings(File, apache_opt):
+ """
+ Check required Apache config settings for general server.
+ These checks do not target individual interfaces, e.g.
+ Source versus Document Interface, and instead apply to
+ Apache more generally.
+ """
+ f = File("/etc/apache2/apache2.conf")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+ assert re.search("^{}$".format(re.escape(apache_opt)), f.content, re.M)
+
+
[email protected]("port", [
+ "80",
+ "8080",
+])
+def test_apache_ports_config(File, SystemInfo, port):
+ """
+ Ensure Apache ports config items, which specify how the
+ Source and Document Interfaces are configured to be served
+ over Tor. On staging hosts, they will listen on any interface,
+ to permit port forwarding for local testing, but in production,
+ they're restricted to localhost, for use over Tor.
+ """
+ f = File("/etc/apache2/ports.conf")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+
+ listening_regex = "^Listen {}:{}$".format(re.escape(
+ securedrop_test_vars.apache_listening_address), port)
+ assert f.contains(listening_regex)
+
+
[email protected]("apache_module", [
+ 'access_compat',
+ 'authn_core',
+ 'alias',
+ 'authz_core',
+ 'authz_host',
+ 'authz_user',
+ 'deflate',
+ 'filter',
+ 'dir',
+ 'headers',
+ 'mime',
+ 'mpm_event',
+ 'negotiation',
+ 'reqtimeout',
+ 'rewrite',
+ 'wsgi',
+ 'xsendfile',
+])
+def test_apache_modules_present(Command, Sudo, apache_module):
+ """
+ Ensure presence of required Apache modules. Application will not work
+ correctly if these are missing. A separate test will check for
+ disabled modules.
+ """
+ with Sudo():
+ c = Command("/usr/sbin/a2query -m {}".format(apache_module))
+ assert "{} (enabled".format(apache_module) in c.stdout
+ assert c.rc == 0
+
+
[email protected]("apache_module", [
+ 'auth_basic',
+ 'authn_file',
+ 'autoindex',
+ 'env',
+ 'status',
+])
+def test_apache_modules_absent(Command, Sudo, apache_module):
+ """
+ Ensure absence of unwanted Apache modules. Application does not require
+ these modules, so they should be disabled to reduce attack surface.
+ A separate test will check for disabled modules.
+ """
+ with Sudo():
+ c = Command("/usr/sbin/a2query -m {}".format(apache_module))
+ assert "No module matches {} (disabled".format(apache_module) in c.stderr
+ assert c.rc == 32
diff --git a/testinfra/app/test_apparmor.py b/testinfra/app/test_apparmor.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/test_apparmor.py
@@ -0,0 +1,95 @@
+import os
+import pytest
+
+
+sdvars = pytest.securedrop_test_vars
+
+
[email protected]('pkg', ['apparmor', 'apparmor-utils'])
+def test_apparmor_pkg(Package, pkg):
+ """ Apparmor package dependencies """
+ assert Package(pkg).is_installed
+
+def test_apparmor_enabled(Command, Sudo):
+ """ Check that apparmor is enabled """
+ with Sudo():
+ assert Command("aa-status --enabled").rc == 0
+
+apache2_capabilities = [
+ 'kill',
+ 'net_bind_service',
+ 'sys_ptrace'
+ ]
[email protected]('cap', apache2_capabilities)
+def test_apparmor_apache_capabilities(Command, cap):
+ """ check for exact list of expected app-armor capabilities for apache2 """
+ c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' /etc/apparmor.d/usr.sbin.apache2")
+ assert cap in c.stdout
+
+def test_apparmor_apache_exact_capabilities(Command):
+ """ ensure no extra capabilities are defined for apache2 """
+ c = Command.check_output("grep -ic capability /etc/apparmor.d/usr.sbin.apache2")
+ assert str(len(apache2_capabilities)) == c
+
+tor_capabilities = ['setgid']
[email protected]('cap', tor_capabilities)
+def test_apparmor_tor_capabilities(Command, cap):
+ """ check for exact list of expected app-armor capabilities for tor """
+ c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' /etc/apparmor.d/usr.sbin.tor")
+ assert cap in c.stdout
+
+def test_apparmor_tor_exact_capabilities(Command):
+ """ ensure no extra capabilities are defined for tor """
+ c = Command.check_output("grep -ic capability /etc/apparmor.d/usr.sbin.tor")
+ assert str(len(tor_capabilities)) == c
+
+enforced_profiles = [
+ 'ntpd',
+ 'apache2',
+ 'tcpdump',
+ 'tor']
[email protected]('profile', enforced_profiles)
+def test_apparmor_ensure_not_disabled(File, Sudo, profile):
+ """ Explicitly check that enforced profiles are NOT in /etc/apparmor.d/disable
+ Polling aa-status only checks the last config that was loaded, this ensures
+ it won't be disabled on reboot.
+ """
+ f = File("/etc/apparmor.d/disabled/usr.sbin.{}".format(profile))
+ with Sudo():
+ assert not f.exists
+
+
[email protected]('complain_pkg', sdvars.apparmor_complain)
+def test_app_apparmor_complain(Command, Sudo, complain_pkg):
+ """ Ensure app-armor profiles are in complain mode for staging """
+ with Sudo():
+ awk = "awk '/[0-9]+ profiles.*complain./{flag=1;next}/^[0-9]+.*/{flag=0}flag'"
+ c = Command.check_output("aa-status | {}".format(awk))
+ assert complain_pkg in c
+
+
+def test_app_apparmor_complain_count(Command, Sudo):
+ """ Ensure right number of app-armor profiles are in complain mode """
+ with Sudo():
+ c = Command.check_output("aa-status --complaining")
+ assert c == str(len(sdvars.apparmor_complain))
+
[email protected]('aa_enforced', sdvars.apparmor_enforce)
+def test_apparmor_enforced(Command, Sudo, aa_enforced):
+ awk = "awk '/[0-9]+ profiles.*enforce./{flag=1;next}/^[0-9]+.*/{flag=0}flag'"
+ with Sudo():
+ c = Command.check_output("aa-status | {}".format(awk))
+ assert aa_enforced in c
+
+def test_apparmor_total_profiles(Command, Sudo):
+ """ ensure number of total profiles is sum of enforced and complaining profiles """
+ with Sudo():
+ total_expected = str((len(sdvars.apparmor_enforce)
+ + len(sdvars.apparmor_complain)))
+ assert Command.check_output("aa-status --profiled") == total_expected
+
+def test_aastatus_unconfined(Command, Sudo):
+ """ Ensure that there are no processes that are unconfined but have a profile """
+ unconfined_chk = "0 processes are unconfined but have a profile defined"
+ with Sudo():
+ assert unconfined_chk in Command("aa-status").stdout
diff --git a/testinfra/app/test_appenv.py b/testinfra/app/test_appenv.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/test_appenv.py
@@ -0,0 +1,71 @@
+import pytest
+import os
+
+sdvars = pytest.securedrop_test_vars
+
[email protected]('exp_pip_pkg', sdvars.pip_deps)
+def test_app_pip_deps(PipPackage, exp_pip_pkg):
+ """ Ensure pip dependencies are installed """
+ pip = PipPackage.get_packages()
+ assert pip[exp_pip_pkg['name']]['version'] == exp_pip_pkg['version']
+
+
+def test_app_wsgi(File, Sudo):
+ """ ensure logging is enabled for source interface in staging """
+ f = File("/var/www/source.wsgi")
+ with Sudo():
+ assert f.is_file
+ assert oct(f.mode) == "0640"
+ assert f.user == 'www-data'
+ assert f.group == 'www-data'
+ assert f.contains("^import logging$")
+ assert f.contains("^logging\.basicConfig(stream=sys\.stderr)$")
+
+def test_pidfile(File):
+ """ ensure there are no pid files """
+ assert not File('/tmp/journalist.pid').exists
+ assert not File('/tmp/source.pid').exists
+
[email protected]('app_dir', sdvars.app_directories)
+def test_app_directories(File, Sudo, app_dir):
+ """ ensure securedrop app directories exist with correct permissions """
+ f = File(app_dir)
+ with Sudo():
+ assert f.is_directory
+ assert f.user == sdvars.securedrop_user
+ assert f.group == sdvars.securedrop_user
+ assert oct(f.mode) == "0700"
+
+def test_app_code_pkg(Package):
+ """ ensure securedrop-app-code package is installed """
+ assert Package("securedrop-app-code").is_installed
+
+def test_gpg_key_in_keyring(Command, Sudo):
+ """ ensure test gpg key is present in app keyring """
+ with Sudo(sdvars.securedrop_user):
+ c = Command("gpg --homedir /var/lib/securedrop/keys --list-keys 28271441")
+ assert "pub 4096R/28271441 2013-10-12" in c.stdout
+
+def test_ensure_logo(File, Sudo):
+ """ ensure default logo header file exists """
+ f = File("{}/static/i/logo.png".format(sdvars.securedrop_code))
+ with Sudo():
+ assert oct(f.mode) == "0644"
+ assert f.user == sdvars.securedrop_user
+ assert f.group == sdvars.securedrop_user
+
+def test_securedrop_tmp_clean_cron(Command, Sudo):
+ """ Ensure securedrop tmp clean cron job in place """
+ with Sudo():
+ cronlist = Command("crontab -l").stdout
+ cronjob = "@daily {}/manage.py clean-tmp".format(sdvars.securedrop_code)
+ assert cronjob in cronlist
+
+def test_app_workerlog_dir(File, Sudo):
+ """ ensure directory for worker logs is present """
+ f = File('/var/log/securedrop_worker')
+ with Sudo():
+ assert f.is_directory
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
diff --git a/testinfra/app/test_network.py b/testinfra/app/test_network.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/test_network.py
@@ -0,0 +1,42 @@
+import os
+import difflib
+import pytest
+from jinja2 import Template
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
+def test_app_iptables_rules(SystemInfo, Command, Sudo):
+
+ # Build a dict of variables to pass to jinja for iptables comparison
+ kwargs = dict(
+ mon_ip=securedrop_test_vars.mon_ip,
+ default_interface = Command.check_output("ip r | head -n 1 | awk '{ print $5 }'"),
+ tor_user_id = Command.check_output("id -u debian-tor"),
+ securedrop_user_id = Command.check_output("id -u www-data"),
+ ssh_group_gid = Command.check_output("getent group ssh | cut -d: -f3"),
+ dns_server = securedrop_test_vars.dns_server)
+
+ # Build iptables scrape cmd, purge comments + counters
+ iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'"
+ environment = os.environ.get("CI_SD_ENV", "staging")
+ iptables_file = "{}/iptables-app-{}.j2".format(
+ os.path.dirname(os.path.abspath(__file__)),
+ environment)
+
+ # template out a local iptables jinja file
+ jinja_iptables = Template(open(iptables_file,'r').read())
+ iptables_expected = jinja_iptables.render(**kwargs)
+
+ with Sudo():
+ # Actually run the iptables scrape command
+ iptables = Command.check_output(iptables)
+ # print diff comparison (only shows up in pytests if test fails or
+ # verbosity turned way up)
+ for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'),
+ iptables.split('\n')):
+ print(iptablesdiff)
+ # Conduct the string comparison of the expected and actual iptables
+ # ruleset
+ assert iptables_expected == iptables
diff --git a/testinfra/app/test_ossec.py b/testinfra/app/test_ossec.py
new file mode 100644
--- /dev/null
+++ b/testinfra/app/test_ossec.py
@@ -0,0 +1,52 @@
+import re
+import pytest
+
+sdvars = pytest.securedrop_test_vars
+
+# Currently failing in CI under remote hosts
+# Looks like vagrant is currently appending hostname to local IP
[email protected]
+def test_hosts_files(File, SystemInfo):
+ """ Ensure host localhost is mapping to servername """
+ f = File('/etc/hosts')
+ assert f.contains('^127.0.0.1\.*mon-{0}$'.format(env))
+
+def test_hosts_files(File, SystemInfo):
+ """ Ensure host files mapping are in place """
+ f = File('/etc/hosts')
+
+ hostname = SystemInfo.hostname
+ mon_ip = sdvars.mon_ip
+ mon_host = sdvars.monitor_hostname
+
+ assert f.contains('^127.0.0.1\s*localhost')
+ assert f.contains('^{}\s*{}\s*securedrop-monitor-server-alias$'.format(
+ mon_ip,
+ mon_host))
+
+def test_hosts_duplicate(Command):
+ """ Regression test for duplicate entries """
+ assert Command.check_output("uniq --repeated /etc/hosts") == ""
+
+def test_ossec_agent_installed(Package):
+ """ Check that ossec-agent package is present """
+ assert Package("securedrop-ossec-agent").is_installed
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
+def test_ossec_keyfile_present(File, Command, Sudo, SystemInfo):
+ """ ensure client keyfile for ossec-agent is present """
+ pattern = "^1024 {} {} [0-9a-f]{{64}}$".format(
+ sdvars.app_hostname,
+ sdvars.app_ip)
+ regex = re.compile(pattern)
+
+ with Sudo():
+ f = File("/var/ossec/etc/client.keys")
+ assert f.exists
+ assert oct(f.mode) == "0644"
+ assert f.user == "root"
+ assert f.group == "ossec"
+ assert f.content_string
+ assert bool(re.search(regex, f.content))
diff --git a/testinfra/build/test_build_dependencies.py b/testinfra/build/test_build_dependencies.py
new file mode 100644
--- /dev/null
+++ b/testinfra/build/test_build_dependencies.py
@@ -0,0 +1,84 @@
+import pytest
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
+def get_build_directories():
+ """
+ Helper function to retrieve module-namespace test vars and format
+ the strings to interpolate version info. Keeps the test vars DRY
+ in terms of version info, and required since we can't rely on
+ Jinja-based evaluation of the YAML files (so we can't trivially
+ reuse vars in other var values, as is the case with Ansible).
+ """
+ substitutions = dict(
+ securedrop_version=securedrop_test_vars.securedrop_version,
+ ossec_version=securedrop_test_vars.ossec_version,
+ keyring_version=securedrop_test_vars.keyring_version,
+ )
+ build_directories = [d.format(**substitutions) for d in securedrop_test_vars.build_directories]
+ return build_directories
+
+
+build_directories = get_build_directories()
+
+
[email protected]("package", [
+ "devscripts",
+ "git",
+ "libssl-dev",
+ "python-dev",
+ "python-pip",
+])
+def test_build_dependencies(Package, package):
+ """
+ Ensure development apt dependencies are installed.
+ The devscripts and git packages are required for running the
+ `update_version.sh` script, which should be executed inside the
+ build VM, so let's make sure they're present.
+ """
+ assert Package(package).is_installed
+
+
+def test_pip_wheel_installed(Command):
+ """
+ Ensure `wheel` is installed via pip, for packaging Python
+ dependencies into a Debian package.
+ """
+ c = Command("pip freeze")
+ assert "wheel==0.24.0" in c.stdout
+ assert c.rc == 0
+
+
+def test_sass_gem_installed(Command):
+ """
+ Ensure the `sass` Ruby gem is installed, for compiling SASS to CSS.
+ """
+ c = Command("gem list")
+ assert "sass (3.4.23)" in c.stdout
+ assert c.rc == 0
+
+
[email protected]("directory", get_build_directories())
+def test_build_directories(File, directory):
+ """
+ Ensure the build directories are present. These directories are
+ the top-level of the Debian packages being created. They contain
+ nested subdirs of varying complexity, depending on package.
+ """
+ if '{}' in directory:
+ directory = directory.format(securedrop_test_vars.securedrop_version)
+ assert File(directory).is_directory
+
+
+def test_build_all_packages_updated(Command):
+ """
+ Ensure a dist-upgrade has already been run, by checking that no
+ packages are eligible for upgrade currently. This will ensure that
+ all upgrades, security and otherwise, have been applied to the VM
+ used to build packages.
+ """
+ c = Command('aptitude --simulate -y dist-upgrade')
+ assert c.rc == 0
+ assert "No packages will be installed, upgraded, or removed." in c.stdout
diff --git a/testinfra/build/test_legacy_paths.py b/testinfra/build/test_legacy_paths.py
new file mode 100644
--- /dev/null
+++ b/testinfra/build/test_legacy_paths.py
@@ -0,0 +1,20 @@
+import pytest
+
+
[email protected]('build_path', [
+ '/tmp/build-',
+ '/tmp/rsync-filter',
+ '/tmp/src_install_files',
+ '/tmp/build-securedrop-keyring',
+ '/tmp/build-securedrop-ossec-agent',
+ '/tmp/build-securedrop-ossec-server',
+])
+def test_build_ossec_apt_dependencies(File, build_path):
+ """
+ Ensure that unwanted build paths are absent. Most of these were created
+ as unwanted side-effects during CI-related changes to the build scripts.
+
+ All paths are rightly considered "legacy" and should never be present on
+ the build host. This test is strictly for guarding against regressions.
+ """
+ assert not File(build_path).exists
diff --git a/testinfra/build/test_ossec_packages.py b/testinfra/build/test_ossec_packages.py
new file mode 100644
--- /dev/null
+++ b/testinfra/build/test_ossec_packages.py
@@ -0,0 +1,16 @@
+import pytest
+
+
[email protected]('apt_package', [
+ 'inotify-tools',
+ 'libssl-dev',
+ 'make',
+ 'tar',
+ 'unzip',
+])
+def test_build_ossec_apt_dependencies(Package, apt_package):
+ """
+ Ensure that the apt dependencies required for building the OSSEC
+ source deb packages (not the metapackages) are installed.
+ """
+ assert Package(apt_package).is_installed
diff --git a/testinfra/build/test_securedrop_deb_package.py b/testinfra/build/test_securedrop_deb_package.py
new file mode 100644
--- /dev/null
+++ b/testinfra/build/test_securedrop_deb_package.py
@@ -0,0 +1,206 @@
+import pytest
+import os
+import re
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
+def extract_package_name_from_filepath(filepath):
+ """
+ Helper function to infer intended package name from
+ the absolute filepath, using a rather garish regex.
+ E.g., given:
+ securedrop-ossec-agent-2.8.2+0.3.10-amd64.deb
+
+    returns:
+
+ securedrop-ossec-agent
+
+ which can then be used for comparisons in dpkg output.
+ """
+ deb_basename = os.path.basename(filepath)
+ package_name = re.search('^([a-z\-]+(?!\d))', deb_basename).groups()[0]
+ assert deb_basename.startswith(package_name)
+ return package_name
+
+
+def get_deb_packages():
+ """
+ Helper function to retrieve module-namespace test vars and format
+ the strings to interpolate version info. Keeps the test vars DRY
+ in terms of version info, and required since we can't rely on
+ Jinja-based evaluation of the YAML files (so we can't trivially
+ reuse vars in other var values, as is the case with Ansible).
+ """
+ substitutions = dict(
+ securedrop_version=securedrop_test_vars.securedrop_version,
+ ossec_version=securedrop_test_vars.ossec_version,
+ keyring_version=securedrop_test_vars.keyring_version,
+ )
+
+ deb_packages = [d.format(**substitutions) for d in securedrop_test_vars.build_deb_packages]
+ return deb_packages
+
+
+deb_packages = get_deb_packages()
+
[email protected]("deb", deb_packages)
+def test_build_deb_packages(File, deb):
+ """
+ Sanity check the built Debian packages for Control field
+ values and general package structure.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ assert deb_package.is_file
+
+
[email protected]("deb", deb_packages)
+def test_deb_packages_appear_installable(File, Command, Sudo, deb):
+ """
+ Confirms that a dry-run of installation reports no errors.
+ Simple check for valid Debian package structure, but not thorough.
+ When run on a malformed package, `dpkg` will report:
+
+ dpkg-deb: error: `foo.deb' is not a debian format archive
+
+ Testing application behavior is left to the functional tests.
+ """
+
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+
+ deb_basename = os.path.basename(deb_package.path)
+ package_name = extract_package_name_from_filepath(deb_package.path)
+ assert deb_basename.startswith(package_name)
+
+ # Sudo is required to call `dpkg --install`, even as dry-run.
+ with Sudo():
+ c = Command("dpkg --install --dry-run {}".format(deb_package.path))
+ assert "Selecting previously unselected package {}".format(package_name) in c.stdout
+ regex = "Preparing to unpack [./]+{} ...".format(re.escape(deb_basename))
+ assert re.search(regex, c.stdout, re.M)
+ assert c.rc == 0
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_control_fields(File, Command, deb):
+ """
+ Ensure Debian Control fields are populated as expected in the package.
+ These checks are rather superficial, and don't actually confirm that the
+ .deb files are not broken. At a later date, consider integration tests
+ that actually use these built files during an Ansible provisioning run.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ package_name = extract_package_name_from_filepath(deb_package.path)
+ # The `--field` option will display all fields if none are specified.
+ c = Command("dpkg-deb --field {}".format(deb_package.path))
+
+ assert "Maintainer: SecureDrop Team <[email protected]>" in c.stdout
+ assert "Architecture: amd64" in c.stdout
+ assert "Package: {}".format(package_name) in c.stdout
+ assert c.rc == 0
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_control_fields_homepage(File, Command, deb):
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ # The `--field` option will display all fields if none are specified.
+ c = Command("dpkg-deb --field {}".format(deb_package.path))
+ # The OSSEC source packages will have a different homepage;
+ # all other packages should set securedrop.org as homepage.
+ if os.path.basename(deb_package.path).startswith('ossec-'):
+ assert "Homepage: http://ossec.net" in c.stdout
+ else:
+ assert "Homepage: https://securedrop.org" in c.stdout
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_no_update_dependencies_file(File, Command, deb):
+ """
+ Ensures the update_python_dependencies script is not shipped via the
+ Debian packages.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ # Using `dpkg-deb` but `lintian --tag package-installs-python-bytecode`
+ # would be cleaner. Will defer to adding lintian tests later.
+ c = Command("dpkg-deb --contents {}".format(deb_package.path))
+ assert not re.search("^.*update_python_dependencies$", c.stdout, re.M)
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_no_pyc_files(File, Command, deb):
+ """
+ Ensures no .pyc files are shipped via the Debian packages.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ # Using `dpkg-deb` but `lintian --tag package-installs-python-bytecode`
+ # would be cleaner. Will defer to adding lintian tests later.
+ c = Command("dpkg-deb --contents {}".format(deb_package.path))
+ assert not re.search("^.*\.pyc$", c.stdout, re.M)
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_no_config_file(File, Command, deb):
+ """
+ Ensures the `securedrop-app-code` package does not ship a `config.py`
+ file. Doing so would clobber the site-specific changes made via Ansible.
+
+ Somewhat lazily checking all deb packages, rather than just the app-code
+ package, but it accomplishes the same in a DRY manner.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ # Using `dpkg-deb` but `lintian --tag package-installs-python-bytecode`
+ # would be cleaner. Will defer to adding lintian tests later.
+ c = Command("dpkg-deb --contents {}".format(deb_package.path))
+ assert not re.search("^.*config\.py$", c.stdout, re.M)
+
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_no_generated_assets(File, Command, deb):
+ """
+ Ensures the `securedrop-app-code` package does not ship a minified
+ static assets, which are built automatically via Flask-Assets, and may be
+ present in the source directory used to build from.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+
+ # Only relevant for the securedrop-app-code package:
+ if "securedrop-app-code" in deb_package.path:
+ c = Command("dpkg-deb --contents {}".format(deb_package.path))
+ # static/gen/ directory should exist
+ assert re.search("^.*\./var/www/securedrop/static/gen/$", c.stdout, re.M)
+ # static/gen/ directory should be empty
+ assert not re.search("^.*\./var/www/securedrop/static/gen/.+$", c.stdout, re.M)
+
+ # static/.webassets-cache/ directory should exist
+ assert re.search("^.*\./var/www/securedrop/static/.webassets-cache/$", c.stdout, re.M)
+ # static/.webassets-cache/ directory should be empty
+ assert not re.search("^.*\./var/www/securedrop/static/.webassets-cache/.+$", c.stdout, re.M)
+
+ # no SASS files should exist; only the generated CSS files.
+ assert not re.search("^.*sass.*$", c.stdout, re.M)
+
[email protected]("deb", deb_packages)
+def test_deb_package_contains_css(File, Command, deb):
+ """
+ Ensures the `securedrop-app-code` package contains files that
+ are generated during the `sass` build process.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+
+ # Only relevant for the securedrop-app-code package:
+ if "securedrop-app-code" in deb_package.path:
+ c = Command("dpkg-deb --contents {}".format(deb_package.path))
+
+ for css_type in ['journalist', 'source']:
+ assert re.search("^.*\./var/www/securedrop/static/css/{}.css$".format(css_type), c.stdout, re.M)
+ assert re.search("^.*\./var/www/securedrop/static/css/{}.css.map$".format(css_type), c.stdout, re.M)
diff --git a/testinfra/common/test_cron_apt.py b/testinfra/common/test_cron_apt.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_cron_apt.py
@@ -0,0 +1,140 @@
+import pytest
+import re
+
+
[email protected]('dependency', [
+ 'cron-apt',
+ 'ntp'
+])
+def test_cron_apt_dependencies(Package, dependency):
+ """
+ Ensure critical packages are installed. If any of these are missing,
+ the system will fail to receive automatic updates.
+
+ The current apt config uses cron-apt, rather than unattended-upgrades,
+ but this may change in the future. Previously the apt.freedom.press repo
+ was not reporting any "Origin" field, making use of unattended-upgrades
+ problematic. With better procedures in place regarding apt repo
+ maintenance, we can ensure the field is populated going forward.
+ """
+ assert Package(dependency).is_installed
+
+
+def test_cron_apt_config(File):
+ """
+ Ensure custom cron-apt config file is present.
+ """
+ f = File('/etc/cron-apt/config')
+ assert f.is_file
+ assert f.user == "root"
+ assert oct(f.mode) == "0644"
+ assert f.contains('^SYSLOGON="always"$')
+ assert f.contains('^EXITON=error$')
+
+
+
[email protected]('repo', [
+ 'deb http://security.ubuntu.com/ubuntu trusty-security main',
+ 'deb-src http://security.ubuntu.com/ubuntu trusty-security main',
+ 'deb http://security.ubuntu.com/ubuntu trusty-security universe',
+ 'deb-src http://security.ubuntu.com/ubuntu trusty-security universe',
+ 'deb [arch=amd64] https://apt.freedom.press trusty main',
+ 'deb http://deb.torproject.org/torproject.org trusty main',
+])
+def test_cron_apt_repo_list(File, repo):
+ """
+ Ensure the correct apt repositories are specified
+ in the security list for apt.
+ """
+ f = File('/etc/apt/security.list')
+ assert f.is_file
+ assert f.user == "root"
+ assert oct(f.mode) == "0644"
+ repo_regex = '^{}$'.format(re.escape(repo))
+ assert f.contains(repo_regex)
+
+
+
+def test_cron_apt_repo_config_update(File):
+ """
+ Ensure cron-apt updates repos from the security.list config.
+ """
+
+ f = File('/etc/cron-apt/action.d/0-update')
+ assert f.is_file
+ assert f.user == "root"
+ assert oct(f.mode) == "0644"
+ repo_config = str('update -o quiet=2'
+ ' -o Dir::Etc::SourceList=/etc/apt/security.list'
+ ' -o Dir::Etc::SourceParts=""')
+ assert f.contains('^{}$'.format(repo_config))
+
+
+def test_cron_apt_repo_config_upgrade(File):
+ """
+ Ensure cron-apt upgrades packages from the security.list config.
+ """
+ f = File('/etc/cron-apt/action.d/5-security')
+ assert f.is_file
+ assert f.user == "root"
+ assert oct(f.mode) == "0644"
+ assert f.contains('^autoclean -y$')
+ repo_config = str('dist-upgrade -y -o APT::Get::Show-Upgraded=true'
+ ' -o Dir::Etc::SourceList=/etc/apt/security.list'
+ ' -o Dpkg::Options::=--force-confdef'
+ ' -o Dpkg::Options::=--force-confold')
+ assert f.contains(re.escape(repo_config))
+
+
+def test_cron_apt_config_deprecated(File):
+ """
+ Ensure default cron-apt file to download all updates does not exist.
+ """
+ f = File('/etc/cron-apt/action.d/3-download')
+ assert not f.exists
+
+
[email protected]('cron_job', [
+ { 'job': '0 4 * * * root /usr/bin/test -x /usr/sbin/cron-apt && /usr/sbin/cron-apt && /sbin/reboot',
+ 'state': 'present',
+ },
+ { 'job': '0 4 * * * root /usr/bin/test -x /usr/sbin/cron-apt && /usr/sbin/cron-apt',
+ 'state': 'absent',
+ },
+ { 'job': '0 5 * * * root /sbin/reboot',
+ 'state': 'absent',
+ },
+])
+def test_cron_apt_cron_jobs(File, cron_job):
+ """
+ Check for correct cron job for upgrading all packages and rebooting.
+ We'll also check for absence of previous versions of the cron job,
+ to make sure those have been cleaned up via the playbooks.
+ """
+ f = File('/etc/cron.d/cron-apt')
+ assert f.is_file
+ assert f.user == "root"
+ assert oct(f.mode) == "0644"
+
+ regex_job = '^{}$'.format(re.escape(cron_job['job']))
+ if cron_job['state'] == 'present':
+ assert f.contains(regex_job)
+ else:
+ assert not f.contains(regex_job)
+
+
+def test_cron_apt_all_packages_updated(Command):
+ """
+ Ensure a safe-upgrade has already been run, by checking that no
+ packages are eligible for upgrade currently.
+
+ The Ansible config installs a specific, out-of-date version of Firefox
+ for use with Selenium. Therefore apt will report it's possible to upgrade
+ Firefox, which we'll need to mark as "OK" in terms of the tests.
+ """
+ c = Command('aptitude --simulate -y safe-upgrade')
+ assert c.rc == 0
+ # Staging hosts will have locally built deb packages, marked as held.
+ # Staging and development will have a version-locked Firefox pinned for
+ # Selenium compatibility; if the holds are working, they shouldn't be upgraded.
+ assert "No packages will be installed, upgraded, or removed." in c.stdout
diff --git a/testinfra/common/test_fpf_apt_repo.py b/testinfra/common/test_fpf_apt_repo.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_fpf_apt_repo.py
@@ -0,0 +1,45 @@
+def test_fpf_apt_repo_present(File):
+ """
+ Ensure the FPF apt repo, apt.freedom.press, is configured.
+ This repository is necessary for the SecureDrop Debian packages,
+ including:
+
+ * securedrop-app-code
+ * securedrop-keyring
+ * securedrop-grsec
+
+ Depending on the host, additional FPF-maintained packages will be
+ installed, e.g. for OSSEC. Install state for those packages
+ is tested separately.
+ """
+ f = File('/etc/apt/sources.list.d/apt_freedom_press.list')
+ assert f.contains('^deb \[arch=amd64\] https:\/\/apt\.freedom\.press trusty main$')
+
+
+def test_fpf_apt_repo_fingerprint(Command):
+ """
+ Ensure the FPF apt repo has the correct fingerprint on the associated
+ signing pubkey. The key changed in October 2016, so test for the
+ newest fingerprint, which is installed on systems via the
+ `securedrop-keyring` package.
+ """
+
+
+ c = Command('apt-key finger')
+
+ fpf_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/securedrop-keyring.gpg
+---------------------------------------------
+pub 4096R/00F4AD77 2016-10-20 [expires: 2017-10-20]
+ Key fingerprint = 2224 5C81 E3BA EB41 38B3 6061 310F 5612 00F4 AD77
+uid SecureDrop Release Signing Key"""
+
+ assert c.rc == 0
+ assert fpf_gpg_pub_key_info in c.stdout
+
+ fpf_gpg_pub_key_fingerprint_expired = 'B89A 29DB 2128 160B 8E4B 1B4C BADD E0C7 FC9F 6818'
+ fpf_gpg_pub_key_info_expired = """pub 4096R/FC9F6818 2014-10-26 [expired: 2016-10-27]
+ Key fingerprint = B89A 29DB 2128 160B 8E4B 1B4C BADD E0C7 FC9F 6818
+uid Freedom of the Press Foundation Master Signing Key"""
+
+ assert fpf_gpg_pub_key_fingerprint_expired not in c.stdout
+ assert fpf_gpg_pub_key_info_expired not in c.stdout
diff --git a/testinfra/common/test_grsecurity.py b/testinfra/common/test_grsecurity.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_grsecurity.py
@@ -0,0 +1,181 @@
+import pytest
+import os
+import re
+
+
+def test_ssh_motd_disabled(File):
+ """
+ Ensure the SSH MOTD (Message of the Day) is disabled.
+ Grsecurity balks at Ubuntu's default MOTD.
+ """
+ f = File("/etc/pam.d/sshd")
+ assert f.is_file
+ assert not f.contains("pam\.motd")
+
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
[email protected]("package", [
+ 'paxctl',
+ 'securedrop-grsec',
+])
+def test_grsecurity_apt_packages(Package, package):
+ """
+ Ensure the grsecurity-related apt packages are present on the system.
+ Includes the FPF-maintained metapackage, as well as paxctl, for managing
+ PaX flags on binaries.
+ """
+ assert Package(package).is_installed
+
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
[email protected]("package", [
+ 'linux-signed-image-generic-lts-utopic',
+ 'linux-signed-image-generic',
+ 'linux-signed-generic-lts-utopic',
+ 'linux-signed-generic',
+ '^linux-image-.*generic$',
+ '^linux-headers-.*',
+])
+def test_generic_kernels_absent(Command, package):
+ """
+ Ensure the default Ubuntu-provided kernel packages are absent.
+ In the past, conflicting version numbers have caused machines
+ to reboot into a non-grsec kernel due to poor handling of
+ GRUB_DEFAULT logic. Removing the vendor-provided kernel packages
+ prevents accidental boots into non-grsec kernels.
+ """
+ # Can't use the TestInfra Package module to check state=absent,
+ # so let's check by shelling out to `dpkg -l`. Dpkg will automatically
+ # honor simple regex in package names.
+ c = Command("dpkg -l {}".format(package))
+ assert c.rc == 1
+ error_text = "dpkg-query: no packages found matching {}".format(package)
+ assert c.stderr == error_text
+
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
+def test_grsecurity_lock_file(File):
+ """
+ Ensure system is rerunning a grsecurity kernel by testing for the
+ `grsec_lock` file, which is automatically created by grsecurity.
+ """
+ f = File("/proc/sys/kernel/grsecurity/grsec_lock")
+ assert oct(f.mode) == "0600"
+ assert f.user == "root"
+ assert f.size == 0
+
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
+def test_grsecurity_kernel_is_running(Command):
+ """
+ Make sure the currently running kernel is specific grsec kernel.
+ """
+ c = Command('uname -r')
+ assert c.stdout.endswith('-grsec')
+ assert c.stdout == '3.14.79-grsec'
+
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
[email protected]('sysctl_opt', [
+ ('kernel.grsecurity.grsec_lock', 1),
+ ('kernel.grsecurity.rwxmap_logging', 0),
+ ('vm.heap_stack_gap', 1048576),
+])
+def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt):
+ """
+ Check that the grsecurity-related sysctl options are set correctly.
+ In production the RWX logging is disabled, to reduce log noise.
+ """
+ with Sudo():
+ assert Sysctl(sysctl_opt[0]) == sysctl_opt[1]
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
[email protected]('paxtest_check', [
+ "Executable anonymous mapping",
+ "Executable bss",
+ "Executable data",
+ "Executable heap",
+ "Executable stack",
+ "Executable shared library bss",
+ "Executable shared library data",
+ "Executable anonymous mapping (mprotect)",
+ "Executable bss (mprotect)",
+ "Executable data (mprotect)",
+ "Executable heap (mprotect)",
+ "Executable stack (mprotect)",
+ "Executable shared library bss (mprotect)",
+ "Executable shared library data (mprotect)",
+ "Writable text segments",
+ "Return to function (memcpy)",
+ "Return to function (memcpy, PIE)",
+])
+def test_grsecurity_paxtest(Command, Sudo, paxtest_check):
+ """
+ Check that paxtest does not report anything vulnerable
+ Requires the package paxtest to be installed.
+ The paxtest package is currently being installed in the app-test role.
+ """
+ if Command.exists("/usr/bin/paxtest"):
+ with Sudo():
+ c = Command("paxtest blackhat")
+ assert c.rc == 0
+ assert "Vulnerable" not in c.stdout
+            assert re.search("^{}\s*:\sKilled$".format(re.escape(paxtest_check)), c.stdout, re.M)
+
+
+
[email protected](os.environ.get('FPF_CI','false') == "true",
+ reason="Not needed in CI environment")
+def test_grub_pc_marked_manual(Command):
+ """
+ Ensure the `grub-pc` packaged is marked as manually installed.
+ This is necessary for VirtualBox with Vagrant.
+ """
+ c = Command('apt-mark showmanual grub-pc')
+ assert c.rc == 0
+ assert c.stdout == "grub-pc"
+
+
+def test_apt_autoremove(Command):
+ """
+ Ensure old packages have been autoremoved.
+ """
+ c = Command('apt-get --dry-run autoremove')
+ assert c.rc == 0
+ assert "The following packages will be REMOVED" not in c.stdout
+
+
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+ reason="Need to skip in environment w/o grsec")
[email protected]("binary", [
+ "/usr/sbin/grub-probe",
+ "/usr/sbin/grub-mkdevicemap",
+ "/usr/bin/grub-script-check",
+])
+def test_pax_flags(Command, File, binary):
+ """
+ Ensure PaX flags are set correctly on critical Grub binaries.
+ These flags are maintained as part of a post-install kernel hook
+ in the `securedrop-grsec` metapackage. If they aren't set correctly,
+ the machine may fail to boot into a new kernel.
+ """
+
+ f = File("/etc/kernel/postinst.d/paxctl-grub")
+ assert f.is_file
+ assert f.contains("^paxctl -zCE {}".format(binary))
+
+ c = Command("paxctl -v {}".format(binary))
+ assert c.rc == 0
+
+ assert "- PaX flags: --------E--- [{}]".format(binary) in c.stdout
+ assert "EMUTRAMP is enabled" in c.stdout
+ # Tracking regressions; previous versions of the Ansible config set
+ # the "p" and "m" flags.
+ assert "PAGEEXEC is disabled" not in c.stdout
+ assert "MPROTECT is disabled" not in c.stdout
diff --git a/testinfra/common/test_ip6tables.py b/testinfra/common/test_ip6tables.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_ip6tables.py
@@ -0,0 +1,14 @@
+def test_ip6tables_drop_everything(Command, Sudo):
+ """
+ Ensure that all IPv6 packets are dropped by default.
+ The IPv4 rules are more complicated, and tested separately.
+ """
+ desired_ip6tables_output = """
+-P INPUT DROP
+-P FORWARD DROP
+-P OUTPUT DROP
+""".lstrip().rstrip()
+
+ with Sudo():
+ c = Command.check_output("ip6tables -S")
+ assert c == desired_ip6tables_output
diff --git a/testinfra/common/test_platform.py b/testinfra/common/test_platform.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_platform.py
@@ -0,0 +1,26 @@
+import pytest
+
+
+def test_ansible_version(host):
+ """
+ Check that a supported version of Ansible is being used.
+
+    The project has long used the Ansible 1.x series, and now
+ requires the 2.x series starting with the 0.4 release. Ensure
+ installation is not being performed with an outdated ansible version.
+ """
+ localhost = host.get_host("local://")
+ c = localhost.check_output("ansible --version")
+ assert c.startswith("ansible 2.")
+
+def test_platform(SystemInfo):
+ """
+ SecureDrop requires Ubuntu Trusty 14.04 LTS. The shelf life
+ of that release means we'll need to migrate to Xenial LTS
+ at some point; until then, require hosts to be running
+ Ubuntu.
+ """
+ assert SystemInfo.type == "linux"
+ assert SystemInfo.distribution == "ubuntu"
+ assert SystemInfo.codename == "trusty"
+ assert SystemInfo.release == "14.04"
diff --git a/testinfra/common/test_system_hardening.py b/testinfra/common/test_system_hardening.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_system_hardening.py
@@ -0,0 +1,78 @@
+import os
+import pytest
+import re
+
+hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
+
[email protected]('sysctl_opt', [
+ ('net.ipv4.conf.all.accept_redirects', 0),
+ ('net.ipv4.conf.all.accept_source_route', 0),
+ ('net.ipv4.conf.all.rp_filter', 1),
+ ('net.ipv4.conf.all.secure_redirects', 0),
+ ('net.ipv4.conf.all.send_redirects', 0),
+ ('net.ipv4.conf.default.accept_redirects', 0),
+ ('net.ipv4.conf.default.accept_source_route', 0),
+ ('net.ipv4.conf.default.rp_filter', 1),
+ ('net.ipv4.conf.default.secure_redirects', 0),
+ ('net.ipv4.conf.default.send_redirects', 0),
+ ('net.ipv4.icmp_echo_ignore_broadcasts', 1),
+ ('net.ipv4.ip_forward', 0),
+ ('net.ipv4.tcp_max_syn_backlog', 4096),
+ ('net.ipv4.tcp_syncookies', 1),
+ ('net.ipv6.conf.all.disable_ipv6', 1),
+ ('net.ipv6.conf.default.disable_ipv6', 1),
+ ('net.ipv6.conf.lo.disable_ipv6', 1),
+])
+def test_sysctl_options(Sysctl, Sudo, sysctl_opt):
+ """
+ Ensure sysctl flags are set correctly. Most of these checks
+ are disabling IPv6 and hardening IPv4, which is appropriate
+ due to the heavy use of Tor.
+ """
+ with Sudo():
+ assert Sysctl(sysctl_opt[0]) == sysctl_opt[1]
+
+
+def test_dns_setting(File):
+ """
+ Ensure DNS service is hard-coded in resolv.conf config.
+ """
+ f = File('/etc/resolvconf/resolv.conf.d/base')
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0644"
+ assert f.contains('^nameserver 8\.8\.8\.8$')
+
+
[email protected]('kernel_module', [
+ 'bluetooth',
+ 'iwlwifi',
+])
+def test_blacklisted_kernel_modules(Command, File, Sudo, kernel_module):
+ """
+ Test that unwanted kernel modules are blacklisted on the system.
+ Mostly these checks are defense-in-depth approaches to ensuring
+ that wireless interfaces will not work.
+ """
+ with Sudo():
+ assert kernel_module not in Command("lsmod").stdout
+
+ f = File("/etc/modprobe.d/blacklist.conf")
+ assert f.contains("^blacklist {}$".format(kernel_module))
+
+
[email protected](hostenv.startswith('mon'),
+ reason="Monitor Server does not have swap disabled yet.")
+def test_swap_disabled(Command):
+ """
+ Ensure swap space is disabled. Prohibit writing memory to swapfiles
+ to reduce the threat of forensic analysis leaking any sensitive info.
+ """
+ c = Command.check_output('swapon --summary')
+ # A leading slash will indicate full path to a swapfile.
+ assert not re.search("^/", c, re.M)
+ # Expect that ONLY the headers will be present in the output.
+ rgx = re.compile("Filename\s*Type\s*Size\s*Used\s*Priority")
+ assert re.search(rgx, c)
diff --git a/testinfra/common/test_tor_config.py b/testinfra/common/test_tor_config.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_tor_config.py
@@ -0,0 +1,101 @@
+import pytest
+import re
+
+sdvars = pytest.securedrop_test_vars
+
+def test_tor_apt_repo(File):
+ """
+ Ensure the Tor Project apt repository is configured.
+ The version of Tor in the Trusty repos is not up to date.
+ """
+ f = File('/etc/apt/sources.list.d/deb_torproject_org_torproject_org.list')
+ repo_regex = re.escape('deb http://deb.torproject.org/torproject.org trusty main')
+ assert f.contains(repo_regex)
+
+
[email protected]('package', [
+ 'deb.torproject.org-keyring',
+ 'tor',
+])
+def test_tor_packages(Package, package):
+ """
+ Ensure Tor packages are installed. Includes a check for the keyring,
+ so that automatic updates can handle rotating the signing key if necessary.
+ """
+ assert Package(package).is_installed
+
+
+def test_tor_service_running(Command, File, Sudo):
+ """
+ Ensure tor is running and enabled. Tor is required for SSH access,
+ so it must be enabled to start on boot.
+ """
+ # TestInfra tries determine the service manager intelligently, and
+ # inappropriately assumes Upstart on Trusty, due to presence of the
+ # `initctl` command. The tor service is handled via a SysV-style init
+ # script, so let's just shell out and verify the running and enabled
+ # states explicitly.
+ with Sudo():
+ assert Command.check_output("service tor status") == " * tor is running"
+ tor_enabled = Command.check_output("find /etc/rc?.d -name S??tor")
+
+ assert tor_enabled != ""
+
+ tor_targets = tor_enabled.split("\n")
+ assert len(tor_targets) == 4
+ for target in tor_targets:
+ t = File(target)
+ assert t.is_symlink
+ assert t.linked_to == "/etc/init.d/tor"
+
+
[email protected]('torrc_option', [
+ 'SocksPort 0',
+ 'SafeLogging 1',
+ 'RunAsDaemon 1',
+])
+def test_tor_torrc_options(File, torrc_option):
+ """
+ Check for required options in the system Tor config file.
+ These options should be present regardless of machine role,
+ meaning both Application and Monitor server will have them.
+
+ Separate tests will check for specific hidden services.
+ """
+ f = File("/etc/tor/torrc")
+ assert f.is_file
+ assert f.user == "debian-tor"
+ assert oct(f.mode) == "0644"
+ assert f.contains("^{}$".format(torrc_option))
+
+
+def test_tor_torrc_sandbox(File):
+ """
+ Check that the `Sandbox 1` declaration is not present in the torrc.
+ The torrc manpage states this option is experimental, and although we
+ use it already on Tails workstations, further testing is required
+ before we push it out to servers. See issues #944 and #1969.
+ """
+ f = File("/etc/tor/torrc")
+    # Only `Sandbox 1` will enable, but make sure there are zero occurrences
+ # of "Sandbox", otherwise we may have a regression somewhere.
+ assert not f.contains("^.*Sandbox.*$")
+
+
+def test_tor_signing_key_fingerprint(Command):
+ """
+ The `deb.torproject.org-keyring` package manages the repo signing pubkey
+ for tor-related packages, so make sure that fingerprint matches
+ expectations.
+ """
+
+ c = Command("apt-key finger")
+ tor_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/deb.torproject.org-keyring.gpg
+-----------------------------------------------------
+pub 2048R/886DDD89 2009-09-04 [expires: 2020-08-29]
+ Key fingerprint = A3C4 F0F9 79CA A22C DBA8 F512 EE8C BC9E 886D DD89
+uid deb.torproject.org archive signing key
+sub 2048R/219EC810 2009-09-04 [expires: 2018-08-30]"""
+
+ assert c.rc == 0
+ assert tor_gpg_pub_key_info in c.stdout
diff --git a/testinfra/common/test_tor_hidden_services.py b/testinfra/common/test_tor_hidden_services.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_tor_hidden_services.py
@@ -0,0 +1,89 @@
+import pytest
+import re
+
+
+sdvars = pytest.securedrop_test_vars
+
+
[email protected]('tor_service', sdvars.tor_services)
+def test_tor_service_directories(File, Sudo, tor_service):
+ """
+ Check mode and ownership on Tor service directories.
+ """
+ with Sudo():
+ f = File("/var/lib/tor/services/{}".format(tor_service['name']))
+ assert f.is_directory
+ # TODO: tor might mark these dirs as setgid
+ assert oct(f.mode) == "0700"
+ assert f.user == "debian-tor"
+ assert f.group == "debian-tor"
+
+
[email protected]('tor_service', sdvars.tor_services)
+def test_tor_service_hostnames(File, Sudo, tor_service):
+ """
+ Check contents of tor service hostname file. For normal Hidden Services,
+ the file should contain only hostname (.onion URL). For Authenticated
+ Hidden Services, it should also contain the HidServAuth cookie.
+ """
+
+ # Declare regex only for THS; we'll build regex for ATHS only if
+ # necessary, since we won't have the required values otherwise.
+ ths_hostname_regex = "[a-z0-9]{16}\.onion"
+
+ with Sudo():
+ f = File("/var/lib/tor/services/{}/hostname".format(tor_service['name']))
+ assert f.is_file
+ assert oct(f.mode) == "0600"
+ assert f.user == "debian-tor"
+ assert f.group == "debian-tor"
+
+ # All hostnames should contain at *least* the hostname.
+ assert re.search(ths_hostname_regex, f.content)
+
+ if tor_service['authenticated']:
+ # HidServAuth regex is approximately [a-zA-Z0-9/+], but validating
+ # the entire entry is sane, and we don't need to nitpick the charset.
+ aths_hostname_regex = ths_hostname_regex+" .{22} # client: "+tor_service['client']
+ assert re.search("^{}$".format(aths_hostname_regex), f.content)
+ else:
+ assert re.search("^{}$".format(ths_hostname_regex), f.content)
+
+
[email protected]('tor_service', sdvars.tor_services)
+def test_tor_services_config(File, tor_service):
+ """
+ Ensure torrc file contains relevant lines for Hidden Service declarations.
+ All hidden services must include:
+
+ * HiddenServiceDir
+ * HiddenServicePort
+
+ Only authenticated hidden services must also include:
+
+ * HiddenServiceAuthorizeClient
+
+ Check for each as appropriate.
+ """
+ f = File("/etc/tor/torrc")
+ dir_regex = "HiddenServiceDir /var/lib/tor/services/{}".format(
+ tor_service['name'])
+ # We need at least one port, but it may be used for both config values.
+ # On the Journalist Interface, we reuse the "80" remote port but map it to
+ # a different local port, so Apache can listen on several sockets.
+ remote_port = tor_service['ports'][0]
+ try:
+ local_port = tor_service['ports'][1]
+ except IndexError:
+ local_port = remote_port
+
+ port_regex = "HiddenServicePort {} 127.0.0.1:{}".format(
+ remote_port, local_port)
+
+ assert f.contains("^{}$".format(dir_regex))
+ assert f.contains("^{}$".format(port_regex))
+
+ if tor_service['authenticated']:
+ auth_regex = "HiddenServiceAuthorizeClient stealth {}".format(
+ tor_service['client'])
+ assert f.contains("^{}$".format(auth_regex))
diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py
new file mode 100644
--- /dev/null
+++ b/testinfra/common/test_user_config.py
@@ -0,0 +1,83 @@
+import os
+import pytest
+import re
+import getpass
+
+hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
+
+def test_sudoers_config(File, Sudo):
+ """
+ Check sudoers config for passwordless sudo via group membership,
+ as well as environment-related hardening.
+ """
+ f = File("/etc/sudoers")
+ assert f.is_file
+ assert f.user == "root"
+ assert f.group == "root"
+ assert oct(f.mode) == "0440"
+
+ # Restrictive file mode requires sudo for reading, so let's
+ # read once and store the content in a var.
+ with Sudo():
+ sudoers_config = f.content
+
+ # Using re.search rather than `f.contains` since the basic grep
+ # matching doesn't support PCRE, so `\s` won't work.
+ assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M)
+ assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M)
+ assert re.search('^Defaults\s+mail_badpass$', sudoers_config, re.M)
+ assert re.search('Defaults\s+secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"', sudoers_config, re.M)
+ assert re.search('^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$', sudoers_config, re.M)
+ assert re.search('Defaults:%sudo\s+!requiretty', sudoers_config, re.M)
+
+
+def test_sudoers_tmux_env(File):
+ """
+ Ensure SecureDrop-specific bashrc additions are present.
+ This checks for automatic tmux start on interactive shells.
+ If we switch to byobu, we can set `byobu-enabled` and check
+ the corresponding settings there.
+ """
+
+ f = File('/etc/profile.d/securedrop_additions.sh')
+ non_interactive_str = re.escape('[[ $- != *i* ]] && return')
+ tmux_check = re.escape('test -z "$TMUX" && (tmux attach || tmux new-session)')
+
+ assert f.contains("^{}$".format(non_interactive_str))
+ assert f.contains("^if which tmux >\/dev\/null 2>&1; then$")
+
+ assert 'test -z "$TMUX" && (tmux attach || tmux new-session)' in f.content
+ assert f.contains(tmux_check)
+
+
+def test_tmux_installed(Package):
+ """
+ Ensure the `tmux` package is present, since it's required for the user env.
+ When running an interactive SSH session over Tor, tmux should be started
+ automatically, to prevent problems if the connection is broken unexpectedly,
+ as sometimes happens over Tor. The Admin will be able to reconnect to the
+ running tmux session and review command output.
+ """
+ assert Package("tmux").is_installed
+
+
[email protected](hostenv == 'travis',
+ reason="Bashrc tests dont make sense on Travis")
+def test_sudoers_tmux_env_deprecated(File):
+ """
+ Previous version of the Ansible config set the tmux config
+ in per-user ~/.bashrc, which was redundant. The config has
+ since moved to /etc/profile.d, to provide a single point of
+ update that applies to all users. Let's make sure that the
+ old setting isn't still active.
+ """
+
+ admin_user = "vagrant"
+ if os.environ.get("FPF_CI", None):
+ admin_user = getpass.getuser()
+ if admin_user == "root":
+ admin_user = "ubuntu"
+
+ f = File("/home/{}/.bashrc".format(admin_user))
+ assert not f.contains("^. \/etc\/bashrc\.securedrop_additions$")
diff --git a/testinfra/development/test_development_application_settings.py b/testinfra/development/test_development_application_settings.py
new file mode 100644
--- /dev/null
+++ b/testinfra/development/test_development_application_settings.py
@@ -0,0 +1,130 @@
+import pytest
+import os
+
+hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
+sd_test_vars = pytest.securedrop_test_vars
+
[email protected]('package', [
+ "securedrop-app-code",
+ "apache2-mpm-worker",
+ "libapache2-mod-wsgi",
+ "libapache2-mod-xsendfile",
+])
+def test_development_lacks_deb_packages(Command, package):
+ """
+ The development machine does not use Apache, but rather the Flask runner,
+ for standing up dev-friendly servers inside the VM. Therefore the
+ app-code-related deb packages should be absent.
+ """
+ # The TestInfra `Package` module doesn't offer state=absent checks,
+ # so let's call `dpkg -l` and inspect that output.
+ c = Command("dpkg -l {}".format(package))
+ assert c.rc == 1
+ assert c.stdout == ""
+ stderr = c.stderr.rstrip()
+ assert stderr == "dpkg-query: no packages found matching {}".format(package)
+
+
+def test_development_apparmor_no_complain_mode(Command, Sudo):
+ """
+ Ensure that AppArmor profiles are not set to complain mode in development.
+ The app-staging host sets profiles to complain, viz.
+
+ * usr.sbin.apache2
+ * usr.sbin.tor
+
+ but those changes should not land on the development machine.
+ """
+
+ with Sudo():
+ c = Command("aa-status")
+ if hostenv == "travis":
+ assert c.rc == 3
+ assert 'apparmor filesystem is not mounted' in c.stderr
+ else:
+ assert c.rc == 0
+ assert '0 profiles are in complain mode.' in c.stdout
+
+
[email protected]('unwanted_file', [
+ "/var/www/html",
+ "/var/www/source.wsgi",
+ "/var/www/document.wsgi",
+])
+def test_development_apache_docroot_absent(File, unwanted_file):
+ """
+ Ensure the default HTML document root is missing.
+ Development environment does not serve out of /var/www,
+ since it uses the Flask dev server, not Apache.
+ """
+ f = File(unwanted_file)
+ assert not f.exists
+
+
[email protected]('data_dir', [
+ "/var/lib/securedrop",
+ "/var/lib/securedrop/keys",
+ "/var/lib/securedrop/tmp",
+ "/var/lib/securedrop/store",
+])
+def test_development_data_directories_exist(File, data_dir):
+ """
+ Ensure that application code directories are created
+ under /vagrant for the development environment, rather than
+ /var/www as in staging and prod.
+ """
+ f = File(data_dir)
+ assert f.is_directory
+ assert f.user == sd_test_vars.securedrop_user
+ assert f.group == sd_test_vars.securedrop_user
+ assert oct(f.mode) == "0700"
+
+
+def test_development_app_directories_exist(File):
+ """
+ Ensure that application code directories are created
+ under /vagrant for the development environment, rather than
+ /var/www as in staging and prod.
+
+ Using a separate check from the data directories because /vagrant
+ will be mounted with different mode.
+ """
+ f = File(sd_test_vars.securedrop_code)
+ assert f.is_directory
+ assert f.user == sd_test_vars.securedrop_user
+ assert f.group == sd_test_vars.securedrop_user
+
+
+def test_development_clean_tmp_cron_job(Command, Sudo):
+ """
+ Ensure cron job for cleaning the temporary directory for the app code
+ exists. Also, ensure that the older format for the cron job is absent,
+ since we updated manage.py subcommands to use hyphens instead of
+ underscores (e.g. `clean_tmp` -> `clean-tmp`).
+ """
+
+ with Sudo():
+ c = Command.check_output('crontab -l')
+ assert "@daily {}/manage.py clean-tmp".format(sd_test_vars.securedrop_code) in c
+ assert "@daily {}/manage.py clean_tmp".format(sd_test_vars.securedrop_code) not in c
+ assert "clean_tmp".format(sd_test_vars.securedrop_code) not in c
+ # Make sure that the only cron lines are a comment and the actual job.
+ # We don't want any duplicates.
+ assert len(c.split("\n")) == 2
+
+
+def test_development_default_logo_exists(File):
+ """
+ Checks for default SecureDrop logo file.
+
+ TODO: Add check for custom logo file.
+ """
+
+ f = File("{}/static/i/logo.png".format(sd_test_vars.securedrop_code))
+ assert f.is_file
+ assert f.user == sd_test_vars.securedrop_user
+ assert f.group == sd_test_vars.securedrop_user
+ # check if logo is NOT the default securedrop png
+ if not f.md5sum == "92443946d5c9e05020a090f97b62d027":
+ assert oct(f.mode) == "0400"
diff --git a/testinfra/development/test_development_environment.py b/testinfra/development/test_development_environment.py
new file mode 100644
--- /dev/null
+++ b/testinfra/development/test_development_environment.py
@@ -0,0 +1,67 @@
+import pytest
+import os
+import getpass
+
+def test_development_app_dependencies(Package):
+ """
+ Ensure development apt dependencies are installed.
+ """
+ development_apt_dependencies = [
+ 'libssl-dev',
+ 'ntp',
+ 'python-dev',
+ 'python-pip',
+ ]
+ for dependency in development_apt_dependencies:
+ p = Package(dependency)
+ assert p.is_installed
+
+
[email protected]('pip_package,version', [
+ ('Flask-Testing', '0.6.2'),
+ ('Flask', '0.12.2'),
+ ('Jinja2', '2.9.6'),
+ ('MarkupSafe', '1.0'),
+ ('Werkzeug', '0.12.2'),
+ ('beautifulsoup4', '4.6.0'),
+ ('click', '6.7'),
+ ('coverage', '4.4.1'),
+ ('first', '2.0.1'),
+ ('funcsigs', '1.0.2'),
+ ('itsdangerous', '0.24'),
+ ('mock', '2.0.0'),
+ ('pbr', '3.0.1'),
+ ('pip-tools', '1.9.0'),
+ ('py', '1.4.34'),
+ ('pytest-cov', '2.5.1'),
+ ('pytest', '3.1.1'),
+ ('selenium', '2.53.6'),
+ ('six', '1.10.0'),
+])
+def test_development_pip_dependencies(Command, pip_package, version):
+ """
+ Declare SecureDrop app pip requirements. On the development VM,
+ the pip dependencies should be installed directly via pip, rather
+ than relying on the deb packages with pip-wheel inclusions.
+ Versions here are intentionally hardcoded to track changes.
+ """
+ c = Command('pip freeze')
+ assert "{}=={}".format(pip_package, version) in c.stdout.rstrip()
+
+
[email protected](getpass.getuser() != 'vagrant',
+ reason="vagrant bashrc checks dont make sense in CI")
+def test_development_securedrop_env_var(File):
+ """
+ Ensure that the SECUREDROP_ENV var is set to "dev".
+
+
+ TODO: this isn't really checking that the env var is set,
+ just that it's declared in the bashrc. spec_helper ignores
+ env vars via ssh by default, so start there.
+ """
+
+ f = File('/home/vagrant/.bashrc')
+ assert f.is_file
+ assert f.user == 'vagrant'
+ assert f.contains('^export SECUREDROP_ENV=dev$')
diff --git a/testinfra/development/test_development_networking.py b/testinfra/development/test_development_networking.py
new file mode 100644
--- /dev/null
+++ b/testinfra/development/test_development_networking.py
@@ -0,0 +1,61 @@
+import pytest
+import os
+
+hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
[email protected](hostenv == 'travis',
+ reason="Custom networking in Travis")
+def test_development_iptables_rules(Command, Sudo):
+ """
+ Declare desired iptables rules
+ The 'development' machine doesn't have any custom
+ iptables rules, so just check for the default chains.
+ """
+ desired_iptables_rules = [
+ '-P INPUT ACCEPT',
+ '-P FORWARD ACCEPT',
+ '-P OUTPUT ACCEPT',
+ ]
+ with Sudo():
+ c = Command.check_output('iptables -S')
+ for rule in desired_iptables_rules:
+ assert rule in c
+
+ # If any iptables rules are ever added, this test will
+ # fail, so tests can be written for the new rules.
+ # Counting newlines in the output simply to avoid calling
+ # `iptables -S` again and piping to `wc -l`.
+ assert c.count("\n") == len(desired_iptables_rules) - 1
+
+
+def test_development_ssh_listening(Socket):
+ """
+ Check for ssh listening on all interfaces. In prod environment,
+ SSH will be listening only on localhost, i.e. SSH over ATHS.
+ """
+ s = Socket("tcp://0.0.0.0:22")
+ assert s.is_listening
+
+
+def test_development_redis_worker(Socket):
+ """
+ Ensure that Redis worker is listening on localhost.
+ This worker is used to handle incoming submissions.
+ """
+
+ s = Socket("tcp://127.0.0.1:6379")
+ assert s.is_listening
+
+# The Flask runners for the source and journalist interfaces
+# aren't configured to run by default, e.g. on boot. Nor
+# do the app tests cause them to be run. So, we shouldn't
+# really expected them to be running.
+## check for source interface flask port listening
+#describe port(8080) do
+# it { should be_listening.on('0.0.0.0').with('tcp') }
+#end
+#
+## check for journalist interface flask port listening
+#describe port(8081) do
+# it { should be_listening.on('0.0.0.0').with('tcp') }
+#end
diff --git a/testinfra/development/test_xvfb.py b/testinfra/development/test_xvfb.py
new file mode 100644
--- /dev/null
+++ b/testinfra/development/test_xvfb.py
@@ -0,0 +1,113 @@
+import pytest
+
+
[email protected]('dependency', [
+ 'firefox',
+ 'xvfb',
+])
+def test_xvfb_apt_dependencies(Package, dependency):
+ """
+ Ensure apt requirements for Xvfb are present.
+ """
+ assert Package(dependency).is_installed
+
+
+def test_xvfb_service_config(File, Sudo):
+ """
+ Ensure xvfb service configuration file is present.
+ Using Sudo context manager because the expected mode is 700.
+ Not sure it's really necessary to have this script by 700; 755
+ sounds sufficient.
+ """
+ with Sudo():
+ f = File("/etc/init.d/xvfb")
+ assert f.is_file
+ assert oct(f.mode) == "0700"
+ assert f.user == "root"
+ assert f.group == "root"
+ # Let's hardcode the entire init script and check for exact match.
+ # The pytest output will display a diff if anything is missing.
+ xvfb_init_content = """
+# This is the /etc/init.d/xvfb script. We use it to launch xvfb at boot in the
+# development environment so we can easily run the functional tests.
+
+XVFB=/usr/bin/Xvfb
+XVFBARGS=":1 -screen 0 1024x768x24 -ac +extension GLX +render -noreset"
+PIDFILE=/var/run/xvfb.pid
+case "$1" in
+ start)
+ echo -n "Starting virtual X frame buffer: Xvfb"
+ start-stop-daemon --start --quiet --pidfile $PIDFILE --make-pidfile --background --exec $XVFB -- $XVFBARGS
+ echo "."
+ ;;
+ stop)
+ echo -n "Stopping virtual X frame buffer: Xvfb"
+ start-stop-daemon --stop --quiet --pidfile $PIDFILE
+ echo "."
+ ;;
+ restart)
+ $0 stop
+ $0 start
+ ;;
+ *)
+ echo "Usage: /etc/init.d/xvfb {start|stop|restart}"
+ exit 1
+esac
+
+exit 0
+""".lstrip().rstrip()
+ with Sudo():
+ assert f.contains('^XVFB=/usr/bin/Xvfb$')
+ assert f.contains('^XVFBARGS=":1 -screen 0 1024x768x24 '
+ '-ac +extension GLX +render -noreset"$')
+ assert f.content.rstrip() == xvfb_init_content
+
+
+def test_xvfb_service_enabled(Command, Sudo):
+ """
+ Ensure xvfb is configured to start on boot via update-rc.d.
+ The `-n` option to update-rc.d is dry-run.
+
+ Using Sudo context manager because the service file is mode 700.
+ Not sure it's really necessary to have this script by 700; 755
+ sounds sufficient.
+ """
+ with Sudo():
+ c = Command('update-rc.d -n xvfb defaults')
+ assert c.rc == 0
+ wanted_text = 'System start/stop links for /etc/init.d/xvfb already exist.'
+ assert wanted_text in c.stdout
+
+
+def test_xvfb_display_config(File):
+ """
+ Ensure DISPLAY environment variable is set on boot, for running
+ headless tests via Xvfb.
+ """
+ f = File('/etc/profile.d/xvfb_display.sh')
+ assert f.is_file
+ assert oct(f.mode) == "0444"
+ assert f.user == "root"
+ assert f.group == "root"
+ assert f.contains("export DISPLAY=:1\n")
+
+
+def test_xvfb_service_running(Process, Sudo):
+ """
+ Ensure that xvfb service is running.
+
+ We can't use the Service module because it expects a "status"
+ subcommand for the init script, and our custom version doesn't have
+ one. So let's make sure the process is running.
+ """
+ # Sudo isn't necessary to read out of /proc on development, but is
+ # required when running under Grsecurity, which app-staging does.
+ # So let's escalate privileges to ensure we can determine service state.
+ with Sudo():
+ p = Process.get(user="root", comm="Xvfb")
+ wanted_args = str('/usr/bin/Xvfb :1 -screen 0 1024x768x24 '
+ '-ac +extension GLX +render -noreset')
+ assert p.args == wanted_args
+ # We only expect a single process, no children.
+ workers = Process.filter(ppid=p.pid)
+ assert len(workers) == 0
diff --git a/testinfra/functional/test_tor_interfaces.py b/testinfra/functional/test_tor_interfaces.py
new file mode 100644
--- /dev/null
+++ b/testinfra/functional/test_tor_interfaces.py
@@ -0,0 +1,28 @@
+import os
+import re
+import pytest
+
+sdvars = pytest.securedrop_test_vars
+
[email protected]('site', sdvars.tor_url_files)
[email protected](os.environ.get('FPF_CI', 'false') == "false",
+ reason="Can only assure Tor is configured in CI atm")
+def test_www(Command, site):
+ """
+ Ensure tor interface is reachable and returns expected content.
+ """
+
+ # Extract Onion URL from saved onion file, fetched back from app-staging.
+ onion_url_filepath = os.path.join(os.path.dirname(__file__),
+ "../../install_files/ansible-base/{}".format(site['file']))
+ onion_url_raw = open(onion_url_filepath,'ro').read()
+ onion_url = re.search("\w+\.onion", onion_url_raw).group()
+
+ # Fetch Onion URL via curl to confirm interface is rendered correctly.
+ curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(onion_url)
+ curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
+
+ site_scrape = Command.check_output(curl_tor)
+ assert Command.check_output(curl_tor_status) == "200"
+ assert site['check_string'] in site_scrape
+ assert site['error_string'] not in site_scrape
diff --git a/testinfra/mon/test_network.py b/testinfra/mon/test_network.py
new file mode 100644
--- /dev/null
+++ b/testinfra/mon/test_network.py
@@ -0,0 +1,58 @@
+import os
+import difflib
+import pytest
+from jinja2 import Template
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
+def test_mon_iptables_rules(SystemInfo, Command, Sudo):
+ app_ip = securedrop_test_vars.app_ip
+
+ # Build a dict of variables to pass to jinja for iptables comparison
+ kwargs = dict(
+ app_ip=app_ip,
+ default_interface = Command.check_output("ip r | head -n 1 | awk '{ print $5 }'"),
+ tor_user_id = Command.check_output("id -u debian-tor"),
+ ssh_group_gid = Command.check_output("getent group ssh | cut -d: -f3"),
+ postfix_user_id = Command.check_output("id -u postfix"),
+ dns_server = securedrop_test_vars.dns_server)
+
+ # Build iptables scrape cmd, purge comments + counters
+ iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'"
+ environment = os.environ.get("CI_SD_ENV", "staging")
+ iptables_file = "{}/iptables-mon-{}.j2".format(
+ os.path.dirname(os.path.abspath(__file__)),
+ environment)
+
+ # template out a local iptables jinja file
+ jinja_iptables = Template(open(iptables_file,'r').read())
+ iptables_expected = jinja_iptables.render(**kwargs)
+
+ with Sudo():
+ # Actually run the iptables scrape command
+ iptables = Command.check_output(iptables)
+ # print diff comparison (only shows up in pytests if test fails or
+ # verbosity turned way up)
+ for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'),
+ iptables.split('\n')):
+ print(iptablesdiff)
+ # Conduct the string comparison of the expected and actual iptables
+ # ruleset
+ assert iptables_expected == iptables
+
+
[email protected]('ossec_service', [
+ dict(host="0.0.0.0", proto="tcp", port=22),
+ dict(host="127.0.0.1", proto="tcp", port=25),
+ dict(host="0.0.0.0", proto="udp", port=1514),
+])
+def test_listening_ports(Socket, Sudo, ossec_service):
+ """
+ Ensure the OSSEC-related services are listening on the
+ expected sockets. Services to check include ossec, mail, and ssh.
+ """
+ socket = "{proto}://{host}:{port}".format(**ossec_service)
+ with Sudo():
+ assert Socket(socket).is_listening
diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py
new file mode 100644
--- /dev/null
+++ b/testinfra/mon/test_ossec.py
@@ -0,0 +1,262 @@
+import re
+import pytest
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
[email protected]('package', [
+ 'mailutils',
+ 'ossec-server',
+ 'postfix',
+ 'procmail',
+ 'securedrop-ossec-server',
+])
+def test_ossec_package(Package, package):
+ """
+ Ensure required packages for OSSEC are installed.
+ Includes mail utilities and the FPF-maintained metapackage.
+ """
+ assert Package(package).is_installed
+
+
[email protected]('header', [
+ '/^X-Originating-IP:/ IGNORE',
+ '/^X-Mailer:/ IGNORE',
+ '/^Mime-Version:/ IGNORE',
+ '/^User-Agent:/ IGNORE',
+ '/^Received:/ IGNORE',
+])
+def test_postfix_headers(File, header):
+ """
+ Ensure postfix header filters are set correctly. Common mail headers
+ are stripped by default to avoid leaking metadata about the instance.
+ Message body is always encrypted prior to sending.
+ """
+ f = File("/etc/postfix/header_checks")
+ assert f.is_file
+ assert oct(f.mode) == "0644"
+ regex = '^{}$'.format(re.escape(header))
+ assert re.search(regex, f.content, re.M)
+
+
[email protected]('setting', [
+ 'relayhost = [smtp.gmail.com]:587',
+ 'smtp_sasl_auth_enable = yes',
+ 'smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd',
+ 'smtp_sasl_security_options = noanonymous',
+ 'smtp_use_tls = yes',
+ 'smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache',
+ 'smtp_tls_security_level = secure',
+ 'smtp_tls_CApath = /etc/ssl/certs',
+ 'smtp_tls_ciphers = high',
+ 'smtp_tls_protocols = TLSv1.2 TLSv1.1 TLSv1 !SSLv3 !SSLv2',
+ 'myhostname = ossec.server',
+ 'myorigin = $myhostname',
+ 'smtpd_banner = $myhostname ESMTP $mail_name (Ubuntu)',
+ 'biff = no',
+ 'append_dot_mydomain = no',
+ 'readme_directory = no',
+ 'smtp_header_checks = regexp:/etc/postfix/header_checks',
+ 'mailbox_command = /usr/bin/procmail',
+ 'inet_interfaces = loopback-only',
+ 'alias_maps = hash:/etc/aliases',
+ 'alias_database = hash:/etc/aliases',
+ 'mydestination = $myhostname, localhost.localdomain , localhost',
+ 'mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128',
+ 'mailbox_size_limit = 0',
+ 'recipient_delimiter = +',
+])
+def test_postfix_settings(File, setting):
+ """
+ Check all postfix configuration lines. There are technically multiple
+ configuration paths regarding the TLS settings, particularly the
+ fingerprint verification logic, but only the base default config is tested
+ currently.
+ """
+ f = File("/etc/postfix/main.cf")
+ assert f.is_file
+ assert f.user == 'root'
+ assert oct(f.mode) == "0644"
+ regex = '^{}$'.format(re.escape(setting))
+ assert re.search(regex, f.content, re.M)
+
+
+def test_ossec_connectivity(Command, Sudo):
+ """
+ Ensure ossec-server machine has active connection to the ossec-agent.
+ The ossec service will report all available agents, and we can inspect
+ that list to make sure it's the host we expect.
+ """
+ desired_output = "{}-{} is available.".format(securedrop_test_vars.app_hostname,
+ securedrop_test_vars.app_ip)
+ with Sudo():
+ c = Command.check_output("/var/ossec/bin/list_agents -a")
+ assert c == desired_output
+
+def test_ossec_gnupg(File, Sudo):
+ """ ensure ossec gpg homedir exists """
+ with Sudo():
+ f = File(OSSEC_GNUPG)
+ assert f.is_directory
+ assert f.user == "ossec"
+ assert oct(f.mode) == "0700"
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
+def test_ossec_gnupg(File, Sudo):
+ """
+ Ensures the test Admin GPG public key is present as file.
+ Does not check that it's added to the keyring for the ossec user;
+ that's handled by a separate test.
+ """
+ with Sudo():
+ f = File("/var/ossec/test_admin_key.pub")
+ assert f.is_file
+ assert oct(f.mode) == "0644"
+
+
+def test_ossec_pubkey_in_keyring(Command, Sudo):
+ """
+ Ensure the test Admin GPG public key exists in the keyring
+ within the ossec home directory.
+ """
+ ossec_gpg_pubkey_info = """pub 4096R/EDDDC102 2014-10-15
+uid Test/Development (DO NOT USE IN PRODUCTION) (Admin's OSSEC Alert GPG key) <[email protected]>
+sub 4096R/97D2EB39 2014-10-15"""
+ with Sudo("ossec"):
+ c = Command.check_output("gpg --homedir /var/ossec/.gnupg --list-keys EDDDC102")
+ assert c == ossec_gpg_pubkey_info
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
[email protected]('keyfile', [
+ '/var/ossec/etc/sslmanager.key',
+ '/var/ossec/etc/sslmanager.cert',
+])
+def test_ossec_keyfiles(File, Sudo, keyfile):
+ """
+ Ensure that the OSSEC transport key pair exists. These keys are used
+ to protect the connection between the ossec-server and ossec-agent.
+
+ All this check does in confirm they're present, it doesn't perform any
+ matching checks to validate the configuration.
+ """
+ with Sudo():
+ f = File(keyfile)
+ assert f.is_file
+ # The postinst scripts in the OSSEC deb packages set 440 on the keyfiles;
+ # the Ansible config should be updated to do the same.
+ assert oct(f.mode) == "0440"
+ assert f.user == "root"
+ assert f.group == "ossec"
+
+
[email protected]('setting', [
+ 'VERBOSE=yes',
+ 'MAILDIR=/var/mail/',
+ 'DEFAULT=$MAILDIR',
+ 'LOGFILE=/var/log/procmail.log',
+ 'SUBJECT=`formail -xSubject:`',
+ ':0 c',
+ '*^To:.*root.*',
+ '|/var/ossec/send_encrypted_alarm.sh',
+])
+def test_procmail_settings(File, Sudo, setting):
+ """
+ Ensure procmail settings are correct. These config lines determine
+ how the OSSEC email alerts are encrypted and then passed off for sending.
+ """
+ # Sudo is required to traverse the /var/ossec directory.
+ with Sudo():
+ f = File("/var/ossec/.procmailrc")
+ assert f.contains('^{}$'.format(setting))
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
+def test_procmail_attrs(File, Sudo):
+ """
+ Ensure procmail file attributes are specified correctly.
+ """
+ with Sudo():
+ f = File("/var/ossec/.procmailrc")
+ assert f.is_file
+ assert f.user == "ossec"
+ assert oct(f.mode) == "0440"
+
+
+# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
+def test_procmail_log(File, Sudo):
+ """
+ Ensure procmail log file exist with proper ownership.
+ Only the ossec user should have read/write permissions.
+ """
+ with Sudo():
+ f = File("/var/log/procmail.log")
+ assert f.is_file
+ assert f.user == "ossec"
+ assert f.group == "root"
+ assert oct(f.mode) == "0660"
+
+
+def test_ossec_authd(Command, Sudo):
+ """ Ensure that authd is not running """
+ with Sudo():
+ c = Command("pgrep ossec-authd")
+ assert c.stdout == ""
+ assert c.rc != 0
+
+# Currently failing in CI under remote hosts
+# Looks like vagrant is currently appending hostname to local IP
[email protected]
+def test_hosts_files(File, SystemInfo):
+ """ Ensure host localhost is mapping to servername """
+ f = File('/etc/hosts')
+ mon_host = securedrop_test_vars.monitor_hostname
+ assert f.contains('^127.0.0.1.*{0}$'.format(mon_host))
+
+def test_hosts_files(File, SystemInfo):
+ """ Ensure host files mapping are in place """
+ f = File('/etc/hosts')
+
+ hostname = SystemInfo.hostname
+ app_ip = securedrop_test_vars.app_ip
+ app_host = securedrop_test_vars.app_hostname
+
+ assert f.contains('^127.0.0.1.*localhost')
+ assert f.contains('^{}\s*{}$'.format(app_ip, app_host))
+
+
+def test_ossec_log_contains_no_malformed_events(File, Sudo):
+ """
+ Ensure the OSSEC log reports no errors for incorrectly formatted
+ messages. These events indicate that the OSSEC server failed to decrypt
+ the event sent by the OSSEC agent, which implies a misconfiguration,
+ likely the IPv4 address or keypair differing from what's declared.
+
+ Documentation regarding this error message can be found at:
+ http://ossec-docs.readthedocs.io/en/latest/faq/unexpected.html#id4
+ """
+ with Sudo():
+ f = File("/var/ossec/logs/ossec.log")
+ assert not f.contains("ERROR: Incorrectly formated message from")
+
+
+def test_regression_hosts(Command):
+ """ Regression test to check for duplicate entries. """
+ assert Command.check_output("uniq --repeated /etc/hosts") == ""
+
+
+def test_postfix_generic_maps(File):
+ """
+ Regression test to check that generic Postfix maps are not configured
+ by default. As of #1565 Admins can opt-in to overriding the FROM address
+ used for sending OSSEC alerts, but by default we're preserving the old
+ `[email protected]` behavior, to avoid breaking email for previously
+ existing instances.
+ """
+ assert not File("/etc/postfix/generic").exists
+ assert not File("/etc/postfix/main.cf").contains("^smtp_generic_maps")
diff --git a/testinfra/test.py b/testinfra/test.py
new file mode 100755
--- /dev/null
+++ b/testinfra/test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+"""
+Wrapper script for running Testinfra against SecureDrop VMs.
+Accepts a single argument: the hostname to run the tests against.
+Script will handle building the list of tests to run, based on hostname.
+"""
+import os
+import subprocess
+import sys
+import tempfile
+
+# By default let's assume we're testing against the development VM.
+try:
+ target_host = sys.argv[1]
+except IndexError:
+ target_host = "development"
+
+# Set env var so that `testinfra/conftest.py` can read in a YAML vars file
+# specific to the host being tested.
+os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = target_host
+
+
+def get_target_roles(target_host):
+ """
+ Assemble list of role tests to run. Hard-coded per host.
+ """
+ target_roles = {"development": ['testinfra/app-code',
+ 'testinfra/development'],
+ "app-staging": ['testinfra/app',
+ 'testinfra/app-code',
+ 'testinfra/common',
+ 'testinfra/development/test_xvfb.py'],
+ "mon-staging": ['testinfra/mon',
+ 'testinfra/common'],
+ "mon-prod": ['testinfra/mon'],
+ "apptestclient": ['testinfra/functional'],
+ "build": ['testinfra/build']}
+
+ try:
+ return target_roles[target_host]
+ except KeyError:
+ print("Unknown host '{}'! Exiting.".format(target_host))
+ sys.exit(1)
+
+
+def run_testinfra(target_host, verbose=True):
+ """
+ Handler for executing testinfra against `target_host`.
+ Queries list of roles via helper def `get_target_roles`.
+ """
+ conn_type = "ssh"
+ target_roles = get_target_roles(target_host)
+ if verbose:
+ # Print informative output prior to test run.
+ print("Running Testinfra suite against '{}'...".format(target_host))
+ print("Target roles:")
+ for role in target_roles:
+ print(" - {}".format(role))
+
+ # Prod hosts host have SSH access over Tor. Let's use the SSH backend
+ # for Testinfra, rather than Ansible. When we write a dynamic inventory
+ # script for Ansible SSH-over-Tor, we can use the Ansible backend
+ # everywhere.
+ if target_host.endswith("-prod"):
+ os.environ['SECUREDROP_SSH_OVER_TOR'] = '1'
+ # Dump SSH config to tempfile so it can be passed as arg to testinfra.
+ ssh_config_output = subprocess.check_output(["vagrant", "ssh-config", target_host])
+ # Create temporary file to store ssh-config. Not deleting it automatically
+ # because there's no sensitive info (HidServAuth is required to connect),
+ # and we'll need it outside of the context-manager block that writes to it.
+ ssh_config_tmpfile = tempfile.NamedTemporaryFile(delete=False)
+ with ssh_config_tmpfile.file as f:
+ f.write(ssh_config_output)
+ ssh_config_path = ssh_config_tmpfile.name
+ testinfra_command_template = """
+testinfra \
+ -vv \
+ -n auto \
+ --connection ssh \
+ --ssh-config \
+ {ssh_config_path}\
+ --hosts {target_host} \
+ {target_roles}
+""".lstrip().rstrip()
+
+ elif os.environ.get("FPF_CI", 'false') == 'true':
+ if os.environ.get("CI_SD_ENV","development") == "development":
+ os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = "travis"
+ ssh_config_path = ""
+ testinfra_command_template = "testinfra -vv {target_roles}"
+ else:
+ if target_host in ["build", "apptestclient"]:
+ conn_type = "docker"
+ ssh_config_path = "{}/.ssh/sshconfig-securedrop-ci-{}".format(
+ os.environ["HOME"],
+ os.environ["BUILD_NUM"])
+ testinfra_command_template = """
+testinfra \
+ -vv \
+ -n 8 \
+ --connection {connection_type} \
+ --ssh-config \
+ {ssh_config_path}\
+ --junit-xml=./{target_host}-results.xml\
+ --junit-prefix={target_host}\
+ --hosts {target_host} \
+ {target_roles}
+""".lstrip().rstrip()
+
+ else:
+ ssh_config_path = ""
+ testinfra_command_template = """
+testinfra \
+ -vv \
+ -n auto \
+ --connection ansible \
+ --ansible-inventory \
+ .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory \
+ --hosts {target_host} \
+ {target_roles}
+""".lstrip().rstrip()
+
+ testinfra_command = testinfra_command_template.format(
+ target_host=target_host,
+ ssh_config_path=ssh_config_path,
+ connection_type=conn_type,
+ target_roles=" ".join(target_roles),
+ ).split()
+
+ # Execute config tests.
+ subprocess.check_call(testinfra_command)
+
+if __name__ == "__main__":
+ run_testinfra(target_host)
| Instruct Admins to stash config changes permanently
# Feature request
## Description
In #2003 we added a note to the 0.4 upgrade docs instructing Admins to run `git checkout prod-specific.yml` after migrating their config. The team discussed concerns about accidental destruction of data from such an action, so we added a scary **Warning** box to the docs to hedge our bets.
We should instead instruct Admins to run `git stash save "old site config"`, which is a non-destructive operation. Additionally, the stash action will apply to _all_ customizations made to version-controlled files, not just `prod-specific.yml`. We should also remove modifications to the `inventory` file, which was deprecated in #1774 in favor of a dynamic inventory script.
## User Stories
As an Admin, I don't want to see scary warnings about destroying my instance configuration when following the upgrade docs. I want a clear and concise command to run that will afford "undo" capability in case I decide I made a mistake.
Error handling for `securedrop-admin install` displays incorrect command
# Bug
## Description
If `securedrop-admin tailsconfig` fails, the exception handling should make a recommendation to run `securedrop-admin sdconfig` to configure the site-specific vars. The logic to do so is already in place, but the recommendation is technically inaccurate in its reference to the script name. Screenshot:
![tails3-securedrop-admin-incorrect-sdconfig-command](https://user-images.githubusercontent.com/657862/27980904-812c08a2-6338-11e7-9c12-b807e3abb3aa.png)
## Steps to Reproduce
1. Boot Tails Admin Workstation.
2. Don't configure site-specific vars (via `securedrop-admin sdconfig`).
3. Ensure site-specific vars are absent: `rm -f install_files/ansible-base/group_vars/all/site-specific`
4. Run `securedrop-admin tailsconfig` and wait for the error.
5. Observe that the path in the exception referring to the `securedrop-admin` script is typed incorrectly.
## Expected Behavior
If the `securedrop-admin tailsconfig` command fails, it should display an informative error message instructing the Admin which steps to take to rectify.
## Actual Behavior
When the `securedrop-admin tailsconfig` command fails, it displays an incorrect error message that advises running a command that will never work.
## Comments
Should be a quick fix; just touch up the error message with accurate info. I wouldn't mind skipping display of the traceback either.
Instructions for verifying the Ubuntu .iso are unclear
# Bug
## Description
In the **[Install Ubuntu](https://docs.securedrop.org/en/latest/servers.html#install-ubuntu)** section of the SecureDrop documentation, it says:
> In order to verify the installation media, you should also download the files named SHA256SUMS and SHA256SUMS.gpg.
However, it does not provide further instruction on how to obtain these files.
## Expected Behavior
The documentation should clearly describe how to obtain the files needed to verify the Ubuntu installation media for the SecureDrop servers.
| 2017-07-26T18:29:12Z | [] | [] |
|
freedomofpress/securedrop | 2,048 | freedomofpress__securedrop-2048 | [
"1922",
"1950",
"2022",
"2034"
] | 6a20b07b9e658eba62e7cfcd30895fec1b6f0245 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.3.12'
+version = '0.4'
# The full version, including alpha/beta/rc tags.
-release = '0.3.12'
+release = '0.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/install_files/ansible-base/roles/backup/files/0.3_restore.py b/install_files/ansible-base/roles/restore/files/0.3_restore.py
similarity index 100%
rename from install_files/ansible-base/roles/backup/files/0.3_restore.py
rename to install_files/ansible-base/roles/restore/files/0.3_restore.py
diff --git a/install_files/ansible-base/roles/backup/files/restore.py b/install_files/ansible-base/roles/restore/files/restore.py
similarity index 100%
rename from install_files/ansible-base/roles/backup/files/restore.py
rename to install_files/ansible-base/roles/restore/files/restore.py
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -124,9 +124,7 @@ def check_tor2web():
@app.route('/')
def index():
- return render_template('index.html',
- custom_notification=getattr(
- config, 'CUSTOM_NOTIFICATION', ''))
+ return render_template('index.html')
def generate_unique_codename():
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.3.12'
+__version__ = '0.4'
| diff --git a/docs/test_the_installation.rst b/docs/test_the_installation.rst
--- a/docs/test_the_installation.rst
+++ b/docs/test_the_installation.rst
@@ -48,14 +48,17 @@ On each server:
#. Check that you can execute privileged commands by running ``sudo su``.
#. Verify that you are booted into a grsec kernel: run ``uname -r``
and verify that the name of the running kernel ends with ``-grsec``.
-#. Check the AppArmor status with ``sudo aa-status``. On a production
- instance all profiles should be in enforce mode.
#. Check the current applied iptables rules with ``iptables-save``. It
should output *approximately* 50 lines.
#. You should have received an email alert from OSSEC when it first
started. If not, review our :doc:`OSSEC Alerts
Guide <ossec_alerts>`.
+On the *Application Server*:
+
+#. Check the AppArmor status with ``sudo aa-status``. On a production
+ instance all profiles should be in enforce mode.
+
Test the web interfaces
-----------------------
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -162,3 +162,8 @@ def _source_deletes_a_journalist_reply(self):
def _source_logs_out(self):
self.driver.find_element_by_id('logout').click()
assert self.driver.find_element_by_css_selector('.important')
+
+ def _source_not_found(self):
+ self.driver.get(self.source_location + "/unlikely")
+ message = self.driver.find_element_by_id('page_not_found')
+ assert message.is_displayed()
diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source_notfound.py
@@ -0,0 +1,10 @@
+import source_navigation_steps
+import functional_test
+
+
+class TestSourceInterfaceBannerWarnings(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def test_not_found(self):
+ self._source_not_found()
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -13,7 +13,6 @@
import version
import utils
import json
-import config
class TestSourceApp(TestCase):
@@ -264,28 +263,6 @@ def test_submit_sanitizes_filename(self, gzipfile):
mode=ANY,
fileobj=ANY)
- def test_custom_notification(self):
- """Test that `CUSTOM_NOTIFICATION` string in config file
- is rendered on the Source Interface page. We cannot assume
- it will be present in production instances, since it is added
- via the Ansible config, not the Debian package scripts."""
- custom_msg = config.CUSTOM_NOTIFICATION
-
- dev_msg = ("This is an insecure SecureDrop Development server "
- "for testing ONLY. Do NOT submit documents here.")
- staging_msg = "This is a SecureDrop Staging VM for testing ONLY"
-
- self.assertTrue(custom_msg in (dev_msg, staging_msg))
- resp = self.client.get('/')
- self.assertEqual(resp.status_code, 200)
- # The app-tests aren't host-aware, so we can't accurately predict
- # which custom notification message we want. Let's check for both,
- # and fail only if both are not found.
- try:
- self.assertIn(dev_msg, resp.data)
- except AssertionError:
- self.assertIn(staging_msg, resp.data)
-
def test_tor2web_warning_headers(self):
resp = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
self.assertEqual(resp.status_code, 200)
diff --git a/testinfra/common/test_tor_config.py b/testinfra/common/test_tor_config.py
--- a/testinfra/common/test_tor_config.py
+++ b/testinfra/common/test_tor_config.py
@@ -53,7 +53,6 @@ def test_tor_service_running(Command, File, Sudo):
'SocksPort 0',
'SafeLogging 1',
'RunAsDaemon 1',
- 'Sandbox 1',
])
def test_tor_torrc_options(File, torrc_option):
"""
@@ -70,6 +69,19 @@ def test_tor_torrc_options(File, torrc_option):
assert f.contains("^{}$".format(torrc_option))
+def test_tor_torrc_sandbox(File):
+ """
+ Check that the `Sandbox 1` declaration is not present in the torrc.
+ The torrc manpage states this option is experimental, and although we
+ use it already on Tails workstations, further testing is required
+ before we push it out to servers. See issues #944 and #1969.
+ """
+ f = File("/etc/tor/torrc")
+    # Only `Sandbox 1` will enable, but make sure there are zero occurrences
+ # of "Sandbox", otherwise we may have a regression somewhere.
+ assert not f.contains("^.*Sandbox.*$")
+
+
def test_tor_signing_key_fingerprint(Command):
"""
The `deb.torproject.org-keyring` package manages the repo signing pubkey
| Release SecureDrop 0.4
This is a tracking issue for the upcoming release of SecureDrop 0.4 - tasks may get added or modified.
# Pre-release
- [x] Prepare changelog - @redshiftzero
- [x] Send 0.4 pre-release notification to admins to give them sufficient time to prepare - @redshiftzero
# Finish release candidate (0.4-rc1)
- [x] Branch release/0.4 off develop once all necessary milestone issues are completed - @redshiftzero
- [x] Merge in any last minute PRs (#1926, #1934) - @redshiftzero
- [x] Build test debs - @conorsch
- [x] Stand up test apt server with 0.4-rc1 packages - @conorsch
# QA (0.4-rc1)
- [x] Unit tests pass on 0.4 staging VMs - @redshiftzero
- [x] Test upgrade from 0.3.12 works on prod w/ test repo debs - @msheiny, @conorsch
- [x] Test install (not upgrade) of 0.4 works on prod w/ test repo debs - @redshiftzero
- [ ] Test upgrade from 0.3.12 works on hardware w/ test repo debs without running playbooks [skipped until second round of test debs]
- [ ] Test upgrade from 0.3.12 works on hardware w/ test repo debs and re-running Ansible playbooks from admin workstation [skipped until second round of test debs]
- [x] Test fresh install (not upgrade) of 0.4 works on hardware w/ test repo debs - @redshiftzero
# Documentation
- [x] Make documentation changes against `release/0.4`
# Finish release candidate (0.4-rc2)
- [x] Merge in remainder of fixes from 0.4-rc1
- [x] Build test debs - @conorsch
- [x] Stand up test apt server with 0.4-rc2 packages - @conorsch
# QA (0.4-rc2)
- [x] Test upgrade from 0.3.12 works on hardware w/ test repo debs without running playbooks - @conorsch
- [x] Test upgrade from 0.3.12 works on hardware w/ test repo debs and re-running Ansible playbooks from admin workstation - @conorsch
- [x] Test fresh install (not upgrade) of 0.4 works on hardware w/ test repo debs - @redshiftzero
# Release
- [x] Build final Debian packages for 0.4 - @conorsch
- [x] Release 0.4
- [x] Publish blog post about 0.4 Debian package release and instructions for admins - @redshiftzero
# Post-release
- [x] Merge release changes into master branch - @redshiftzero
- [x] Merge release changes into development branch - @redshiftzero
Error handling for `securedrop-admin install` displays incorrect command
# Bug
## Description
If `securedrop-admin tailsconfig` fails, the exception handling should make a recommendation to run `securedrop-admin sdconfig` to configure the site-specific vars. The logic to do so is already in place, but the recommendation is technically inaccurate in its reference to the script name. Screenshot:
![tails3-securedrop-admin-incorrect-sdconfig-command](https://user-images.githubusercontent.com/657862/27980904-812c08a2-6338-11e7-9c12-b807e3abb3aa.png)
## Steps to Reproduce
1. Boot Tails Admin Workstation.
2. Don't configure site-specific vars (via `securedrop-admin sdconfig`).
3. Ensure site-specific vars are absent: `rm -f install_files/ansible-base/group_vars/all/site-specific`
4. Run `securedrop-admin tailsconfig` and wait for error.
5. Observe that the path in the exception referring to the `securedrop-admin` script is typed incorrectly.
## Expected Behavior
If the `securedrop-admin tailsconfig` command fails, it should display an informative error message instructing the Admin which steps to take to rectify.
## Actual Behavior
When the `securedrop-admin tailsconfig` command fails, it displays an incorrect error message that advises running a command that will never work.
## Comments
Should be a quick fix — just touch up the error message with accurate info. I wouldn't mind skipping display of the traceback either.
Instruct Admins to stash config changes permanently
# Feature request
## Description
In #2003 we added a note to the 0.4 upgrade docs instructing Admins to run `git checkout prod-specific.yml` after migrating their config. The team discussed concerns about accidental destruction of data from such an action, so we added a scary **Warning** box to the docs to hedge our bets.
We should instead instruct Admins to run `git stash save "old site config"`, which is a non-destructive operation. Additionally, the stash action will apply to _all_ customizations made to version-controlled files, not just `prod-specific.yml`. We should also remove modifications to the `inventory` file, which was deprecated in #1774 in favor of a dynamic inventory script.
## User Stories
As an Admin, I don't want to see scary warnings about destroying my instance configuration when following the upgrade docs. I want a clear and concise command to run that will afford "undo" capability in case I decide I made a mistake.
Instructions for verifying the Ubuntu .iso are unclear
# Bug
## Description
In the **[Install Ubuntu](https://docs.securedrop.org/en/latest/servers.html#install-ubuntu)** section of the SecureDrop documentation, it says:
> In order to verify the installation media, you should also download the files named SHA256SUMS and SHA256SUMS.gpg.
However, it does not provide further instruction on how to obtain these files.
## Expected Behavior
The documentation should clearly describe how to obtain the files needed to verify the Ubuntu installation media for the SecureDrop servers.
| 2017-07-26T20:36:54Z | [] | [] |
|
freedomofpress/securedrop | 2,079 | freedomofpress__securedrop-2079 | [
"1998"
] | 5d067bb7244befca65489eaaf2fc7b78ae3e07c6 | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -46,11 +46,11 @@ def shutdown_session(exception=None):
db_session.remove()
-def get_source(sid):
+def get_source(filesystem_id):
"""Return a Source object, representing the database row, for the source
- with id `sid`"""
+ with the `filesystem_id`"""
source = None
- query = Source.query.filter(Source.filesystem_id == sid)
+ query = Source.query.filter(Source.filesystem_id == filesystem_id)
source = get_one_or_else(query, app.logger, abort)
return source
@@ -64,10 +64,10 @@ def setup_g():
g.user = Journalist.query.get(uid)
if request.method == 'POST':
- sid = request.form.get('sid')
- if sid:
- g.sid = sid
- g.source = get_source(sid)
+ filesystem_id = request.form.get('filesystem_id')
+ if filesystem_id:
+ g.filesystem_id = filesystem_id
+ g.source = get_source(filesystem_id)
def logged_in():
@@ -446,8 +446,8 @@ def account_reset_two_factor_hotp():
return render_template('account_edit_hotp_secret.html')
-def make_star_true(sid):
- source = get_source(sid)
+def make_star_true(filesystem_id):
+ source = get_source(filesystem_id)
if source.star:
source.star.starred = True
else:
@@ -455,8 +455,8 @@ def make_star_true(sid):
db_session.add(source_star)
-def make_star_false(sid):
- source = get_source(sid)
+def make_star_false(filesystem_id):
+ source = get_source(filesystem_id)
if not source.star:
source_star = SourceStar(source)
db_session.add(source_star)
@@ -464,18 +464,18 @@ def make_star_false(sid):
source.star.starred = False
[email protected]('/col/add_star/<sid>', methods=('POST',))
[email protected]('/col/add_star/<filesystem_id>', methods=('POST',))
@login_required
-def add_star(sid):
- make_star_true(sid)
+def add_star(filesystem_id):
+ make_star_true(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
[email protected]("/col/remove_star/<sid>", methods=('POST',))
[email protected]("/col/remove_star/<filesystem_id>", methods=('POST',))
@login_required
-def remove_star(sid):
- make_star_false(sid)
+def remove_star(filesystem_id):
+ make_star_false(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
@@ -505,23 +505,24 @@ def index():
return render_template('index.html', unstarred=unstarred, starred=starred)
[email protected]('/col/<sid>')
[email protected]('/col/<filesystem_id>')
@login_required
-def col(sid):
- source = get_source(sid)
- source.has_key = crypto_util.getkey(sid)
- return render_template("col.html", sid=sid, source=source)
+def col(filesystem_id):
+ source = get_source(filesystem_id)
+ source.has_key = crypto_util.getkey(filesystem_id)
+ return render_template("col.html", filesystem_id=filesystem_id,
+ source=source)
-def delete_collection(source_id):
+def delete_collection(filesystem_id):
# Delete the source's collection of submissions
- job = worker.enqueue(store.delete_source_directory, source_id)
+ job = worker.enqueue(store.delete_source_directory, filesystem_id)
# Delete the source's reply keypair
- crypto_util.delete_reply_keypair(source_id)
+ crypto_util.delete_reply_keypair(filesystem_id)
# Delete their entry in the db
- source = get_source(source_id)
+ source = get_source(filesystem_id)
db_session.delete(source)
db_session.commit()
return job
@@ -551,8 +552,9 @@ def col_process():
def col_download_unread(cols_selected):
"""Download all unread submissions from all selected sources."""
submissions = []
- for sid in cols_selected:
- id = Source.query.filter(Source.filesystem_id == sid).one().id
+ for filesystem_id in cols_selected:
+ id = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one().id
submissions += Submission.query.filter(
Submission.downloaded == false(),
Submission.source_id == id).all()
@@ -565,35 +567,36 @@ def col_download_unread(cols_selected):
def col_download_all(cols_selected):
"""Download all submissions from all selected sources."""
submissions = []
- for sid in cols_selected:
- id = Source.query.filter(Source.filesystem_id == sid).one().id
+ for filesystem_id in cols_selected:
+ id = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one().id
submissions += Submission.query.filter(
Submission.source_id == id).all()
return download("all", submissions)
def col_star(cols_selected):
- for sid in cols_selected:
- make_star_true(sid)
+ for filesystem_id in cols_selected:
+ make_star_true(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
def col_un_star(cols_selected):
- for source_id in cols_selected:
- make_star_false(source_id)
+ for filesystem_id in cols_selected:
+ make_star_false(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
[email protected]('/col/delete/<sid>', methods=('POST',))
[email protected]('/col/delete/<filesystem_id>', methods=('POST',))
@login_required
-def col_delete_single(sid):
+def col_delete_single(filesystem_id):
"""deleting a single collection from its /col page"""
- source = get_source(sid)
- delete_collection(sid)
+ source = get_source(filesystem_id)
+ delete_collection(filesystem_id)
flash(
"%s's collection deleted" %
(source.journalist_designation,), "notification")
@@ -605,8 +608,8 @@ def col_delete(cols_selected):
if len(cols_selected) < 1:
flash("No collections selected to delete!", "error")
else:
- for source_id in cols_selected:
- delete_collection(source_id)
+ for filesystem_id in cols_selected:
+ delete_collection(filesystem_id)
flash("%s %s deleted" % (
len(cols_selected),
"collection" if len(cols_selected) == 1 else "collections"
@@ -615,9 +618,9 @@ def col_delete(cols_selected):
return redirect(url_for('index'))
[email protected]('/col/<sid>/<fn>')
[email protected]('/col/<filesystem_id>/<fn>')
@login_required
-def download_single_submission(sid, fn):
+def download_single_submission(filesystem_id, fn):
"""Sends a client the contents of a single submission."""
if '..' in fn or fn.startswith('/'):
abort(404)
@@ -629,7 +632,8 @@ def download_single_submission(sid, fn):
except NoResultFound as e:
app.logger.error("Could not mark " + fn + " as downloaded: %s" % (e,))
- return send_file(store.path(sid, fn), mimetype="application/pgp-encrypted")
+ return send_file(store.path(filesystem_id, fn),
+ mimetype="application/pgp-encrypted")
@app.route('/reply', methods=('POST',))
@@ -652,14 +656,15 @@ def reply():
# Reject empty replies
if not msg:
flash("You cannot send an empty reply!", "error")
- return redirect(url_for('col', sid=g.sid))
+ return redirect(url_for('col', filesystem_id=g.filesystem_id))
g.source.interaction_count += 1
filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count,
g.source.journalist_filename)
crypto_util.encrypt(msg,
- [crypto_util.getkey(g.sid), config.JOURNALIST_KEY],
- output=store.path(g.sid, filename))
+ [crypto_util.getkey(g.filesystem_id),
+ config.JOURNALIST_KEY],
+ output=store.path(g.filesystem_id, filename))
reply = Reply(g.user, g.source, filename)
try:
@@ -678,7 +683,7 @@ def reply():
else:
flash("Thanks! Your reply has been stored.", "notification")
finally:
- return redirect(url_for('col', sid=g.sid))
+ return redirect(url_for('col', filesystem_id=g.filesystem_id))
@app.route('/regenerate-code', methods=('POST',))
@@ -689,7 +694,7 @@ def generate_code():
for item in g.source.collection:
item.filename = store.rename_submission(
- g.sid,
+ g.filesystem_id,
item.filename,
g.source.journalist_filename)
db_session.commit()
@@ -699,20 +704,20 @@ def generate_code():
(original_journalist_designation,
g.source.journalist_designation),
"notification")
- return redirect('/col/' + g.sid)
+ return redirect('/col/' + g.filesystem_id)
[email protected]('/download_unread/<sid>')
[email protected]('/download_unread/<filesystem_id>')
@login_required
-def download_unread_sid(sid):
- id = Source.query.filter(Source.filesystem_id == sid).one().id
+def download_unread_filesystem_id(filesystem_id):
+ id = Source.query.filter(Source.filesystem_id == filesystem_id).one().id
submissions = Submission.query.filter(
Submission.source_id == id,
Submission.downloaded == false()).all()
if submissions == []:
flash("No unread submissions for this source!")
- return redirect(url_for('col', sid=sid))
- source = get_source(sid)
+ return redirect(url_for('col', filesystem_id=filesystem_id))
+ source = get_source(filesystem_id)
return download(source.journalist_filename, submissions)
@@ -729,29 +734,29 @@ def bulk():
flash("No collections selected to download!", "error")
elif action in ('delete', 'confirm_delete'):
flash("No collections selected to delete!", "error")
- return redirect(url_for('col', sid=g.sid))
+ return redirect(url_for('col', filesystem_id=g.filesystem_id))
if action == 'download':
- source = get_source(g.sid)
+ source = get_source(g.filesystem_id)
return download(source.journalist_filename, selected_docs)
elif action == 'delete':
- return bulk_delete(g.sid, selected_docs)
+ return bulk_delete(g.filesystem_id, selected_docs)
elif action == 'confirm_delete':
- return confirm_bulk_delete(g.sid, selected_docs)
+ return confirm_bulk_delete(g.filesystem_id, selected_docs)
else:
abort(400)
-def confirm_bulk_delete(sid, items_selected):
+def confirm_bulk_delete(filesystem_id, items_selected):
return render_template('delete.html',
- sid=sid,
+ filesystem_id=filesystem_id,
source=g.source,
items_selected=items_selected)
-def bulk_delete(sid, items_selected):
+def bulk_delete(filesystem_id, items_selected):
for item in items_selected:
- item_path = store.path(sid, item.filename)
+ item_path = store.path(filesystem_id, item.filename)
worker.enqueue(store.secure_unlink, item_path)
db_session.delete(item)
db_session.commit()
@@ -760,7 +765,7 @@ def bulk_delete(sid, items_selected):
"Submission{} deleted.".format(
"s" if len(items_selected) > 1 else ""),
"notification")
- return redirect(url_for('col', sid=sid))
+ return redirect(url_for('col', filesystem_id=filesystem_id))
def download(zip_basename, submissions):
@@ -794,7 +799,7 @@ def download(zip_basename, submissions):
def flag():
g.source.flagged = True
db_session.commit()
- return render_template('flag.html', sid=g.sid,
+ return render_template('flag.html', filesystem_id=g.filesystem_id,
codename=g.source.journalist_designation)
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -92,9 +92,11 @@ def setup_g():
# serving a static resource that won't need to access these common values.
if logged_in():
g.codename = session['codename']
- g.sid = crypto_util.hash_codename(g.codename)
+ g.filesystem_id = crypto_util.hash_codename(g.codename)
try:
- g.source = Source.query.filter(Source.filesystem_id == g.sid).one()
+ g.source = Source.query \
+ .filter(Source.filesystem_id == g.filesystem_id) \
+ .one()
except MultipleResultsFound as e:
app.logger.error(
"Found multiple Sources when one was expected: %s" %
@@ -107,7 +109,7 @@ def setup_g():
del session['logged_in']
del session['codename']
return redirect(url_for('index'))
- g.loc = store.path(g.sid)
+ g.loc = store.path(g.filesystem_id)
@app.before_request
@@ -145,9 +147,9 @@ def generate_unique_codename():
"(Codename='{}')".format(codename))
continue
- sid = crypto_util.hash_codename(codename) # scrypt (slow)
+ filesystem_id = crypto_util.hash_codename(codename) # scrypt (slow)
matching_sources = Source.query.filter(
- Source.filesystem_id == sid).all()
+ Source.filesystem_id == filesystem_id).all()
if len(matching_sources) == 0:
return codename
@@ -167,9 +169,9 @@ def generate():
@app.route('/create', methods=['POST'])
def create():
- sid = crypto_util.hash_codename(session['codename'])
+ filesystem_id = crypto_util.hash_codename(session['codename'])
- source = Source(sid, crypto_util.display_id())
+ source = Source(filesystem_id, crypto_util.display_id())
db_session.add(source)
try:
db_session.commit()
@@ -178,7 +180,7 @@ def create():
"Attempt to create a source with duplicate codename: %s" %
(e,))
else:
- os.mkdir(store.path(sid))
+ os.mkdir(store.path(filesystem_id))
session['logged_in'] = True
return redirect(url_for('lookup'))
@@ -192,18 +194,20 @@ def wrapper(*args, **kwargs):
@async
-def async_genkey(sid, codename):
- crypto_util.genkeypair(sid, codename)
+def async_genkey(filesystem_id, codename):
+ crypto_util.genkeypair(filesystem_id, codename)
# Register key generation as update to the source, so sources will
# filter to the top of the list in the journalist interface if a
# flagged source logs in and has a key generated for them. #789
try:
- source = Source.query.filter(Source.filesystem_id == sid).one()
+ source = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one()
source.last_updated = datetime.utcnow()
db_session.commit()
except Exception as e:
- app.logger.error("async_genkey for source (sid={}): {}".format(sid, e))
+ app.logger.error("async_genkey for source "
+ "(filesystem_id={}): {}".format(filesystem_id, e))
@app.route('/lookup', methods=('GET',))
@@ -211,7 +215,7 @@ def async_genkey(sid, codename):
def lookup():
replies = []
for reply in g.source.replies:
- reply_path = store.path(g.sid, reply.filename)
+ reply_path = store.path(g.filesystem_id, reply.filename)
try:
reply.decrypted = crypto_util.decrypt(
g.codename,
@@ -229,8 +233,8 @@ def lookup():
# Generate a keypair to encrypt replies from the journalist
# Only do this if the journalist has flagged the source as one
# that they would like to reply to. (Issue #140.)
- if not crypto_util.getkey(g.sid) and g.source.flagged:
- async_genkey(g.sid, g.codename)
+ if not crypto_util.getkey(g.filesystem_id) and g.source.flagged:
+ async_genkey(g.filesystem_id, g.codename)
return render_template(
'lookup.html',
@@ -238,16 +242,16 @@ def lookup():
replies=replies,
flagged=g.source.flagged,
haskey=crypto_util.getkey(
- g.sid))
+ g.filesystem_id))
-def normalize_timestamps(sid):
+def normalize_timestamps(filesystem_id):
"""
Update the timestamps on all of the source's submissions to match that of
the latest submission. This minimizes metadata that could be useful to
investigators. See #301.
"""
- sub_paths = [store.path(sid, submission.filename)
+ sub_paths = [store.path(filesystem_id, submission.filename)
for submission in g.source.submissions]
if len(sub_paths) > 1:
args = ["touch"]
@@ -279,7 +283,7 @@ def submit():
g.source.interaction_count += 1
fnames.append(
store.save_message_submission(
- g.sid,
+ g.filesystem_id,
g.source.interaction_count,
journalist_filename,
msg))
@@ -287,7 +291,7 @@ def submit():
g.source.interaction_count += 1
fnames.append(
store.save_file_submission(
- g.sid,
+ g.filesystem_id,
g.source.interaction_count,
journalist_filename,
fh.filename,
@@ -320,11 +324,11 @@ def submit():
entropy_avail = int(
open('/proc/sys/kernel/random/entropy_avail').read())
if entropy_avail >= 2400:
- async_genkey(g.sid, g.codename)
+ async_genkey(g.filesystem_id, g.codename)
g.source.last_updated = datetime.utcnow()
db_session.commit()
- normalize_timestamps(g.sid)
+ normalize_timestamps(g.filesystem_id)
return redirect(url_for('lookup'))
@@ -335,7 +339,7 @@ def delete():
query = Reply.query.filter(
Reply.filename == request.form['reply_filename'])
reply = get_one_or_else(query, app.logger, abort)
- store.secure_unlink(store.path(g.sid, reply.filename))
+ store.secure_unlink(store.path(g.filesystem_id, reply.filename))
db_session.delete(reply)
db_session.commit()
@@ -351,7 +355,7 @@ def batch_delete():
app.logger.error("Found no replies when at least one was expected")
return redirect(url_for('lookup'))
for reply in replies:
- store.secure_unlink(store.path(g.sid, reply.filename))
+ store.secure_unlink(store.path(g.filesystem_id, reply.filename))
db_session.delete(reply)
db_session.commit()
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -95,7 +95,8 @@ def get_bulk_archive(selected_submissions, zip_directory=''):
return zip_file
-def save_file_submission(sid, count, journalist_filename, filename, stream):
+def save_file_submission(filesystem_id, count, journalist_filename, filename,
+ stream):
sanitized_filename = secure_filename(filename)
# We store file submissions in a .gz file for two reasons:
@@ -114,7 +115,7 @@ def save_file_submission(sid, count, journalist_filename, filename, stream):
encrypted_file_name = "{0}-{1}-doc.gz.gpg".format(
count,
journalist_filename)
- encrypted_file_path = path(sid, encrypted_file_name)
+ encrypted_file_path = path(filesystem_id, encrypted_file_name)
with SecureTemporaryFile("/tmp") as stf:
with gzip.GzipFile(filename=sanitized_filename,
mode='wb', fileobj=stf) as gzf:
@@ -131,14 +132,15 @@ def save_file_submission(sid, count, journalist_filename, filename, stream):
return encrypted_file_name
-def save_message_submission(sid, count, journalist_filename, message):
+def save_message_submission(filesystem_id, count, journalist_filename,
+ message):
filename = "{0}-{1}-msg.gpg".format(count, journalist_filename)
- msg_loc = path(sid, filename)
+ msg_loc = path(filesystem_id, filename)
crypto_util.encrypt(message, config.JOURNALIST_KEY, msg_loc)
return filename
-def rename_submission(sid, orig_filename, journalist_filename):
+def rename_submission(filesystem_id, orig_filename, journalist_filename):
check_submission_name = VALIDATE_FILENAME(orig_filename)
if check_submission_name:
parsed_filename = check_submission_name.groupdict()
@@ -147,7 +149,8 @@ def rename_submission(sid, orig_filename, journalist_filename):
parsed_filename['index'], journalist_filename,
parsed_filename['file_type'])
try:
- os.rename(path(sid, orig_filename), path(sid, new_filename))
+ os.rename(path(filesystem_id, orig_filename),
+ path(filesystem_id, new_filename))
except OSError:
pass
else:
@@ -165,6 +168,6 @@ def secure_unlink(fn, recursive=False):
return "success"
-def delete_source_directory(source_id):
- secure_unlink(path(source_id), recursive=True)
+def delete_source_directory(filesystem_id):
+ secure_unlink(path(filesystem_id), recursive=True)
return "success"
| diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -66,7 +66,7 @@ def test_submit_message(self):
with self.source_app as source_app:
resp = source_app.get('/generate')
resp = source_app.post('/create', follow_redirects=True)
- sid = g.sid
+ filesystem_id = g.filesystem_id
# redirected to submission form
resp = self.source_app.post('/submit', data=dict(
msg=test_msg,
@@ -109,7 +109,7 @@ def test_submit_message(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='confirm_delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name
))
@@ -123,7 +123,7 @@ def test_submit_message(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name,
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -139,7 +139,9 @@ def test_submit_message(self):
# since file deletion is handled by a polling worker, this test needs
# to wait for the worker to get the job and execute it
utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(sid, doc_name)))
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id, doc_name))
+ )
)
def test_submit_file(self):
@@ -151,7 +153,7 @@ def test_submit_file(self):
with self.source_app as source_app:
resp = source_app.get('/generate')
resp = source_app.post('/create', follow_redirects=True)
- sid = g.sid
+ filesystem_id = g.filesystem_id
# redirected to submission form
resp = self.source_app.post('/submit', data=dict(
msg="",
@@ -197,7 +199,7 @@ def test_submit_file(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='confirm_delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name
))
@@ -211,7 +213,7 @@ def test_submit_file(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name,
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -227,7 +229,9 @@ def test_submit_file(self):
# since file deletion is handled by a polling worker, this test needs
# to wait for the worker to get the job and execute it
utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(sid, doc_name)))
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id, doc_name))
+ )
)
def test_reply_normal(self):
@@ -290,7 +294,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
resp = source_app.get('/generate')
resp = source_app.post('/create', follow_redirects=True)
codename = session['codename']
- sid = g.sid
+ filesystem_id = g.filesystem_id
# redirected to submission form
resp = source_app.post('/submit', data=dict(
msg=test_msg,
@@ -318,7 +322,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
with self.journalist_app as journalist_app:
resp = journalist_app.post('/flag', data=dict(
- sid=sid))
+ filesystem_id=filesystem_id))
self.assertEqual(resp.status_code, 200)
with self.source_app as source_app:
@@ -332,12 +336,14 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Block up to 15s for the reply keypair, so we can test sending a reply
utils.async.wait_for_assertion(
- lambda: self.assertNotEqual(crypto_util.getkey(sid), None), 15)
+ lambda: self.assertNotEqual(crypto_util.getkey(filesystem_id),
+ None),
+ 15)
# Create 2 replies to test deleting on journalist and source interface
for i in range(2):
resp = self.journalist_app.post('/reply', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
msg=test_reply
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -355,11 +361,11 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Download the reply and verify that it can be decrypted with the
# journalist's key as well as the source's reply key
- sid = soup.select('input[name="sid"]')[0]['value']
+ filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
checkbox_values = [
soup.select('input[name="doc_names_selected"]')[1]['value']]
resp = self.journalist_app.post('/bulk', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
action='download',
doc_names_selected=checkbox_values
), follow_redirects=True)
@@ -368,7 +374,8 @@ def helper_test_reply(self, test_reply, expected_success=True):
zf = zipfile.ZipFile(StringIO(resp.data), 'r')
data = zf.read(zf.namelist()[0])
self._can_decrypt_with_key(data, config.JOURNALIST_KEY)
- self._can_decrypt_with_key(data, crypto_util.getkey(sid), codename)
+ self._can_decrypt_with_key(data, crypto_util.getkey(filesystem_id),
+ codename)
# Test deleting reply on the journalist interface
last_reply_number = len(
@@ -394,7 +401,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
msgid = soup.select(
'form.message > input[name="reply_filename"]')[0]['value']
resp = source_app.post('/delete', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
reply_filename=msgid
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -403,7 +410,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Make sure the reply is deleted from the filesystem
utils.async.wait_for_assertion(
lambda: self.assertFalse(os.path.exists(
- store.path(sid, msgid))))
+ store.path(filesystem_id, msgid))))
source_app.get('/logout')
@@ -427,9 +434,9 @@ def test_delete_collection(self):
# find the delete form and extract the post parameters
soup = BeautifulSoup(resp.data, 'html.parser')
delete_form_inputs = soup.select('form#delete-collection')[0]('input')
- sid = delete_form_inputs[1]['value']
+ filesystem_id = delete_form_inputs[1]['value']
col_name = delete_form_inputs[2]['value']
- resp = self.journalist_app.post('/col/delete/' + sid,
+ resp = self.journalist_app.post('/col/delete/' + filesystem_id,
follow_redirects=True)
self.assertEquals(resp.status_code, 200)
@@ -439,7 +446,7 @@ def test_delete_collection(self):
# Make sure the collection is deleted from the filesystem
utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(sid)))
+ lambda: self.assertFalse(os.path.exists(store.path(filesystem_id)))
)
def test_delete_collections(self):
@@ -470,7 +477,8 @@ def test_delete_collections(self):
# Make sure the collections are deleted from the filesystem
utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(sid)) for sid in checkbox_values])))
+ any([os.path.exists(store.path(filesystem_id))
+ for filesystem_id in checkbox_values])))
def test_filenames(self):
"""Test pretty, sequential filenames when source uploads messages
@@ -586,13 +594,13 @@ def helper_filenames_submit(self):
), follow_redirects=True)
def helper_filenames_delete(self, soup, i):
- sid = soup.select('input[name="sid"]')[0]['value']
+ filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
checkbox_values = [
soup.select('input[name="doc_names_selected"]')[i]['value']]
# delete
resp = self.journalist_app.post('/bulk', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
action='confirm_delete',
doc_names_selected=checkbox_values
), follow_redirects=True)
@@ -604,7 +612,7 @@ def helper_filenames_delete(self, soup, i):
# confirm delete
resp = self.journalist_app.post('/bulk', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
action='delete',
doc_names_selected=checkbox_values
), follow_redirects=True)
@@ -613,5 +621,5 @@ def helper_filenames_delete(self, soup, i):
# Make sure the files were deleted from the filesystem
utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(sid, doc_name))
+ any([os.path.exists(store.path(filesystem_id, doc_name))
for doc_name in checkbox_values])))
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -54,7 +54,7 @@ def test_make_password(self, mocked_pw_gen):
@patch('journalist.app.logger.error')
def test_reply_error_logging(self, mocked_error_logger):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
exception_class = StaleDataError
@@ -62,7 +62,8 @@ def test_reply_error_logging(self, mocked_error_logger):
with patch('db.db_session.commit',
side_effect=exception_class(exception_msg)):
- self.client.post(url_for('reply'), data={'sid': sid, 'msg': '_'})
+ self.client.post(url_for('reply'),
+ data={'filesystem_id': filesystem_id, 'msg': '_'})
# Notice the "potentially sensitive" exception_msg is not present in
# the log event.
@@ -73,13 +74,14 @@ def test_reply_error_logging(self, mocked_error_logger):
def test_reply_error_flashed_message(self):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
exception_class = StaleDataError
with patch('db.db_session.commit', side_effect=exception_class()):
- self.client.post(url_for('reply'), data={'sid': sid, 'msg': '_'})
+ self.client.post(url_for('reply'),
+ data={'filesystem_id': filesystem_id, 'msg': '_'})
self.assertMessageFlashed(
'An unexpected error occurred! Please check '
@@ -87,22 +89,24 @@ def test_reply_error_flashed_message(self):
def test_empty_replies_are_rejected(self):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
resp = self.client.post(url_for('reply'),
- data={'sid': sid, 'msg': ''},
+ data={'filesystem_id': filesystem_id,
+ 'msg': ''},
follow_redirects=True)
self.assertIn("You cannot send an empty reply!", resp.data)
def test_nonempty_replies_are_accepted(self):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
resp = self.client.post(url_for('reply'),
- data={'sid': sid, 'msg': '_'},
+ data={'filesystem_id': filesystem_id,
+ 'msg': '_'},
follow_redirects=True)
self.assertNotIn("You cannot send an empty reply!", resp.data)
@@ -588,8 +592,9 @@ def test_admin_page_restriction_http_posts(self):
self.assertStatus(resp, 302)
def test_user_authorization_for_gets(self):
- urls = [url_for('index'), url_for('col', sid='1'),
- url_for('download_single_submission', sid='1', fn='1'),
+ urls = [url_for('index'), url_for('col', filesystem_id='1'),
+ url_for('download_single_submission',
+ filesystem_id='1', fn='1'),
url_for('edit_account')]
for url in urls:
@@ -597,8 +602,10 @@ def test_user_authorization_for_gets(self):
self.assertStatus(resp, 302)
def test_user_authorization_for_posts(self):
- urls = [url_for('add_star', sid='1'), url_for('remove_star', sid='1'),
- url_for('col_process'), url_for('col_delete_single', sid='1'),
+ urls = [url_for('add_star', filesystem_id='1'),
+ url_for('remove_star', filesystem_id='1'),
+ url_for('col_process'),
+ url_for('col_delete_single', filesystem_id='1'),
url_for('reply'), url_for('generate_code'), url_for('bulk'),
url_for('account_new_two_factor'),
url_for('account_reset_two_factor_totp'),
@@ -740,7 +747,7 @@ def test_download_selected_submissions_from_source(self):
self._login_user()
resp = self.client.post(
'/bulk', data=dict(action='download',
- sid=source.filesystem_id,
+ filesystem_id=source.filesystem_id,
doc_names_selected=selected_fnames))
# The download request was succesful, and the app returned a zipfile
@@ -908,7 +915,8 @@ def test_download_all_selected_sources(self):
def test_add_star_redirects_to_index(self):
source, _ = utils.db_helper.init_source()
self._login_user()
- resp = self.client.post(url_for('add_star', sid=source.filesystem_id))
+ resp = self.client.post(url_for('add_star',
+ filesystem_id=source.filesystem_id))
self.assertRedirects(resp, url_for('index'))
@@ -968,9 +976,9 @@ def test_col_process_returns_404_with_bad_action(self, abort):
@patch("journalist.make_star_true")
@patch("journalist.db_session")
def test_col_star_call_db_(self, db_session, make_star_true):
- journalist.col_star(['sid'])
+ journalist.col_star(['filesystem_id'])
- make_star_true.assert_called_with('sid')
+ make_star_true.assert_called_with('filesystem_id')
@patch("journalist.db_session")
def test_col_un_star_call_db(self, db_session):
@@ -1043,44 +1051,44 @@ def setUp(self):
@patch('journalist.url_for')
@patch('journalist.redirect')
def test_add_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.add_star('sid')
+ redirect_template = journalist.add_star('filesystem_id')
self.assertEqual(redirect_template, redirect(url_for('index')))
@patch('journalist.db_session')
def test_add_star_makes_commits(self, db_session):
- journalist.add_star('sid')
+ journalist.add_star('filesystem_id')
db_session.commit.assert_called_with()
@patch('journalist.make_star_true')
def test_single_delegates_to_make_star_true(self, make_star_true):
- sid = 'sid'
+ filesystem_id = 'filesystem_id'
- journalist.add_star(sid)
+ journalist.add_star(filesystem_id)
- make_star_true.assert_called_with(sid)
+ make_star_true.assert_called_with(filesystem_id)
@patch('journalist.url_for')
@patch('journalist.redirect')
def test_remove_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.remove_star('sid')
+ redirect_template = journalist.remove_star('filesystem_id')
self.assertEqual(redirect_template, redirect(url_for('index')))
@patch('journalist.db_session')
def test_remove_star_makes_commits(self, db_session):
- journalist.remove_star('sid')
+ journalist.remove_star('filesystem_id')
db_session.commit.assert_called_with()
@patch('journalist.make_star_false')
def test_remove_star_delegates_to_make_star_false(self, make_star_false):
- sid = 'sid'
+ filesystem_id = 'filesystem_id'
- journalist.remove_star(sid)
+ journalist.remove_star(filesystem_id)
- make_star_false.assert_called_with(sid)
+ make_star_false.assert_called_with(filesystem_id)
@classmethod
def tearDownClass(cls):
| Resolve inconsistently named and confusing source IDs
## Description
When working with the SecureDrop application code, there are multiple IDs referred to as the `source_id`:
* the filesystem ID (the string used to refer to sources on disk)
* e.g. referred to as `source_id` in `delete_collection()` in `journalist.py`
* the source ID (primary key of the source in the SecureDrop database)
* e.g. referred to as `source_id` in `index()` in `journalist.py`
* the `sid` - it is unclear what this is on first inspection (it's the filesystem ID but these instances of `sid` should be replaced with which column this is referring to)
These are inconsistently named across the application and this is very confusing for new developers.
## Fix
We should resolve this confusion by using these variables in a consistent manner.
There should be two IDs corresponding to the two columns in the `sources` table:
* `filesystem_id`: replace instances of `sid` and replace any `source_id`s that actually refer to the `filesystem_id`
* `source_id`: this should _always_ refer to the primary key of the source in the `sources` table
| :+1: it got me confused when reading the code :-) | 2017-08-04T16:20:07Z | [] | [] |
freedomofpress/securedrop | 2,085 | freedomofpress__securedrop-2085 | [
"1682"
] | e55f0c61bcfc5df76f967c87c10d69315d6eff64 | diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -264,10 +264,14 @@ class Journalist(Base):
"JournalistLoginAttempt",
backref="journalist")
+ MIN_USERNAME_LEN = 3
+
def __init__(self, username, password, is_admin=False, otp_secret=None):
+ self.check_username_acceptable(username)
self.username = username
self.set_password(password)
self.is_admin = is_admin
+
if otp_secret:
self.set_hotp_secret(otp_secret)
@@ -299,6 +303,13 @@ def set_password(self, password):
self.pw_salt = self._gen_salt()
self.pw_hash = self._scrypt_hash(password, self.pw_salt)
+ @classmethod
+ def check_username_acceptable(cls, username):
+ if len(username) < cls.MIN_USERNAME_LEN:
+ raise InvalidUsernameException(
+ 'Username "{}" must be at least {} characters long.'
+ .format(username, cls.MIN_USERNAME_LEN))
+
@classmethod
def check_password_acceptable(cls, password):
# Enforce a reasonable maximum length for passwords to avoid DoS
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -19,7 +19,7 @@
import template_filters
from db import (db_session, Source, Journalist, Submission, Reply,
SourceStar, get_one_or_else, LoginThrottledException,
- PasswordError)
+ PasswordError, InvalidUsernameException)
import worker
app = Flask(__name__, template_folder=config.JOURNALIST_TEMPLATES_DIR)
@@ -164,11 +164,7 @@ def admin_index():
def admin_add_user():
if request.method == 'POST':
form_valid = True
-
username = request.form['username']
- if len(username) == 0:
- form_valid = False
- flash("Missing username", "error")
password = request.form['password']
is_admin = bool(request.form.get('is_admin'))
@@ -188,6 +184,9 @@ def admin_add_user():
flash('There was an error with the autogenerated password. '
'User not created. Please try again.', 'error')
form_valid = False
+ except InvalidUsernameException as e:
+ form_valid = False
+ flash('Invalid username: ' + str(e), "error")
except IntegrityError as e:
db_session.rollback()
form_valid = False
@@ -298,6 +297,13 @@ def admin_edit_user(user_id):
if request.method == 'POST':
if request.form['username']:
new_username = request.form['username']
+
+ try:
+ Journalist.check_username_acceptable(new_username)
+ except InvalidUsernameException as e:
+ flash('Invalid username: ' + str(e), 'error')
+ return redirect(url_for("admin_edit_user", user_id=user_id))
+
if new_username == user.username:
pass
elif Journalist.query.filter_by(
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -18,7 +18,8 @@
os.environ['SECUREDROP_ENV'] = 'dev' # noqa
import config
import crypto_util
-from db import db_session, init_db, Journalist, PasswordError
+from db import (db_session, init_db, Journalist, PasswordError,
+ InvalidUsernameException)
from management.run import run
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
@@ -105,7 +106,14 @@ def add_journalist(args):
def _get_username():
- return raw_input('Username: ')
+ while True:
+ username = raw_input('Username: ')
+ try:
+ Journalist.check_username_acceptable(username)
+ except InvalidUsernameException as e:
+ print('Invalid username: ' + str(e))
+ else:
+ return username
def _get_yubikey_usage():
@@ -121,19 +129,23 @@ def _get_yubikey_usage():
print 'Invalid answer. Please type "y" or "n"'
-def _add_user(is_admin=False):
- username = _get_username()
- print("Note: Journalist passwords are now autogenerated.")
-
+def _make_password():
while True:
password = crypto_util.genrandomid(7)
try:
Journalist.check_password_acceptable(password)
- print("This journalist's password is: {}".format(password))
- break
+ return password
except PasswordError:
continue
+
+def _add_user(is_admin=False):
+ username = _get_username()
+
+ print("Note: Journalist passwords are now autogenerated.")
+ password = _make_password()
+ print("This journalist's password is: {}".format(password))
+
is_hotp = _get_yubikey_usage()
otp_secret = None
if is_hotp:
| diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -49,7 +49,7 @@ def setUp(self):
# Add a test user to the journalist interface and log them in
# print Journalist.query.all()
self.user_pw = "corret horse battery staple haha cultural reference"
- self.user = Journalist(username="foo",
+ self.user = Journalist(username="some-username",
password=self.user_pw)
db_session.add(self.user)
db_session.commit()
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -532,6 +532,8 @@ def test_admin_add_user(self):
password=VALID_PASSWORD,
is_admin=False))
+ print resp.data.decode('utf-8')
+
self.assertRedirects(resp, url_for('admin_new_user_two_factor',
uid=max_journalist_pk+1))
@@ -541,7 +543,17 @@ def test_admin_add_user_without_username(self):
data=dict(username='',
password=VALID_PASSWORD,
is_admin=False))
- self.assertIn('Missing username', resp.data)
+ self.assertIn('Invalid username', resp.data)
+
+ def test_admin_add_user_too_short_username(self):
+ self._login_admin()
+ username = 'a' * (Journalist.MIN_USERNAME_LEN - 1)
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username=username,
+ password='pentagonpapers',
+ password_again='pentagonpapers',
+ is_admin=False))
+ self.assertIn('Invalid username', resp.data)
@patch('journalist.app.logger.error')
@patch('journalist.Journalist',
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -14,9 +14,10 @@
import time
import unittest
import version
-
import utils
+from db import Journalist
+
class TestManagePy(object):
def test_parse_args(self):
@@ -43,9 +44,14 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
- @mock.patch("__builtin__.raw_input", return_value='test')
- def test_get_username(self, mock_stdin):
- assert manage._get_username() == 'test'
+ @mock.patch("__builtin__.raw_input", return_value='jen')
+ def test_get_username_success(self, mock_stdin):
+ assert manage._get_username() == 'jen'
+
+ @mock.patch("__builtin__.raw_input",
+ side_effect=['a' * (Journalist.MIN_USERNAME_LEN - 1), 'jen'])
+ def test_get_username_fail(self, mock_stdin):
+ assert manage._get_username() == 'jen'
@mock.patch("__builtin__.raw_input", return_value='y')
def test_get_yubikey_usage_yes(self, mock_stdin):
@@ -84,6 +90,10 @@ def setup(self):
def teardown(self):
utils.env.teardown()
+ @mock.patch("__builtin__.raw_input", return_value='foo-bar-baz')
+ def test_get_username(self, mock_get_usernam):
+ assert manage._get_username() == 'foo-bar-baz'
+
def test_translate_compile_code_and_template(self):
source = [
'tests/i18n/code.py',
| There is no minimum username requirement for the JI
We specify `username = Column(String(255), nullable=False, unique=True)` among the `db.Journalist` class attributes, but `nullable=False` doesn't mean that `''` is an unacceptable string.
I think a nice way to do it would be to rename the `username` column as `_username`, and then a new `username` property whose `getter` returns `_username`, and whose `setter` will raise a new `InvalidUsernameException` if the value passed is the empty string. Since this would require a database migration, but is also not a pressing problem, it makes sense to wait for that to fix this.
An alternative is simply to put such a check into place everywhere username may be set.
| 2017-08-06T13:51:55Z | [] | [] |
|
freedomofpress/securedrop | 2,110 | freedomofpress__securedrop-2110 | [
"2107"
] | 0d76695b5bfb58cd3f38c89987b690a3e1ba34a4 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.4.1'
+version = '0.4.2'
# The full version, including alpha/beta/rc tags.
-release = '0.4.1'
+release = '0.4.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.4.1'
+__version__ = '0.4.2'
| diff --git a/testinfra/app/test_apparmor.py b/testinfra/app/test_apparmor.py
--- a/testinfra/app/test_apparmor.py
+++ b/testinfra/app/test_apparmor.py
@@ -16,6 +16,7 @@ def test_apparmor_enabled(Command, Sudo):
assert Command("aa-status --enabled").rc == 0
apache2_capabilities = [
+ 'dac_override',
'kill',
'net_bind_service',
'sys_ptrace'
| Release SecureDrop 0.4.2
This is a tracking issue for this week's release of SecureDrop 0.4.2, a bugfix release for #2105. Tasks may get added or modified.
# Prepare release
- [x] Create `release/0.4.2` branch - @conorsch
- [x] PR, test and merge the hotfix for #2105 - @conorsch
- [x] Bump version numbers and build test debian packages. - @conorsch
# QA
- [x] Confirm clean installs in staging (using the 0.4.2 rc deb packages) succeed - @conorsch
- [x] Test updates from 0.4.1 -> 0.4.2 rc and confirm upgrade restores web interfaces - @conorsch
# Release
- [x] Release 0.4.2 - @conorsch
- [x] Publish blog post about 0.4.2 bugfix - @conorsch
# Post-release
- [x] Merge release changes into master branch - @redshiftzero
- [x] Merge release changes into development branch - @redshiftzero
| 2017-08-15T16:17:32Z | [] | [] |
|
freedomofpress/securedrop | 2,121 | freedomofpress__securedrop-2121 | [
"1849"
] | 5c242d3aaabdcc24be32201a72f5e04b76b42244 | diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -8,10 +8,10 @@
import signal
import subprocess
import sys
+import time
import traceback
import version
-import psutil
import qrcode
from sqlalchemy.orm.exc import NoResultFound
@@ -231,39 +231,22 @@ def delete_user(args): # pragma: no cover
def clean_tmp(args): # pragma: no cover
- """Cleanup the SecureDrop temp directory. This is intended to be run
- as an automated cron job. We skip files that are currently in use to
- avoid deleting files that are currently being downloaded."""
- # Inspired by http://stackoverflow.com/a/11115521/1093000
- def file_in_use(fname):
- for proc in psutil.process_iter():
- try:
- open_files = proc.open_files()
- in_use = False or any([open_file.path == fname
- for open_file in open_files])
- # Early return for perf
- if in_use:
- break
- except psutil.NoSuchProcess:
- # This catches a race condition where a process ends before we
- # can examine its files. Ignore this - if the process ended, it
- # can't be using fname, so this won't cause an error.
- pass
-
- return in_use
+ """Cleanup the SecureDrop temp directory. """
+ if not os.path.exists(args.directory):
+ log.debug('{} does not exist, do nothing'.format(args.directory))
+ return 0
def listdir_fullpath(d):
- # Thanks to http://stackoverflow.com/a/120948/1093000
return [os.path.join(d, f) for f in os.listdir(d)]
- try:
- os.stat(config.TEMP_DIR)
- except OSError:
- pass
- else:
- for path in listdir_fullpath(config.TEMP_DIR):
- if not file_in_use(path):
- os.remove(path)
+ too_old = args.days * 24 * 60 * 60
+ for path in listdir_fullpath(args.directory):
+ if time.time() - os.stat(path).st_mtime > too_old:
+ os.remove(path)
+ log.debug('{} removed'.format(path))
+ else:
+ log.debug('{} modified less than {} days ago'.format(
+ path, args.days))
return 0
@@ -347,11 +330,8 @@ def get_args():
"SecureDrop application's state.")
reset_subp.set_defaults(func=reset)
# Cleanup the SD temp dir
- clean_tmp_subp = subps.add_parser('clean-tmp', help='Cleanup the '
- 'SecureDrop temp directory.')
- clean_tmp_subp.set_defaults(func=clean_tmp)
- clean_tmp_subp_a = subps.add_parser('clean_tmp', help='^')
- clean_tmp_subp_a.set_defaults(func=clean_tmp)
+ set_clean_tmp_parser(subps, 'clean-tmp')
+ set_clean_tmp_parser(subps, 'clean_tmp')
set_translate_parser(subps)
@@ -396,6 +376,24 @@ def set_translate_parser(subps):
parser.set_defaults(func=translate)
+def set_clean_tmp_parser(subps, name):
+ parser = subps.add_parser(name, help='Cleanup the '
+ 'SecureDrop temp directory.')
+ default_days = 7
+ parser.add_argument(
+ '--days',
+ default=default_days,
+ type=int,
+ help=('remove files not modified in a given number of DAYS '
+ '(default {} days)'.format(default_days)))
+ parser.add_argument(
+ '--directory',
+ default=config.TEMP_DIR,
+ help=('remove old files from DIRECTORY '
+ '(default {})'.format(config.TEMP_DIR)))
+ parser.set_defaults(func=clean_tmp)
+
+
def setup_verbosity(args):
if args.verbose:
logging.getLogger(__name__).setLevel(logging.DEBUG)
| diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -197,6 +197,35 @@ def test_translate_compile_arg(self):
assert 'code hello i18n' in mo
assert 'template hello i18n' not in mo
+ def test_clean_tmp_do_nothing(self, caplog):
+ args = argparse.Namespace(days=0,
+ directory=' UNLIKELY ',
+ verbose=logging.DEBUG)
+ manage.setup_verbosity(args)
+ manage.clean_tmp(args)
+ assert 'does not exist, do nothing' in caplog.text()
+
+ def test_clean_tmp_too_young(self, caplog):
+ args = argparse.Namespace(days=24*60*60,
+ directory=config.TEMP_DIR,
+ verbose=logging.DEBUG)
+ open(os.path.join(config.TEMP_DIR, 'FILE'), 'a').close()
+ manage.setup_verbosity(args)
+ manage.clean_tmp(args)
+ assert 'modified less than' in caplog.text()
+
+ def test_clean_tmp_removed(self, caplog):
+ args = argparse.Namespace(days=0,
+ directory=config.TEMP_DIR,
+ verbose=logging.DEBUG)
+ fname = os.path.join(config.TEMP_DIR, 'FILE')
+ with open(fname, 'a'):
+ old = time.time() - 24*60*60
+ os.utime(fname, (old, old))
+ manage.setup_verbosity(args)
+ manage.clean_tmp(args)
+ assert 'FILE removed' in caplog.text()
+
class TestSh(object):
| manage: clean-tmp races against get_bulk_archive
# Bug
## Description
**./manage clean-tmp** races against get_bulk_archive
## Steps to Reproduce
* In the app-staging VM (the development server of werkzeug does not support X-Sendfile and won't reproduce the race)
* modify the journalist.py download function like so:
<pre>
def download(zip_basename, submissions):
"""Send client contents of zipfile *zip_basename*-<timestamp>.zip
containing *submissions*. The zipfile, being a
:class:`tempfile.NamedTemporaryFile`, is stored on disk only
temporarily.
:param str zip_basename: The basename of the zipfile download.
:param list submissions: A list of :class:`db.Submission`s to
include in the zipfile.
"""
zf = store.get_bulk_archive(submissions,
zip_directory=zip_basename)
attachment_filename = "{}--{}.zip".format(
zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
# Mark the submissions that have been downloaded as such
for submission in submissions:
submission.downloaded = True
db_session.commit()
r = send_file(zf.name, mimetype="application/zip",
attachment_filename=attachment_filename,
as_attachment=True)
del zf
import time
time.sleep(30)
return r
</pre>
* Download a collection from the journalist interface
* After a file shows up in /var/lib/securedrop/tmp
* ./manage clean-tmp
* Apache will fail to find the file path created by get_bulk_archive (the path is found in the X-Sendfile header)
* /var/log/apache2/journalist-error.log will show an error looking like this:
<pre>
[Fri Aug 25 11:03:20.596664 2017] [:info] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] mod_wsgi (pid=3259, process='journalist', application='app-staging:8080|'): Loading WSGI script '/var/www/journalist.wsgi'.
[Fri Aug 25 11:03:51.168580 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] [2017-08-25 11:03:51,154] ERROR in app: Exception on /col/process [POST]
[Fri Aug 25 11:03:51.168672 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] Traceback (most recent call last):
[Fri Aug 25 11:03:51.168693 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1982, in wsgi_app
[Fri Aug 25 11:03:51.168711 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] response = self.full_dispatch_request()
[Fri Aug 25 11:03:51.168729 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1614, in full_dispatch_request
[Fri Aug 25 11:03:51.168746 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] rv = self.handle_user_exception(e)
[Fri Aug 25 11:03:51.168762 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1517, in handle_user_exception
[Fri Aug 25 11:03:51.168778 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] reraise(exc_type, exc_value, tb)
[Fri Aug 25 11:03:51.168793 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1612, in full_dispatch_request
[Fri Aug 25 11:03:51.168810 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] rv = self.dispatch_request()
[Fri Aug 25 11:03:51.168854 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1598, in dispatch_request
[Fri Aug 25 11:03:51.168871 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] return self.view_functions[rule.endpoint](**req.view_args)
[Fri Aug 25 11:03:51.168886 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/var/www/securedrop/journalist.py", line 89, in wrapper
[Fri Aug 25 11:03:51.168901 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] return func(*args, **kwargs)
[Fri Aug 25 11:03:51.168915 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/var/www/securedrop/journalist.py", line 548, in col_process
[Fri Aug 25 11:03:51.168929 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] return method(cols_selected)
[Fri Aug 25 11:03:51.168943 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/var/www/securedrop/journalist.py", line 572, in col_download_all
[Fri Aug 25 11:03:51.168958 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] return download("all", submissions)
[Fri Aug 25 11:03:51.168972 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/var/www/securedrop/journalist.py", line 792, in download
[Fri Aug 25 11:03:51.168988 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] as_attachment=True)
[Fri Aug 25 11:03:51.169002 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/local/lib/python2.7/dist-packages/flask/helpers.py", line 544, in send_file
[Fri Aug 25 11:03:51.169016 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] fsize = os.path.getsize(filename)
[Fri Aug 25 11:03:51.169031 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] File "/usr/lib/python2.7/genericpath.py", line 49, in getsize
[Fri Aug 25 11:03:51.169049 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] return os.stat(filename).st_size
[Fri Aug 25 11:03:51.169069 2017] [:error] [pid 3259:tid 3087374333696] [remote 127.0.0.1:45020] OSError: [Errno 2] No such file or directory: '/var/lib/securedrop/tmp/tmp_securedrop_bulk_dl_QLYC7z'
[Fri Aug 25 11:03:58.116025 2017] [:info] [pid 3258:tid 3087374976768] mod_wsgi (pid=3258): Create interpreter 'app-staging:8080|'.
[Fri Aug 25 11:03:58.117313 2017] [:info] [pid 3258:tid 3087374976768] mod_wsgi (pid=3258): Adding '/var/www/securedrop' to path.
[Fri Aug 25 11:03:58.117892 2017] [:info] [pid 3258:tid 3087374976768] [remote 127.0.0.1:45020] mod_wsgi (pid=3258, process='journalist', application='app-staging:8080|'): Loading WSGI script '/var/www/journalist.wsgi'.
</pre>
## Expected Behavior
./manage clean-tmp never races against get_bulk_archive
## Actual Behavior
There is a window of opportunity for **./manage clean-tmp** to remove a file and fail a download
## Comments
This is unlikely but possible. The current safeguard only addresses part of the race. It should remove files older than X days instead.
| 2017-08-15T22:28:28Z | [] | [] |
|
freedomofpress/securedrop | 2,256 | freedomofpress__securedrop-2256 | [
"844"
] | 118d4f0fccbd2576ba5b659cbf794499577788a0 | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -15,6 +15,7 @@
import config
import version
import crypto_util
+from rm import srm
import store
import template_filters
from db import (db_session, Source, Journalist, Submission, Reply,
@@ -526,7 +527,7 @@ def col(filesystem_id):
def delete_collection(filesystem_id):
# Delete the source's collection of submissions
- job = worker.enqueue(store.delete_source_directory, filesystem_id)
+ job = worker.enqueue(srm, store.path(filesystem_id))
# Delete the source's reply keypair
crypto_util.delete_reply_keypair(filesystem_id)
@@ -767,7 +768,7 @@ def confirm_bulk_delete(filesystem_id, items_selected):
def bulk_delete(filesystem_id, items_selected):
for item in items_selected:
item_path = store.path(filesystem_id, item.filename)
- worker.enqueue(store.secure_unlink, item_path)
+ worker.enqueue(srm, item_path)
db_session.delete(item)
db_session.commit()
diff --git a/securedrop/rm.py b/securedrop/rm.py
new file mode 100644
--- /dev/null
+++ b/securedrop/rm.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+import subprocess
+
+
+def srm(fn):
+ subprocess.check_call(['srm', '-r', fn])
+ return "success"
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -18,6 +18,7 @@
import json
import version
import crypto_util
+from rm import srm
import store
import template_filters
from db import db_session, Source, Submission, Reply, get_one_or_else
@@ -340,7 +341,7 @@ def delete():
query = Reply.query.filter(
Reply.filename == request.form['reply_filename'])
reply = get_one_or_else(query, app.logger, abort)
- store.secure_unlink(store.path(g.filesystem_id, reply.filename))
+ srm(store.path(g.filesystem_id, reply.filename))
db_session.delete(reply)
db_session.commit()
@@ -356,7 +357,7 @@ def batch_delete():
app.logger.error("Found no replies when at least one was expected")
return redirect(url_for('lookup'))
for reply in replies:
- store.secure_unlink(store.path(g.filesystem_id, reply.filename))
+ srm(store.path(g.filesystem_id, reply.filename))
db_session.delete(reply)
db_session.commit()
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -5,7 +5,6 @@
import zipfile
import crypto_util
import tempfile
-import subprocess
import gzip
from werkzeug import secure_filename
@@ -156,18 +155,3 @@ def rename_submission(filesystem_id, orig_filename, journalist_filename):
else:
return new_filename # Only return new filename if successful
return orig_filename
-
-
-def secure_unlink(fn, recursive=False):
- verify(fn)
- command = ['srm']
- if recursive:
- command.append('-r')
- command.append(fn)
- subprocess.check_call(command)
- return "success"
-
-
-def delete_source_directory(filesystem_id):
- secure_unlink(path(filesystem_id), recursive=True)
- return "success"
| diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -6,7 +6,6 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
from db import db_session
-import mock
import store
import utils
@@ -68,15 +67,3 @@ def test_rename_valid_submission(self):
source.filesystem_id, old_filename,
new_journalist_filename)
self.assertEquals(actual_filename, expected_filename)
-
- @mock.patch('store.subprocess.check_call')
- def test_secure_unlink(self, mock_check_call):
- path = os.path.join(config.STORE_DIR, 'FILENAME')
- self.assertEqual(store.secure_unlink(path), "success")
- mock_check_call.assert_called_with(['srm', path])
-
- @mock.patch('store.subprocess.check_call')
- def test_delete_source_directory(self, mock_check_call):
- path = os.path.join(config.STORE_DIR, 'DIRNAME')
- self.assertEqual(store.delete_source_directory('DIRNAME'), "success")
- mock_check_call.assert_called_with(['srm', '-r', path])
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -69,10 +69,7 @@ def teardown():
db_session.remove()
try:
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
+ assert not os.path.exists(config.SECUREDROP_DATA_ROOT) # safeguard for #844
except OSError as exc:
- os.system("find " + config.SECUREDROP_DATA_ROOT) # REMOVE ME, see #844
if 'No such file or directory' not in exc:
raise
- except:
- os.system("find " + config.SECUREDROP_DATA_ROOT) # REMOVE ME, see #844
- raise
| Intermittent Travis test failures (Directory not empty: '/tmp/securedrop/keys')
Recently it seems to me that Travis has begun failing more often than it should. Most of the failures are not actual test failures, but appear to be related to the setup and teardown for each test. Re-running the Travis build usually results in all tests passing.
I will post relevant logs in comments on this issue as they occur. It does not appear to be possible to recover the logs from a failed build once you've restarted it :disappointed:
| Here's an example log for one of these intermittent failures: https://gist.github.com/garrettr/65271f6b1c31b03487a3
Another log for a similar intermittent failure: https://gist.github.com/garrettr/f0a0a603ee025ba16223
Here's another example, which looks like it might be a different flavor than the first two: https://gist.github.com/garrettr/04d58b02489c185c3081
Sometimes Pip is flaky too: https://gist.github.com/garrettr/dbe8e77b91fa6db1c4f9. I'm not sure if there's anything we could do about that (besides switching to a better CI system that allows us to use custom base images...)
Another rmtree-related intermittent test failure: https://gist.github.com/garrettr/ad6c791748abc310d267
And another one: https://gist.github.com/garrettr/3b778b308213f19c8e3b
This is an interesting one, in test_regenerate_valid_lengths: https://gist.github.com/garrettr/28f00ae6a2f45d44692a
Adding p-high, since this makes the CI less valuable and repeatedly wastes developer time.
Started looking at this today. There are approximately 3 distinct classes of errors reported in this issue, but the most common by far is errors in `shutil.rmtree` when it's called from `common.clean_root`.
Here's a [recent traceback](https://gist.github.com/garrettr/90c2ffb142f3ee69da20). I have a theory about what's going on here. I think this is a race condition caused by python-gnupg's use of daemon threads, similar to my theory about the cause of [this other intermittent test issue](https://github.com/freedomofpress/securedrop/issues/618#issuecomment-60293098).
Here's what I think happens:
1. Test calls some python-gnupg functions, which shell out to gnupg. python-gnupg [uses daemon threads](https://github.com/isislovecruft/python-gnupg/blob/2beac24161b07e40642d93f5d85faeb9740f78db/gnupg/_meta.py#L716), which means the process may shut down before the threads have completed.
2. The gnupg processes take variable amounts of time because they are doing crypto operations (especially key gen, since the time that takes depends on `/dev/random`'s blocking behavior).
3. Sometimes the test finishes before gpg finishes doing something that the test asked it to do.
4. The test's `tearDown` enters `clean_root`, which calls `shutil.rmtree`.
5. `shutil.rmtree` calls `os.listdir` and gets a list of files in the `SECUREDROP_DATA_ROOT`. It starts recursively deleting files.
6. gpg finishes, and cleans up some temporary files (e.g. the lockfile `/tmp/securedrop/keys/pubring.gpg.lock` and whatever the heck `/tmp/securedrop/keys/.#lk0x7f187816dac0.testing-worker-linux-e7a62211-2-2367-linux-7-56489264.17090` is).
7. `shutil.rmtree` is still deleting things recursively, using the list of files it got from `os.listdir` before gpg exited. This is the race condition.
8. It attempts to delete a file that was deleted after `os.listdir`, when gpg exited, and results in the errors we are seeing here.
tests that failed in `shutil.rmtree`:
- TestIntegration.test_delete_collection
- TestIntegration.test_filenames (2x)
- TestSource.test_regenerate_invalid_lengths
- TestSource.test_submit_message
- TestStore.test_get_zip
There is not an obvious pattern here - the failures are spread across a variety of modules and test functions. Note that all test functions initialize gpg and import keys, due to `common.init_gpg`.
Here's a new one: https://gist.github.com/garrettr/59ad3112df829887caa4
@garrettr Removed this one, then had second thoughts. Think you should make the call on the status of this one + it's targeted milestone.
@fowlslegs I'm not convinced this is actually a problem anymore. We've made a lot of changes to the app test suite, and we tend to trust Travis's failures to mean "take a close look at this PR." Very occasionally I have to manually retrigger a Travis buildβis that something you've been doing more frequently than I? If not, we may be able to close this until it becomes a problem again.
@conorsch I'm inclined to agree, but want to wait for @garrettr to chime in.
We're still seeing [OSError: [Errno 39] Directory not empty: '/tmp/securedrop/keys'](https://gist.github.com/garrettr/65271f6b1c31b03487a3) two years later. It's relatively rare but happens once every 50 runs or so (gut feeling, I'm not collecting stats ;-).
Here is [a travis build with the same error](https://travis-ci.org/freedomofpress/securedrop/builds/261809368?utm_source=github_status&utm_medium=notification) dated August 22.
<pre>
_____________________ TestSourceApp.test_login_and_logout ______________________
self =
def tearDown(self):
> utils.env.teardown()
tests/test_source.py:27:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/utils/env.py:64: in teardown
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
/usr/lib/python2.7/shutil.py:247: in rmtree
rmtree(fullname, ignore_errors, onerror)
/usr/lib/python2.7/shutil.py:256: in rmtree
onerror(os.rmdir, path, sys.exc_info())
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
path = '/tmp/securedrop/keys', ignore_errors = False
onerror =
def rmtree(path, ignore_errors=False, onerror=None):
"""Recursively delete a directory tree.
If ignore_errors is set, errors are ignored; otherwise, if onerror
is set, it is called to handle the error with arguments (func,
path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
path is the argument to that function that caused it to fail; and
exc_info is a tuple returned by sys.exc_info(). If ignore_errors
is false and onerror is None, an exception is raised.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
try:
if os.path.islink(path):
# symlinks to directories are forbidden, see bug #1669
raise OSError("Cannot call rmtree on a symbolic link")
except OSError:
onerror(os.path.islink, path, sys.exc_info())
# can't continue even if onerror hook returns
return
names = []
try:
names = os.listdir(path)
except os.error, err:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error, err:
onerror(os.remove, fullname, sys.exc_info())
try:
> os.rmdir(path)
E OSError: [Errno 39] Directory not empty: '/tmp/securedrop/keys'
/usr/lib/python2.7/shutil.py:254: OSError
</pre>
Update August 31: Hum, this example is actually not good because it happened *before* the teardown made sure all threads are joined. We see early in the [a travis output](https://travis-ci.org/freedomofpress/securedrop/builds/261809368?utm_source=github_status&utm_medium=notification)
<pre>
ests/test_source.py::TestSourceApp::test_login_and_logout [2017-08-07 12:41:02,739] ERROR in source: async_genkey for source (sid=FGZ5ZOHX7Y7AFKCEJK6O26V6AV5Q6ZFNWPBONNOUQ23FG5TQJB4GNO3XFV647CAXV4IJ6W5ARZSM2OSJXSWY3FX7I47GVPVNCOISEVI=): (sqlite3.OperationalError) no such table: sources [SQL: u'SELECT sources.id AS sources_id, sources.filesystem_id AS sources_filesystem_id, sources.journalist_designation AS sources_journalist_designation, sources.flagged AS sources_flagged, sources.last_updated AS sources_last_updated, sources.pending AS sources_pending, sources.interaction_count AS sources_interaction_count \nFROM sources \nWHERE sources.filesystem_id = ?'] [parameters: ('FGZ5ZOHX7Y7AFKCEJK6O26V6AV5Q6ZFNWPBONNOUQ23FG5TQJB4GNO3XFV647CAXV4IJ6W5ARZSM2OSJXSWY3FX7I47GVPVNCOISEVI=',)]
FAILED
</pre>
meaning the root of the problem was indeed that gnupg was racing with teardown.
> I have a theory about what's going on here. I think this is a race condition caused by python-gnupg's use of daemon threads
This was fixed with https://github.com/freedomofpress/securedrop/pull/2123 but the problem remains.
Yeah ! Have a reproducer. It takes a few hours but it's here ! Points to async gpg events indeed: must be in the background and no longer a child of the process.
<pre>
time while : ; do pytest tests || break; done
</pre>
<pre>
================================================================================================== FAILURES ===================================================================================================
__________________________________________________________________________ TestJournalistApp.test_delete_source_deletes_docs_on_disk __________________________________________________________________________
self = <tests.test_journalist.TestJournalistApp testMethod=test_delete_source_deletes_docs_on_disk>
def setUp(self):
> utils.env.setup()
tests/test_journalist.py:37:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/utils/env.py:50: in setup
create_directories()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def create_directories():
"""Create directories for the file store and the GPG keyring.
"""
for d in (config.SECUREDROP_DATA_ROOT, config.STORE_DIR,
config.GPG_KEY_DIR, config.TEMP_DIR):
if not isdir(d):
> os.mkdir(d)
E OSError: [Errno 17] File exists: '/tmp/securedrop/keys'
tests/utils/env.py:33: OSError
============================================================================================== warnings summary ===============================================================================================
:171
'pytest_runtest_makereport' hook uses deprecated __multicall__ argument
None
pytest_funcarg__caplog: declaring fixtures using "pytest_funcarg__" prefix is deprecated and scheduled to be removed in pytest 4.0. Please remove the prefix and use the @pytest.fixture decorator instead.
pytest_funcarg__capturelog: declaring fixtures using "pytest_funcarg__" prefix is deprecated and scheduled to be removed in pytest 4.0. Please remove the prefix and use the @pytest.fixture decorator instead.
-- Docs: http://doc.pytest.org/en/latest/warnings.html
======================================================================= 1 failed, 187 passed, 41 skipped, 3 warnings in 166.54 seconds ========================================================================
real 111m33.239s
user 42m52.663s
sys 10m0.710s
</pre>
After running the reproducer during hours the error did not show. It obviously is a case of the gnupg subprocess still working on the key subdirectory when teardown() is called. When rmtree fails to remove the key directory because it is not empty, it means gnupg added a file *after* rmtree removed all files in the key directory. When **create_directories** cannot create the key directory because it already exists, it means gnupg managed to recreate it after rmtree from the previous test killed it.
The gnupg python module indead runs [gnupg asynchronously](https://github.com/isislovecruft/python-gnupg/blob/master/gnupg/_meta.py#L726) and has two threads pending to read/write to it. However, even though these threads are daemonic (i.e. the python process could exit without waiting for them), they are returned by [threading.enumerate()](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/tests/utils/env.py#L66) and joined() before rmtree is run, therefore providing a guarantee that no gnupg process is pending.
The race is hidden not too far from this code path. The plot thickens.
I may have run the reproducer on a branch that did not have the **join** in the teardown. I'll wait until the error shows up once more to be sure it's still here. Right now I can't see how it could happen. And joining all threads returned by threading.enumerate() in teardown seems to be an effective way to ensure there is no overlap.
Not joining the **async_keygen()** thread explains the rogue key directory 100% given the following code in gnupg-2.3.0/gnupg/gnupg.py::gen_key
<pre>
for d in map(lambda x: os.path.dirname(x),
[self.temp_keyring, self.temp_secring]):
if not os.path.exists(d):
os.makedirs(d)
</pre>
It happened again, twice in a row ! This what a travis run, attaching the full log, it will hopefully give us something to work with :-)
* 1 x tests/test_journalist.py::TestJournalistApp::test_delete_source_deletes_source_key FAILED
* 3 x tests/test_journalist.py::TestJournalistApp::test_delete_collection_updates_db FAILED
[arg.txt.gz](https://github.com/freedomofpress/securedrop/files/1277834/arg.txt.gz)
[arg2.txt.gz](https://github.com/freedomofpress/securedrop/files/1277943/arg2.txt.gz)
[arg3.txt.gz](https://github.com/freedomofpress/securedrop/files/1278301/arg3.txt.gz)
[arg4.txt.gz](https://github.com/freedomofpress/securedrop/files/1278790/arg4.txt.gz)
So, this turns out to be a race due to the fact that the redis triggered job loads gnupg which tries to create directories although its only responsibility should be to remove files asynchronously.
IMO the right fix is to get rid of redis + workers and have a thread in charge of removing files asynchronously to not block the main process when/if it takes time.
The race we are observing has been extraordinarily difficult to find, the code path leading to the actual error is poorly tested, packaging and running the worker is ad-hoc, redis and the worker are two daemons required to run tests and that makes them more complicated to maintain and stabilize and any error occuring on a redis delegated task is serialized in a cryptic way in the **failed** queue of [rq](http://python-rq.org/). The minimal fix would be to move the removal functions from store.py to an independent file. It would also make sense to improve the conditions under which an error can be diagnosed and fix by changing the worker daemon to catch and display exceptions in a log instead of silently ignoring them. It would also be good for tests to be able to monitor the remaining jobs in the worker.py queue (yes, both the worker daemon and the worker client have the same name which is a bit confusing, renaming one of them would help) and wait for them to complete (or assert their failure).
This effort to consolidate the current code path is, IMHO, overkill. It would be less work and lead to a more stable and maintainable implementation to just remove redis and the worker. Instead a **Shredder** class could provide the same functionality. An instance of the **Shredder** class is created by the journalist app with a **config.SHREDDER_DIR**. It creates a thread that removes (via **srm**) all files from this directory and sleeps on inotify when there is nothing left. The journalist interface then calls the **rm** method of the **Shredder** instance which **mv** the file/directory to **config.SHREDDER_DIR**: this is an atomic and instantaneous system call (assuming the same file system, otherwise it just fails). If the journalist process crashes and some files are left in **config.SHREDDER_DIR** they will be taken care of when the journalist app restarts.
See also #1469 for a another discussion. Not motivated by an actual bug but raising similar concerns. | 2017-09-06T00:13:43Z | [] | [] |
freedomofpress/securedrop | 2,286 | freedomofpress__securedrop-2286 | [
"2132",
"1087",
"2169"
] | 85a6a4daf283db7f9f14083a40ad5675ab678165 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.4.2'
+version = '0.4.3'
# The full version, including alpha/beta/rc tags.
-release = '0.4.2'
+release = '0.4.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/install_files/ansible-base/callback_plugins/ansible_version_check.py b/install_files/ansible-base/callback_plugins/ansible_version_check.py
new file mode 100644
--- /dev/null
+++ b/install_files/ansible-base/callback_plugins/ansible_version_check.py
@@ -0,0 +1,28 @@
+# -*- encoding:utf-8 -*-
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import sys
+
+import ansible
+
+try:
+ # Version 2.0+
+ from ansible.plugins.callback import CallbackBase
+except ImportError:
+ CallbackBase = object
+
+
+def print_red_bold(text):
+ print('\x1b[31;1m' + text + '\x1b[0m')
+
+
+class CallbackModule(CallbackBase):
+ def __init__(self):
+ # Can't use `on_X` because this isn't forwards compatible with Ansible 2.0+
+ required_version = '2.2' # Keep synchronized with group_vars/all/main.yml
+ if not ansible.__version__.startswith(required_version):
+ print_red_bold(
+ "SecureDrop restriction: only Ansible {version}.* is supported. "
+ .format(version=required_version)
+ )
+ sys.exit(1)
diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -14,6 +14,7 @@
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Binary
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
+from jinja2 import Markup
import scrypt
import pyotp
@@ -216,9 +217,16 @@ class BadTokenException(Exception):
"""Raised when a user logins in with an incorrect TOTP token"""
-class InvalidPasswordLength(Exception):
+class PasswordError(Exception):
+
+ """Generic error for passwords that are invalid.
+ """
+
+
+class InvalidPasswordLength(PasswordError):
"""Raised when attempting to create a Journalist or log in with an invalid
- password length"""
+ password length.
+ """
def __init__(self, password):
self.pw_len = len(password)
@@ -232,6 +240,12 @@ def __str__(self):
)
+class NonDicewarePassword(PasswordError):
+
+ """Raised when attempting to validate a password that is not diceware-like
+ """
+
+
class Journalist(Base):
__tablename__ = "journalists"
id = Column(Integer, primary_key=True)
@@ -251,10 +265,14 @@ class Journalist(Base):
"JournalistLoginAttempt",
backref="journalist")
+ MIN_USERNAME_LEN = 3
+
def __init__(self, username, password, is_admin=False, otp_secret=None):
+ self.check_username_acceptable(username)
self.username = username
self.set_password(password)
self.is_admin = is_admin
+
if otp_secret:
self.set_hotp_secret(otp_secret)
@@ -274,20 +292,38 @@ def _scrypt_hash(self, password, salt, params=None):
return scrypt.hash(str(password), salt, **params)
MAX_PASSWORD_LEN = 128
- MIN_PASSWORD_LEN = 12
+ MIN_PASSWORD_LEN = 14
def set_password(self, password):
+ self.check_password_acceptable(password)
+
# Don't do anything if user's password hasn't changed.
if self.pw_hash and self.valid_password(password):
return
+
+ self.pw_salt = self._gen_salt()
+ self.pw_hash = self._scrypt_hash(password, self.pw_salt)
+
+ @classmethod
+ def check_username_acceptable(cls, username):
+ if len(username) < cls.MIN_USERNAME_LEN:
+ raise InvalidUsernameException(
+ 'Username "{}" must be at least {} characters long.'
+ .format(username, cls.MIN_USERNAME_LEN))
+
+ @classmethod
+ def check_password_acceptable(cls, password):
# Enforce a reasonable maximum length for passwords to avoid DoS
- if len(password) > self.MAX_PASSWORD_LEN:
+ if len(password) > cls.MAX_PASSWORD_LEN:
raise InvalidPasswordLength(password)
+
# Enforce a reasonable minimum length for new passwords
- if len(password) < self.MIN_PASSWORD_LEN:
+ if len(password) < cls.MIN_PASSWORD_LEN:
raise InvalidPasswordLength(password)
- self.pw_salt = self._gen_salt()
- self.pw_hash = self._scrypt_hash(password, self.pw_salt)
+
+ # Ensure all passwords are "diceware-like"
+ if len(password.split()) < 7:
+ raise NonDicewarePassword()
def valid_password(self, password):
# Avoid hashing passwords that are over the maximum length
@@ -334,7 +370,7 @@ def shared_secret_qrcode(self):
svg_out = StringIO()
img.save(svg_out)
- return svg_out.getvalue()
+ return Markup(svg_out.getvalue())
@property
def formatted_otp_secret(self):
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
-import os
from datetime import datetime
import functools
@@ -8,8 +7,10 @@
url_for, g, abort, session)
from flask_wtf.csrf import CSRFProtect
from flask_assets import Environment
-from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
+from jinja2 import Markup
+from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError
+from sqlalchemy.sql.expression import false
import config
import version
@@ -17,8 +18,8 @@
import store
import template_filters
from db import (db_session, Source, Journalist, Submission, Reply,
- SourceStar, get_one_or_else, WrongPasswordException,
- LoginThrottledException, InvalidPasswordLength)
+ SourceStar, get_one_or_else, LoginThrottledException,
+ PasswordError, InvalidUsernameException)
import worker
app = Flask(__name__, template_folder=config.JOURNALIST_TEMPLATES_DIR)
@@ -45,11 +46,11 @@ def shutdown_session(exception=None):
db_session.remove()
-def get_source(sid):
+def get_source(filesystem_id):
"""Return a Source object, representing the database row, for the source
- with id `sid`"""
+ with the `filesystem_id`"""
source = None
- query = Source.query.filter(Source.filesystem_id == sid)
+ query = Source.query.filter(Source.filesystem_id == filesystem_id)
source = get_one_or_else(query, app.logger, abort)
return source
@@ -63,10 +64,10 @@ def setup_g():
g.user = Journalist.query.get(uid)
if request.method == 'POST':
- sid = request.form.get('sid')
- if sid:
- g.sid = sid
- g.source = get_source(sid)
+ filesystem_id = request.form.get('filesystem_id')
+ if filesystem_id:
+ g.filesystem_id = filesystem_id
+ g.source = get_source(filesystem_id)
def logged_in():
@@ -123,7 +124,9 @@ def login():
user = Journalist.query.filter_by(
username=request.form['username']).one()
if user.is_totp:
- login_flashed_msg += " Please wait for a new two-factor token before logging in again."
+ login_flashed_msg += (
+ " Please wait for a new two-factor token"
+ " before logging in again.")
except:
pass
@@ -161,18 +164,9 @@ def admin_index():
def admin_add_user():
if request.method == 'POST':
form_valid = True
-
username = request.form['username']
- if len(username) == 0:
- form_valid = False
- flash("Missing username", "error")
password = request.form['password']
- password_again = request.form['password_again']
- if password != password_again:
- form_valid = False
- flash("Passwords didn't match", "error")
-
is_admin = bool(request.form.get('is_admin'))
if form_valid:
@@ -186,11 +180,13 @@ def admin_add_user():
otp_secret=otp_secret)
db_session.add(new_user)
db_session.commit()
- except InvalidPasswordLength:
+ except PasswordError:
+ flash('There was an error with the autogenerated password. '
+ 'User not created. Please try again.', 'error')
+ form_valid = False
+ except InvalidUsernameException as e:
form_valid = False
- flash("Your password must be between {} and {} characters.".format(
- Journalist.MIN_PASSWORD_LEN, Journalist.MAX_PASSWORD_LEN
- ), "error")
+ flash('Invalid username: ' + str(e), "error")
except IntegrityError as e:
db_session.rollback()
form_valid = False
@@ -208,7 +204,7 @@ def admin_add_user():
return redirect(url_for('admin_new_user_two_factor',
uid=new_user.id))
- return render_template("admin_add_user.html")
+ return render_template("admin_add_user.html", password=_make_password())
@app.route('/admin/2fa', methods=('GET', 'POST'))
@@ -278,20 +274,6 @@ class PasswordMismatchError(Exception):
pass
-def edit_account_password(user, password, password_again):
- if password:
- if password != password_again:
- flash("Passwords didn't match!", "error")
- raise PasswordMismatchError
- try:
- user.set_password(password)
- except InvalidPasswordLength:
- flash("Your password must be between {} and {} characters.".format(
- Journalist.MIN_PASSWORD_LEN, Journalist.MAX_PASSWORD_LEN
- ), "error")
- raise
-
-
def commit_account_changes(user):
if db_session.is_modified(user):
try:
@@ -315,27 +297,43 @@ def admin_edit_user(user_id):
if request.method == 'POST':
if request.form['username']:
new_username = request.form['username']
+
+ try:
+ Journalist.check_username_acceptable(new_username)
+ except InvalidUsernameException as e:
+ flash('Invalid username: ' + str(e), 'error')
+ return redirect(url_for("admin_edit_user", user_id=user_id))
+
if new_username == user.username:
pass
elif Journalist.query.filter_by(
- username=new_username).one_or_none():
+ username=new_username).one_or_none():
flash('Username "{}" is already taken!'.format(new_username),
"error")
return redirect(url_for("admin_edit_user", user_id=user_id))
else:
user.username = new_username
- try:
- edit_account_password(user, request.form['password'],
- request.form['password_again'])
- except (PasswordMismatchError, InvalidPasswordLength):
- return redirect(url_for("admin_edit_user", user_id=user_id))
-
user.is_admin = bool(request.form.get('is_admin'))
commit_account_changes(user)
- return render_template("edit_account.html", user=user)
+ password = _make_password()
+ return render_template("edit_account.html", user=user,
+ password=password)
+
+
[email protected]('/admin/edit/<int:user_id>/new-password', methods=('POST',))
+@admin_required
+def admin_set_diceware_password(user_id):
+ try:
+ user = Journalist.query.get(user_id)
+ except NoResultFound:
+ abort(404)
+
+ password = request.form.get('password')
+ _set_diceware_password(user, password)
+ return redirect(url_for('admin_edit_user', user_id=user_id))
@app.route('/admin/delete/<int:user_id>', methods=('POST',))
@@ -349,25 +347,74 @@ def admin_delete_user(user_id):
else:
app.logger.error(
"Admin {} tried to delete nonexistent user with pk={}".format(
- g.user.username, user_id))
+ g.user.username, user_id))
abort(404)
return redirect(url_for('admin_index'))
[email protected]('/account', methods=('GET', 'POST'))
[email protected]('/account', methods=('GET',))
@login_required
def edit_account():
- if request.method == 'POST':
+ password = _make_password()
+ return render_template('edit_account.html',
+ password=password)
+
+
[email protected]('/account/new-password', methods=['POST'])
+@login_required
+def new_password():
+ user = g.user
+ password = request.form.get('password')
+ _set_diceware_password(user, password)
+ return redirect(url_for('edit_account'))
+
+
[email protected]('/admin/edit/<int:user_id>/new-password', methods=('POST',))
+@admin_required
+def admin_new_password(user_id):
+ try:
+ user = Journalist.query.get(user_id)
+ except NoResultFound:
+ abort(404)
+
+ password = request.form.get('password')
+ _set_diceware_password(user, password)
+ return redirect(url_for('admin_edit_user', user_id=user_id))
+
+
+def _make_password():
+ while True:
+ password = crypto_util.genrandomid(7)
try:
- edit_account_password(g.user, request.form['password'],
- request.form['password_again'])
- except (PasswordMismatchError, InvalidPasswordLength):
- return redirect(url_for('edit_account'))
+ Journalist.check_password_acceptable(password)
+ return password
+ except PasswordError:
+ continue
+
- commit_account_changes(g.user)
+def _set_diceware_password(user, password):
+ try:
+ user.set_password(password)
+ except PasswordError:
+ flash('You submitted a bad password! Password not changed.', 'error')
+ return
+
+ try:
+ db_session.commit()
+ except Exception:
+ flash('There was an error, and the new password might not have been '
+ 'saved correctly. To prevent you from getting locked '
+ 'out of your account, you should reset your password again.',
+ 'error')
+ app.logger.error('Failed to update a valid password.')
+ return
- return render_template('edit_account.html')
+ # using Markup so the HTML isn't escaped
+ flash(Markup("<p>The password was successfully updated! Don't forget to "
+ 'save it in your KeePassX database. The new password is: '
+ '<span><code>{}</code></span></p>'.format(password)),
+ 'success')
@app.route('/account/2fa', methods=('GET', 'POST'))
@@ -405,8 +452,8 @@ def account_reset_two_factor_hotp():
return render_template('account_edit_hotp_secret.html')
-def make_star_true(sid):
- source = get_source(sid)
+def make_star_true(filesystem_id):
+ source = get_source(filesystem_id)
if source.star:
source.star.starred = True
else:
@@ -414,8 +461,8 @@ def make_star_true(sid):
db_session.add(source_star)
-def make_star_false(sid):
- source = get_source(sid)
+def make_star_false(filesystem_id):
+ source = get_source(filesystem_id)
if not source.star:
source_star = SourceStar(source)
db_session.add(source_star)
@@ -423,18 +470,18 @@ def make_star_false(sid):
source.star.starred = False
[email protected]('/col/add_star/<sid>', methods=('POST',))
[email protected]('/col/add_star/<filesystem_id>', methods=('POST',))
@login_required
-def add_star(sid):
- make_star_true(sid)
+def add_star(filesystem_id):
+ make_star_true(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
[email protected]("/col/remove_star/<sid>", methods=('POST',))
[email protected]("/col/remove_star/<filesystem_id>", methods=('POST',))
@login_required
-def remove_star(sid):
- make_star_false(sid)
+def remove_star(filesystem_id):
+ make_star_false(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
@@ -464,23 +511,24 @@ def index():
return render_template('index.html', unstarred=unstarred, starred=starred)
[email protected]('/col/<sid>')
[email protected]('/col/<filesystem_id>')
@login_required
-def col(sid):
- source = get_source(sid)
- source.has_key = crypto_util.getkey(sid)
- return render_template("col.html", sid=sid, source=source)
+def col(filesystem_id):
+ source = get_source(filesystem_id)
+ source.has_key = crypto_util.getkey(filesystem_id)
+ return render_template("col.html", filesystem_id=filesystem_id,
+ source=source)
-def delete_collection(source_id):
+def delete_collection(filesystem_id):
# Delete the source's collection of submissions
- job = worker.enqueue(store.delete_source_directory, source_id)
+ job = worker.enqueue(store.delete_source_directory, filesystem_id)
# Delete the source's reply keypair
- crypto_util.delete_reply_keypair(source_id)
+ crypto_util.delete_reply_keypair(filesystem_id)
# Delete their entry in the db
- source = get_source(source_id)
+ source = get_source(filesystem_id)
db_session.delete(source)
db_session.commit()
return job
@@ -510,10 +558,12 @@ def col_process():
def col_download_unread(cols_selected):
"""Download all unread submissions from all selected sources."""
submissions = []
- for sid in cols_selected:
- id = Source.query.filter(Source.filesystem_id == sid).one().id
- submissions += Submission.query.filter(Submission.downloaded == False,
- Submission.source_id == id).all()
+ for filesystem_id in cols_selected:
+ id = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one().id
+ submissions += Submission.query.filter(
+ Submission.downloaded == false(),
+ Submission.source_id == id).all()
if submissions == []:
flash("No unread submissions in collections selected!", "error")
return redirect(url_for('index'))
@@ -523,34 +573,36 @@ def col_download_unread(cols_selected):
def col_download_all(cols_selected):
"""Download all submissions from all selected sources."""
submissions = []
- for sid in cols_selected:
- id = Source.query.filter(Source.filesystem_id == sid).one().id
- submissions += Submission.query.filter(Submission.source_id == id).all()
+ for filesystem_id in cols_selected:
+ id = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one().id
+ submissions += Submission.query.filter(
+ Submission.source_id == id).all()
return download("all", submissions)
def col_star(cols_selected):
- for sid in cols_selected:
- make_star_true(sid)
+ for filesystem_id in cols_selected:
+ make_star_true(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
def col_un_star(cols_selected):
- for source_id in cols_selected:
- make_star_false(source_id)
+ for filesystem_id in cols_selected:
+ make_star_false(filesystem_id)
db_session.commit()
return redirect(url_for('index'))
[email protected]('/col/delete/<sid>', methods=('POST',))
[email protected]('/col/delete/<filesystem_id>', methods=('POST',))
@login_required
-def col_delete_single(sid):
+def col_delete_single(filesystem_id):
"""deleting a single collection from its /col page"""
- source = get_source(sid)
- delete_collection(sid)
+ source = get_source(filesystem_id)
+ delete_collection(filesystem_id)
flash(
"%s's collection deleted" %
(source.journalist_designation,), "notification")
@@ -562,8 +614,8 @@ def col_delete(cols_selected):
if len(cols_selected) < 1:
flash("No collections selected to delete!", "error")
else:
- for source_id in cols_selected:
- delete_collection(source_id)
+ for filesystem_id in cols_selected:
+ delete_collection(filesystem_id)
flash("%s %s deleted" % (
len(cols_selected),
"collection" if len(cols_selected) == 1 else "collections"
@@ -572,9 +624,9 @@ def col_delete(cols_selected):
return redirect(url_for('index'))
[email protected]('/col/<sid>/<fn>')
[email protected]('/col/<filesystem_id>/<fn>')
@login_required
-def download_single_submission(sid, fn):
+def download_single_submission(filesystem_id, fn):
"""Sends a client the contents of a single submission."""
if '..' in fn or fn.startswith('/'):
abort(404)
@@ -586,7 +638,8 @@ def download_single_submission(sid, fn):
except NoResultFound as e:
app.logger.error("Could not mark " + fn + " as downloaded: %s" % (e,))
- return send_file(store.path(sid, fn), mimetype="application/pgp-encrypted")
+ return send_file(store.path(filesystem_id, fn),
+ mimetype="application/pgp-encrypted")
@app.route('/reply', methods=('POST',))
@@ -609,14 +662,15 @@ def reply():
# Reject empty replies
if not msg:
flash("You cannot send an empty reply!", "error")
- return redirect(url_for('col', sid=g.sid))
+ return redirect(url_for('col', filesystem_id=g.filesystem_id))
g.source.interaction_count += 1
filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count,
g.source.journalist_filename)
crypto_util.encrypt(msg,
- [crypto_util.getkey(g.sid), config.JOURNALIST_KEY],
- output=store.path(g.sid, filename))
+ [crypto_util.getkey(g.filesystem_id),
+ config.JOURNALIST_KEY],
+ output=store.path(g.filesystem_id, filename))
reply = Reply(g.user, g.source, filename)
try:
@@ -635,7 +689,7 @@ def reply():
else:
flash("Thanks! Your reply has been stored.", "notification")
finally:
- return redirect(url_for('col', sid=g.sid))
+ return redirect(url_for('col', filesystem_id=g.filesystem_id))
@app.route('/regenerate-code', methods=('POST',))
@@ -646,7 +700,7 @@ def generate_code():
for item in g.source.collection:
item.filename = store.rename_submission(
- g.sid,
+ g.filesystem_id,
item.filename,
g.source.journalist_filename)
db_session.commit()
@@ -656,19 +710,20 @@ def generate_code():
(original_journalist_designation,
g.source.journalist_designation),
"notification")
- return redirect('/col/' + g.sid)
+ return redirect('/col/' + g.filesystem_id)
[email protected]('/download_unread/<sid>')
[email protected]('/download_unread/<filesystem_id>')
@login_required
-def download_unread_sid(sid):
- id = Source.query.filter(Source.filesystem_id == sid).one().id
- submissions = Submission.query.filter(Submission.source_id == id,
- Submission.downloaded == False).all()
+def download_unread_filesystem_id(filesystem_id):
+ id = Source.query.filter(Source.filesystem_id == filesystem_id).one().id
+ submissions = Submission.query.filter(
+ Submission.source_id == id,
+ Submission.downloaded == false()).all()
if submissions == []:
flash("No unread submissions for this source!")
- return redirect(url_for('col', sid=sid))
- source = get_source(sid)
+ return redirect(url_for('col', filesystem_id=filesystem_id))
+ source = get_source(filesystem_id)
return download(source.journalist_filename, submissions)
@@ -685,29 +740,29 @@ def bulk():
flash("No collections selected to download!", "error")
elif action in ('delete', 'confirm_delete'):
flash("No collections selected to delete!", "error")
- return redirect(url_for('col', sid=g.sid))
+ return redirect(url_for('col', filesystem_id=g.filesystem_id))
if action == 'download':
- source = get_source(g.sid)
+ source = get_source(g.filesystem_id)
return download(source.journalist_filename, selected_docs)
elif action == 'delete':
- return bulk_delete(g.sid, selected_docs)
+ return bulk_delete(g.filesystem_id, selected_docs)
elif action == 'confirm_delete':
- return confirm_bulk_delete(g.sid, selected_docs)
+ return confirm_bulk_delete(g.filesystem_id, selected_docs)
else:
abort(400)
-def confirm_bulk_delete(sid, items_selected):
+def confirm_bulk_delete(filesystem_id, items_selected):
return render_template('delete.html',
- sid=sid,
+ filesystem_id=filesystem_id,
source=g.source,
items_selected=items_selected)
-def bulk_delete(sid, items_selected):
+def bulk_delete(filesystem_id, items_selected):
for item in items_selected:
- item_path = store.path(sid, item.filename)
+ item_path = store.path(filesystem_id, item.filename)
worker.enqueue(store.secure_unlink, item_path)
db_session.delete(item)
db_session.commit()
@@ -716,7 +771,7 @@ def bulk_delete(sid, items_selected):
"Submission{} deleted.".format(
"s" if len(items_selected) > 1 else ""),
"notification")
- return redirect(url_for('col', sid=sid))
+ return redirect(url_for('col', filesystem_id=filesystem_id))
def download(zip_basename, submissions):
@@ -750,7 +805,7 @@ def download(zip_basename, submissions):
def flag():
g.source.flagged = True
db_session.commit()
- return render_template('flag.html', sid=g.sid,
+ return render_template('flag.html', filesystem_id=g.filesystem_id,
codename=g.source.journalist_designation)
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -2,12 +2,14 @@
# -*- coding: utf-8 -*-
import argparse
-from getpass import getpass
+import logging
import os
import shutil
import signal
+import subprocess
import sys
import traceback
+import version
import psutil
import qrcode
@@ -15,11 +17,52 @@
os.environ['SECUREDROP_ENV'] = 'dev' # noqa
import config
-from db import db_session, init_db, Journalist
-from management import run
+import crypto_util
+from db import (db_session, init_db, Journalist, PasswordError,
+ InvalidUsernameException)
+from management.run import run
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
+log = logging.getLogger(__name__)
-def reset(): # pragma: no cover
+
+def sh(command, input=None):
+ """Run the *command* which must be a shell snippet. The stdin is
+ either /dev/null or the *input* argument string.
+
+ The stderr/stdout of the snippet are captured and logged via
+ logging.debug(), one line at a time.
+ """
+ log.debug(":sh: " + command)
+ if input is None:
+ stdin = None
+ else:
+ stdin = subprocess.PIPE
+ proc = subprocess.Popen(
+ args=command,
+ stdin=stdin,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=True,
+ bufsize=1)
+ if stdin is not None:
+ proc.stdin.write(input)
+ proc.stdin.close()
+ lines_of_command_output = []
+ with proc.stdout:
+ for line in iter(proc.stdout.readline, b''):
+ line = line.decode('utf-8')
+ lines_of_command_output.append(line)
+ log.debug(line.strip().encode('ascii', 'ignore'))
+ if proc.wait() != 0:
+ raise subprocess.CalledProcessError(
+ returncode=proc.returncode,
+ cmd=command
+ )
+ return "".join(lines_of_command_output)
+
+
+def reset(args): # pragma: no cover
"""Clears the SecureDrop development applications' state, restoring them to
the way they were immediately after running `setup_dev.sh`. This command:
1. Erases the development sqlite database file.
@@ -54,39 +97,58 @@ def reset(): # pragma: no cover
return 0
-def add_admin(): # pragma: no cover
+def add_admin(args):
return _add_user(is_admin=True)
-def add_journalist(): # pragma: no cover
+def add_journalist(args):
return _add_user()
-def _add_user(is_admin=False): # pragma: no cover
+def _get_username():
while True:
username = raw_input('Username: ')
- password = getpass('Password: ')
- password_again = getpass('Confirm Password: ')
+ try:
+ Journalist.check_username_acceptable(username)
+ except InvalidUsernameException as e:
+ print('Invalid username: ' + str(e))
+ else:
+ return username
+
+
+def _get_yubikey_usage():
+ '''Function used to allow for test suite mocking'''
+ while True:
+ answer = raw_input('Will this user be using a YubiKey [HOTP]? '
+ '(y/N): ').lower().strip()
+ if answer in ('y', 'yes'):
+ return True
+ elif answer in ('', 'n', 'no'):
+ return False
+ else:
+ print 'Invalid answer. Please type "y" or "n"'
- if len(password) > Journalist.MAX_PASSWORD_LEN:
- print('Your password is too long (maximum length {} characters). '
- 'Please pick a shorter '
- 'password.'.format(Journalist.MAX_PASSWORD_LEN))
- continue
- if len(password) < Journalist.MIN_PASSWORD_LEN:
- print('Error: Password needs to be at least {} characters.'.format(
- Journalist.MIN_PASSWORD_LEN
- ))
+def _make_password():
+ while True:
+ password = crypto_util.genrandomid(7)
+ try:
+ Journalist.check_password_acceptable(password)
+ return password
+ except PasswordError:
continue
- if password == password_again:
- break
- print("Passwords didn't match!")
- hotp_input = raw_input('Will this user be using a YubiKey [HOTP]? (y/N): ')
+def _add_user(is_admin=False):
+ username = _get_username()
+
+ print("Note: Journalist passwords are now autogenerated.")
+ password = _make_password()
+ print("This journalist's password is: {}".format(password))
+
+ is_hotp = _get_yubikey_usage()
otp_secret = None
- if hotp_input.lower() in ('y', 'yes'):
+ if is_hotp:
while True:
otp_secret = raw_input(
'Please configure your YubiKey and enter the secret: ')
@@ -130,7 +192,7 @@ def _add_user(is_admin=False): # pragma: no cover
return 0
-def delete_user(): # pragma: no cover
+def delete_user(args): # pragma: no cover
"""Deletes a journalist or administrator from the application."""
# Select user to delete
username = raw_input('Username to delete: ')
@@ -168,7 +230,7 @@ def delete_user(): # pragma: no cover
return 0
-def clean_tmp(): # pragma: no cover
+def clean_tmp(args): # pragma: no cover
"""Cleanup the SecureDrop temp directory. This is intended to be run
as an automated cron job. We skip files that are currently in use to
avoid deleting files that are currently being downloaded."""
@@ -206,9 +268,56 @@ def listdir_fullpath(d):
return 0
+def translate(args):
+ messages_file = os.path.join(args.translations_dir, 'messages.pot')
+
+ if args.extract_update:
+ sh("""
+ set -xe
+
+ mkdir -p {translations_dir}
+
+ pybabel extract \
+ --charset=utf-8 \
+ --mapping={mapping} \
+ --output={messages_file} \
+ --project=SecureDrop \
+ --version={version} \
+ --msgid-bugs-address='[email protected]' \
+ --copyright-holder='Freedom of the Press Foundation' \
+ {sources}
+
+ # we do not handle fuzzy translations yet
+ sed -i '/^#, fuzzy$/d' {messages_file}
+ """.format(translations_dir=args.translations_dir,
+ mapping=args.mapping,
+ messages_file=messages_file,
+ version=args.version,
+ sources=" ".join(args.source)))
+
+ if len(os.listdir(args.translations_dir)) > 1:
+ sh("""
+ set -xe
+ pybabel update \
+ --input-file {messages_file} \
+ --output-dir {translations_dir} \
+ --no-fuzzy-matching --ignore-obsolete
+ """.format(translations_dir=args.translations_dir,
+ messages_file=messages_file))
+ else:
+ log.warning("no translations found (ok for tests, not otherwise)")
+
+ if args.compile and len(os.listdir(args.translations_dir)) > 1:
+ sh("""
+ set -x
+ pybabel compile --directory {translations_dir}
+ """.format(translations_dir=args.translations_dir))
+
+
def get_args():
parser = argparse.ArgumentParser(prog=__file__, description='Management '
'and testing utility for SecureDrop.')
+ parser.add_argument('-v', '--verbose', action='store_true')
subps = parser.add_subparsers()
# Run WSGI app
run_subp = subps.add_parser('run', help='Run the Werkzeug source & '
@@ -244,13 +353,61 @@ def get_args():
clean_tmp_subp_a = subps.add_parser('clean_tmp', help='^')
clean_tmp_subp_a.set_defaults(func=clean_tmp)
+ set_translate_parser(subps)
+
return parser
+def set_translate_parser(subps):
+ parser = subps.add_parser('translate',
+ help='Update and compile translations')
+ translations_dir = 'translations'
+ parser.add_argument(
+ '--extract-update',
+ action='store_true',
+ help='run pybabel extract and pybabel update')
+ parser.add_argument(
+ '--compile',
+ action='store_true',
+ help='run pybabel compile')
+ mapping = 'babel.cfg'
+ parser.add_argument(
+ '--mapping',
+ default=mapping,
+ help='Mapping of files to consider (default {})'.format(
+ mapping))
+ parser.add_argument(
+ '--translations-dir',
+ default=translations_dir,
+ help='Base directory for translation files (default {})'.format(
+ translations_dir))
+ parser.add_argument(
+ '--version',
+ default=version.__version__,
+ help='SecureDrop version to store in pot files (default {})'.format(
+ version.__version__))
+ sources = ['.', 'source_templates', 'journalist_templates']
+ parser.add_argument(
+ '--source',
+ default=sources,
+ action='append',
+ help='Source file or directory to extract (default {})'.format(
+ sources))
+ parser.set_defaults(func=translate)
+
+
+def setup_verbosity(args):
+ if args.verbose:
+ logging.getLogger(__name__).setLevel(logging.DEBUG)
+ else:
+ logging.getLogger(__name__).setLevel(logging.INFO)
+
+
def _run_from_commandline(): # pragma: no cover
try:
args = get_args().parse_args()
- rc = args.func()
+ setup_verbosity(args)
+ rc = args.func(args)
sys.exit(rc)
except KeyboardInterrupt:
sys.exit(signal.SIGINT)
diff --git a/securedrop/management/__init__.py b/securedrop/management/__init__.py
--- a/securedrop/management/__init__.py
+++ b/securedrop/management/__init__.py
@@ -1 +0,0 @@
-from run import run
diff --git a/securedrop/management/run.py b/securedrop/management/run.py
--- a/securedrop/management/run.py
+++ b/securedrop/management/run.py
@@ -52,10 +52,10 @@ def __init__(self, label, cmd, color):
super(DevServerProcess, self).__init__(
self.cmd,
- stdin = subprocess.PIPE,
- stdout = subprocess.PIPE,
- stderr = subprocess.STDOUT,
- preexec_fn = os.setsid)
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ preexec_fn=os.setsid)
def print_label(self, to):
label = "\n => {} <= \n\n".format(self.label)
@@ -64,14 +64,14 @@ def print_label(self, to):
to.write(label)
def fileno(self):
- """
- Implement fileno() in order to use DevServerProcesses with select.select
- directly.
+ """Implement fileno() in order to use DevServerProcesses with
+ select.select directly.
Note this method assumes we only want to select this process'
stdout. This is a reasonable assumption for a DevServerProcess
because the __init__ redirects stderr to stdout, so all output is
available on stdout.
+
"""
return self.stdout.fileno()
@@ -140,7 +140,7 @@ def cleanup(self):
proc.terminate()
-def run(): # pragma: no cover
+def run(args): # pragma: no cover
"""
Starts development servers for both the Source Interface and the
Journalist Interface concurrently. Their output is collected,
@@ -150,7 +150,7 @@ def run(): # pragma: no cover
Ctrl-C will kill the servers and return you to the terminal.
Useful resources:
- * https://stackoverflow.com/questions/22565606/python-asynhronously-print-stdout-from-multiple-subprocesses
+ * https://stackoverflow.com/q/22565606/837471
"""
print \
@@ -164,7 +164,7 @@ def run(): # pragma: no cover
\\/_____/\\/____/\\/____/ \\/___/ \\/_/ \\/____/ \\/___/ \\/_/ \\/___/ \\ \\ \\/
\\ \\_\\
\\/_/
-"""
+""" # noqa
procs = [
lambda: DevServerProcess('Source Interface',
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -92,9 +92,11 @@ def setup_g():
# serving a static resource that won't need to access these common values.
if logged_in():
g.codename = session['codename']
- g.sid = crypto_util.hash_codename(g.codename)
+ g.filesystem_id = crypto_util.hash_codename(g.codename)
try:
- g.source = Source.query.filter(Source.filesystem_id == g.sid).one()
+ g.source = Source.query \
+ .filter(Source.filesystem_id == g.filesystem_id) \
+ .one()
except MultipleResultsFound as e:
app.logger.error(
"Found multiple Sources when one was expected: %s" %
@@ -107,7 +109,7 @@ def setup_g():
del session['logged_in']
del session['codename']
return redirect(url_for('index'))
- g.loc = store.path(g.sid)
+ g.loc = store.path(g.filesystem_id)
@app.before_request
@@ -116,9 +118,10 @@ def check_tor2web():
# ignore_static here so we only flash a single message warning
# about Tor2Web, corresponding to the initial page load.
if 'X-tor2web' in request.headers:
- flash('<strong>WARNING:</strong> You appear to be using Tor2Web. '
- 'This <strong>does not</strong> provide anonymity. '
- '<a href="/tor2web-warning">Why is this dangerous?</a>',
+ flash(Markup('<strong>WARNING:</strong> You appear to be using '
+ 'Tor2Web. This <strong>does not</strong> provide '
+ 'anonymity. <a href="/tor2web-warning">Why is this '
+ 'dangerous?</a>'),
"banner-warning")
@@ -145,9 +148,9 @@ def generate_unique_codename():
"(Codename='{}')".format(codename))
continue
- sid = crypto_util.hash_codename(codename) # scrypt (slow)
+ filesystem_id = crypto_util.hash_codename(codename) # scrypt (slow)
matching_sources = Source.query.filter(
- Source.filesystem_id == sid).all()
+ Source.filesystem_id == filesystem_id).all()
if len(matching_sources) == 0:
return codename
@@ -167,9 +170,9 @@ def generate():
@app.route('/create', methods=['POST'])
def create():
- sid = crypto_util.hash_codename(session['codename'])
+ filesystem_id = crypto_util.hash_codename(session['codename'])
- source = Source(sid, crypto_util.display_id())
+ source = Source(filesystem_id, crypto_util.display_id())
db_session.add(source)
try:
db_session.commit()
@@ -178,7 +181,7 @@ def create():
"Attempt to create a source with duplicate codename: %s" %
(e,))
else:
- os.mkdir(store.path(sid))
+ os.mkdir(store.path(filesystem_id))
session['logged_in'] = True
return redirect(url_for('lookup'))
@@ -192,18 +195,20 @@ def wrapper(*args, **kwargs):
@async
-def async_genkey(sid, codename):
- crypto_util.genkeypair(sid, codename)
+def async_genkey(filesystem_id, codename):
+ crypto_util.genkeypair(filesystem_id, codename)
# Register key generation as update to the source, so sources will
# filter to the top of the list in the journalist interface if a
# flagged source logs in and has a key generated for them. #789
try:
- source = Source.query.filter(Source.filesystem_id == sid).one()
+ source = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one()
source.last_updated = datetime.utcnow()
db_session.commit()
except Exception as e:
- app.logger.error("async_genkey for source (sid={}): {}".format(sid, e))
+ app.logger.error("async_genkey for source "
+ "(filesystem_id={}): {}".format(filesystem_id, e))
@app.route('/lookup', methods=('GET',))
@@ -211,7 +216,7 @@ def async_genkey(sid, codename):
def lookup():
replies = []
for reply in g.source.replies:
- reply_path = store.path(g.sid, reply.filename)
+ reply_path = store.path(g.filesystem_id, reply.filename)
try:
reply.decrypted = crypto_util.decrypt(
g.codename,
@@ -229,8 +234,8 @@ def lookup():
# Generate a keypair to encrypt replies from the journalist
# Only do this if the journalist has flagged the source as one
# that they would like to reply to. (Issue #140.)
- if not crypto_util.getkey(g.sid) and g.source.flagged:
- async_genkey(g.sid, g.codename)
+ if not crypto_util.getkey(g.filesystem_id) and g.source.flagged:
+ async_genkey(g.filesystem_id, g.codename)
return render_template(
'lookup.html',
@@ -238,16 +243,16 @@ def lookup():
replies=replies,
flagged=g.source.flagged,
haskey=crypto_util.getkey(
- g.sid))
+ g.filesystem_id))
-def normalize_timestamps(sid):
+def normalize_timestamps(filesystem_id):
"""
Update the timestamps on all of the source's submissions to match that of
the latest submission. This minimizes metadata that could be useful to
investigators. See #301.
"""
- sub_paths = [store.path(sid, submission.filename)
+ sub_paths = [store.path(filesystem_id, submission.filename)
for submission in g.source.submissions]
if len(sub_paths) > 1:
args = ["touch"]
@@ -279,7 +284,7 @@ def submit():
g.source.interaction_count += 1
fnames.append(
store.save_message_submission(
- g.sid,
+ g.filesystem_id,
g.source.interaction_count,
journalist_filename,
msg))
@@ -287,7 +292,7 @@ def submit():
g.source.interaction_count += 1
fnames.append(
store.save_file_submission(
- g.sid,
+ g.filesystem_id,
g.source.interaction_count,
journalist_filename,
fh.filename,
@@ -320,11 +325,11 @@ def submit():
entropy_avail = int(
open('/proc/sys/kernel/random/entropy_avail').read())
if entropy_avail >= 2400:
- async_genkey(g.sid, g.codename)
+ async_genkey(g.filesystem_id, g.codename)
g.source.last_updated = datetime.utcnow()
db_session.commit()
- normalize_timestamps(g.sid)
+ normalize_timestamps(g.filesystem_id)
return redirect(url_for('lookup'))
@@ -335,7 +340,7 @@ def delete():
query = Reply.query.filter(
Reply.filename == request.form['reply_filename'])
reply = get_one_or_else(query, app.logger, abort)
- store.secure_unlink(store.path(g.sid, reply.filename))
+ store.secure_unlink(store.path(g.filesystem_id, reply.filename))
db_session.delete(reply)
db_session.commit()
@@ -351,7 +356,7 @@ def batch_delete():
app.logger.error("Found no replies when at least one was expected")
return redirect(url_for('lookup'))
for reply in replies:
- store.secure_unlink(store.path(g.sid, reply.filename))
+ store.secure_unlink(store.path(g.filesystem_id, reply.filename))
db_session.delete(reply)
db_session.commit()
@@ -397,7 +402,7 @@ def logout():
if logged_in():
session.clear()
msg = render_template('logout_flashed_message.html')
- flash(Markup(msg), "important")
+ flash(Markup(msg), "important hide-if-not-tor-browser")
return redirect(url_for('index'))
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -95,7 +95,8 @@ def get_bulk_archive(selected_submissions, zip_directory=''):
return zip_file
-def save_file_submission(sid, count, journalist_filename, filename, stream):
+def save_file_submission(filesystem_id, count, journalist_filename, filename,
+ stream):
sanitized_filename = secure_filename(filename)
# We store file submissions in a .gz file for two reasons:
@@ -114,7 +115,7 @@ def save_file_submission(sid, count, journalist_filename, filename, stream):
encrypted_file_name = "{0}-{1}-doc.gz.gpg".format(
count,
journalist_filename)
- encrypted_file_path = path(sid, encrypted_file_name)
+ encrypted_file_path = path(filesystem_id, encrypted_file_name)
with SecureTemporaryFile("/tmp") as stf:
with gzip.GzipFile(filename=sanitized_filename,
mode='wb', fileobj=stf) as gzf:
@@ -131,14 +132,15 @@ def save_file_submission(sid, count, journalist_filename, filename, stream):
return encrypted_file_name
-def save_message_submission(sid, count, journalist_filename, message):
+def save_message_submission(filesystem_id, count, journalist_filename,
+ message):
filename = "{0}-{1}-msg.gpg".format(count, journalist_filename)
- msg_loc = path(sid, filename)
+ msg_loc = path(filesystem_id, filename)
crypto_util.encrypt(message, config.JOURNALIST_KEY, msg_loc)
return filename
-def rename_submission(sid, orig_filename, journalist_filename):
+def rename_submission(filesystem_id, orig_filename, journalist_filename):
check_submission_name = VALIDATE_FILENAME(orig_filename)
if check_submission_name:
parsed_filename = check_submission_name.groupdict()
@@ -147,7 +149,8 @@ def rename_submission(sid, orig_filename, journalist_filename):
parsed_filename['index'], journalist_filename,
parsed_filename['file_type'])
try:
- os.rename(path(sid, orig_filename), path(sid, new_filename))
+ os.rename(path(filesystem_id, orig_filename),
+ path(filesystem_id, new_filename))
except OSError:
pass
else:
@@ -165,6 +168,6 @@ def secure_unlink(fn, recursive=False):
return "success"
-def delete_source_directory(source_id):
- secure_unlink(path(source_id), recursive=True)
+def delete_source_directory(filesystem_id):
+ secure_unlink(path(filesystem_id), recursive=True)
return "success"
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.4.2'
+__version__ = '0.4.3'
diff --git a/testinfra/conftest.py b/testinfra/conftest.py
--- a/testinfra/conftest.py
+++ b/testinfra/conftest.py
@@ -7,9 +7,7 @@
"""
import os
-import sys
import yaml
-import pytest
target_host = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
| diff --git a/docs/development/testing_application_tests.rst b/docs/development/testing_application_tests.rst
--- a/docs/development/testing_application_tests.rst
+++ b/docs/development/testing_application_tests.rst
@@ -70,6 +70,29 @@ file, class, and test name:
pytest tests/test_journalist.py::TestJournalistApp::test_invalid_credentials
+Some Selenium tests are decorated to produce before and after screenshots to aid
+in debugging. This behavior is enabled with the ``SCREENSHOTS_ENABLED`` environment
+variable. Output PNG files will be placed in the ``tests/log/`` directory.
+
+.. code:: sh
+
+ SCREENSHOTS_ENABLED=1 pytest tests/functional/
+
+Page Layout Tests
+~~~~~~~~~~~~~~~~~
+
+You can check the rendering of the layout of each page in each translated
+language using the page layout tests. These will generate screenshots of
+each page and can be used, for example, to update the SecureDrop user
+guides when modifications are made to the UI.
+
+You can run all tests, including the page layout tests, with the
+``--page-layout`` option:
+
+.. code:: sh
+
+ pytest tests/ --page-layout
+
Updating the application tests
------------------------------
diff --git a/docs/development/testing_configuration_tests.rst b/docs/development/testing_configuration_tests.rst
--- a/docs/development/testing_configuration_tests.rst
+++ b/docs/development/testing_configuration_tests.rst
@@ -30,14 +30,9 @@ For the staging VMs:
.. code:: sh
- vagrant up build --no-provision
+ make build-debs
vagrant up /staging/
-.. note:: The staging machines must be rebooted via in order to finalize
- the iptables config. You must manually reboot the machines via
- ``vagrant reload /staging/`` prior to running the config tests
- to ensure the config is valid.
-
Running all VMs concurrently may cause performance
problems if you have less than 8GB of RAM. You can isolate specific
machines for faster testing:
@@ -93,6 +88,9 @@ relevant location for the host you plan to test: ::
In the example above, to add a new test for the ``app-staging`` host,
add a new file to the ``testinfra/spec/app-staging`` directory.
+.. tip:: Read :ref:`updating_ossec_rules` to learn how to write tests for the
+ OSSEC rules.
+
Config test layout
------------------
@@ -116,9 +114,9 @@ Config testing strategy
-----------------------
The config tests currently emphasize testing implementation rather than
-functionality. This is a temporary measure to increase the current testing
-baseline for validating the Ansible provisioning flow, to aid in migrating
-to a current version of Ansible (v2+). After the Ansible version is current,
+functionality. This was a temporary measure to increase the testing
+baseline for validating the Ansible provisioning flow, which aided in migrating
+to a current version of Ansible (v2+). Now that the Ansible version is current,
the config tests can be improved to validate behavior, such as confirming
ports are blocked via external network calls, rather than simply checking
that the iptables rules are formatted as expected.
diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst
--- a/docs/development/testing_continuous_integration.rst
+++ b/docs/development/testing_continuous_integration.rst
@@ -75,7 +75,7 @@ Source the setup script using the following command:
.. code:: sh
- $ source ./devops/scripts/local-setup.sh
+ source ./devops/scripts/local-setup.sh
You will be prompted for the values of the required environment variables. There
are some defaults set that you may want to change. You will need to determine
diff --git a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
--- a/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
+++ b/install_files/ansible-base/roles/app-test/tasks/dev_setup_xvfb_for_functional_tests.yml
@@ -13,55 +13,6 @@
tags:
- apt
-# Selenium 3 makes breaking changes with the 2.X API, and requires the
-# installation of the Mozilla geckodriver. Since the Aaron Swartz Day Hackathon
-# is approaching, which will involve many new external contributors, we've
-# decided to play it as safe as possible by downgrading Firefox to the latest
-# version (46.0.1) that is compatible with the last 2.X series Selenium release
-# (2.53.6). After the Hackathon, we'll resolve the geckodriver business and
-# remove the following three tasks (as well as add firefox back to the
-# `test_apt_dependencies` list).
-- name: Download Firefox 46.0.1 for compatibility with Selenium 2.53.6.
- get_url:
- # Since the whole tasklisk is run as root, the ansible_env.HOME fact is
- # /root. Since this command doesn't need to be run as root and is part of a
- # crutch anyway, I've just hardcoded /tmp
- dest: "/opt/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb"
- url: https://launchpad.net/~ubuntu-mozilla-security/+archive/ubuntu/ppa/+build/9727836/+files/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb
- sha256sum: 88d25053306d33658580973b063cd459a56e3596a3a298c1fb8ab1d52171d860
- tags:
- - apt
-
-- name: Install dependencies for Firefox 46.0.1.
- apt:
- name: "{{ item }}"
- with_items:
- - libasound2
- - libcairo-gobject2
- - libgtk-3-0
- - libstartup-notification0
- tags:
- - apt
-
-- name: Install Firefox 46.0.1 for compatibility with Selenium 2.53.6.
- apt:
- deb: "/opt/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb"
- tags:
- - apt
-
-- name: Set apt hold on Firefox version (via apt).
- command: apt-mark hold firefox
- register: apt_hold_firefox_result
- # apt-mark will return output to report changed status; subsequent runs
- # will report "firefox was already set on hold."
- changed_when: "'firefox set on hold' in apt_hold_firefox_result.stdout"
-
-- name: Set apt hold on Firefox version (via aptitude).
- command: aptitude hold firefox
- # `aptitude hold <package>` doesn't report meaningful changed status,
- # so mark the task as not changed.
- changed_when: false
-
- name: Copy xvfb init script.
copy:
src: xvfb
diff --git a/molecule/builder/tests/conftest.py b/molecule/builder/tests/conftest.py
new file mode 100644
--- /dev/null
+++ b/molecule/builder/tests/conftest.py
@@ -0,0 +1,15 @@
+"""
+Import variables from vars.yml and inject into pytest namespace
+"""
+
+import os
+import yaml
+
+
+def pytest_namespace():
+ """ Return dict of vars imported as 'securedrop_test_vars' into pytest
+ global namespace
+ """
+ filepath = os.path.join(os.path.dirname(__file__), "vars.yml")
+ with open(filepath, 'r') as f:
+ return dict(securedrop_test_vars=yaml.safe_load(f))
diff --git a/testinfra/build/test_build_dependencies.py b/molecule/builder/tests/test_build_dependencies.py
similarity index 95%
rename from testinfra/build/test_build_dependencies.py
rename to molecule/builder/tests/test_build_dependencies.py
--- a/testinfra/build/test_build_dependencies.py
+++ b/molecule/builder/tests/test_build_dependencies.py
@@ -17,7 +17,8 @@ def get_build_directories():
ossec_version=securedrop_test_vars.ossec_version,
keyring_version=securedrop_test_vars.keyring_version,
)
- build_directories = [d.format(**substitutions) for d in securedrop_test_vars.build_directories]
+ build_directories = [d.format(**substitutions) for d
+ in securedrop_test_vars.build_directories]
return build_directories
diff --git a/testinfra/build/test_legacy_paths.py b/molecule/builder/tests/test_legacy_paths.py
similarity index 100%
rename from testinfra/build/test_legacy_paths.py
rename to molecule/builder/tests/test_legacy_paths.py
diff --git a/testinfra/build/test_ossec_packages.py b/molecule/builder/tests/test_ossec_packages.py
similarity index 100%
rename from testinfra/build/test_ossec_packages.py
rename to molecule/builder/tests/test_ossec_packages.py
diff --git a/testinfra/build/test_securedrop_deb_package.py b/molecule/builder/tests/test_securedrop_deb_package.py
similarity index 87%
rename from testinfra/build/test_securedrop_deb_package.py
rename to molecule/builder/tests/test_securedrop_deb_package.py
--- a/testinfra/build/test_securedrop_deb_package.py
+++ b/molecule/builder/tests/test_securedrop_deb_package.py
@@ -39,12 +39,14 @@ def get_deb_packages():
keyring_version=securedrop_test_vars.keyring_version,
)
- deb_packages = [d.format(**substitutions) for d in securedrop_test_vars.build_deb_packages]
+ deb_packages = [d.format(**substitutions) for d
+ in securedrop_test_vars.build_deb_packages]
return deb_packages
deb_packages = get_deb_packages()
+
@pytest.mark.parametrize("deb", deb_packages)
def test_build_deb_packages(File, deb):
"""
@@ -78,8 +80,10 @@ def test_deb_packages_appear_installable(File, Command, Sudo, deb):
# Sudo is required to call `dpkg --install`, even as dry-run.
with Sudo():
c = Command("dpkg --install --dry-run {}".format(deb_package.path))
- assert "Selecting previously unselected package {}".format(package_name) in c.stdout
- regex = "Preparing to unpack [./]+{} ...".format(re.escape(deb_basename))
+ assert "Selecting previously unselected package {}".format(
+ package_name) in c.stdout
+ regex = "Preparing to unpack [./]+{} ...".format(
+ re.escape(deb_basename))
assert re.search(regex, c.stdout, re.M)
assert c.rc == 0
@@ -176,18 +180,23 @@ def test_deb_package_contains_no_generated_assets(File, Command, deb):
if "securedrop-app-code" in deb_package.path:
c = Command("dpkg-deb --contents {}".format(deb_package.path))
# static/gen/ directory should exist
- assert re.search("^.*\./var/www/securedrop/static/gen/$", c.stdout, re.M)
+ assert re.search("^.*\./var/www/securedrop"
+ "/static/gen/$", c.stdout, re.M)
# static/gen/ directory should be empty
- assert not re.search("^.*\./var/www/securedrop/static/gen/.+$", c.stdout, re.M)
+ assert not re.search("^.*\./var/www/securedrop"
+ "/static/gen/.+$", c.stdout, re.M)
# static/.webassets-cache/ directory should exist
- assert re.search("^.*\./var/www/securedrop/static/.webassets-cache/$", c.stdout, re.M)
+ assert re.search("^.*\./var/www/securedrop"
+ "/static/.webassets-cache/$", c.stdout, re.M)
# static/.webassets-cache/ directory should be empty
- assert not re.search("^.*\./var/www/securedrop/static/.webassets-cache/.+$", c.stdout, re.M)
+ assert not re.search("^.*\./var/www/securedrop"
+ "/static/.webassets-cache/.+$", c.stdout, re.M)
# no SASS files should exist; only the generated CSS files.
assert not re.search("^.*sass.*$", c.stdout, re.M)
+
@pytest.mark.parametrize("deb", deb_packages)
def test_deb_package_contains_css(File, Command, deb):
"""
@@ -202,5 +211,8 @@ def test_deb_package_contains_css(File, Command, deb):
c = Command("dpkg-deb --contents {}".format(deb_package.path))
for css_type in ['journalist', 'source']:
- assert re.search("^.*\./var/www/securedrop/static/css/{}.css$".format(css_type), c.stdout, re.M)
- assert re.search("^.*\./var/www/securedrop/static/css/{}.css.map$".format(css_type), c.stdout, re.M)
+ assert re.search("^.*\./var/www/securedrop/static/"
+ "css/{}.css$".format(css_type), c.stdout, re.M)
+ assert re.search("^.*\./var/www/securedrop/static/"
+ "css/{}.css.map$".format(css_type), c.stdout,
+ re.M)
diff --git a/testinfra/vars/build.yml b/molecule/builder/tests/vars.yml
similarity index 97%
rename from testinfra/vars/build.yml
rename to molecule/builder/tests/vars.yml
--- a/testinfra/vars/build.yml
+++ b/molecule/builder/tests/vars.yml
@@ -1,5 +1,5 @@
---
-securedrop_version: "0.4.2"
+securedrop_version: "0.4.3"
ossec_version: "2.8.2"
keyring_version: "0.1.0"
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
--- a/securedrop/tests/conftest.py
+++ b/securedrop/tests/conftest.py
@@ -17,6 +17,22 @@
TEST_WORKER_PIDFILE = '/tmp/securedrop_test_worker.pid'
+def pytest_addoption(parser):
+ parser.addoption("--page-layout", action="store_true",
+ default=False, help="run page layout tests")
+
+
+def pytest_collection_modifyitems(config, items):
+ if config.getoption("--page-layout"):
+ return
+ skip_page_layout = pytest.mark.skip(
+ reason="need --page-layout option to run page layout tests"
+ )
+ for item in items:
+ if "pagelayout" in item.keywords:
+ item.add_marker(skip_page_layout)
+
+
@pytest.fixture(scope='session')
def setUptearDown():
_start_test_rqworker(config)
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -1,27 +1,26 @@
# -*- coding: utf-8 -*-
from datetime import datetime
+import errno
import mock
from multiprocessing import Process
import os
from os.path import abspath, dirname, join, realpath
-import shutil
import signal
import socket
-import sys
import time
import traceback
-import unittest
-import urllib2
+import requests
from Crypto import Random
-import gnupg
from selenium import webdriver
-from selenium.common.exceptions import WebDriverException
+from selenium.common.exceptions import (WebDriverException,
+ NoAlertPresentException)
from selenium.webdriver.firefox import firefox_binary
+from selenium.webdriver.support.ui import WebDriverWait
+from selenium.webdriver.support import expected_conditions
-os.environ['SECUREDROP_ENV'] = 'test'
-import config
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
import db
import journalist
import source
@@ -29,6 +28,19 @@
LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log'))
+
+# https://stackoverflow.com/a/34795883/837471
+class alert_is_not_present(object):
+ """ Expect an alert to not be present."""
+ def __call__(self, driver):
+ try:
+ alert = driver.switch_to.alert
+ alert.text
+ return False
+ except NoAlertPresentException:
+ return True
+
+
class FunctionalTest():
def _unused_port(self):
@@ -38,20 +50,40 @@ def _unused_port(self):
s.close()
return port
- def _create_webdriver(self):
+ def _create_webdriver(self, firefox, profile=None):
+ # see https://review.openstack.org/#/c/375258/ and the
+ # associated issues for background on why this is necessary
+ connrefused_retry_count = 3
+ connrefused_retry_interval = 5
+
+ for i in range(connrefused_retry_count + 1):
+ try:
+ driver = webdriver.Firefox(firefox_binary=firefox,
+ firefox_profile=profile)
+ if i > 0:
+ # i==0 is normal behavior without connection refused.
+ print('NOTE: Retried {} time(s) due to '
+ 'connection refused.'.format(i))
+ return driver
+ except socket.error as socket_error:
+ if (socket_error.errno == errno.ECONNREFUSED
+ and i < connrefused_retry_count):
+ time.sleep(connrefused_retry_interval)
+ continue
+ raise
+
+ def _prepare_webdriver(self):
log_file = open(join(LOG_DIR, 'firefox.log'), 'a')
log_file.write(
'\n\n[%s] Running Functional Tests\n' % str(
datetime.now()))
log_file.flush()
- firefox = firefox_binary.FirefoxBinary(log_file=log_file)
- return webdriver.Firefox(firefox_binary=firefox)
+ return firefox_binary.FirefoxBinary(log_file=log_file)
- def setUp(self):
+ def setup(self):
# Patch the two-factor verification to avoid intermittent errors
- patcher = mock.patch('journalist.Journalist.verify_token')
- self.addCleanup(patcher.stop)
- self.mock_journalist_verify_token = patcher.start()
+ self.patcher = mock.patch('journalist.Journalist.verify_token')
+ self.mock_journalist_verify_token = self.patcher.start()
self.mock_journalist_verify_token.return_value = True
signal.signal(signal.SIGUSR1, lambda _, s: traceback.print_stack(s))
@@ -94,23 +126,45 @@ def start_journalist_server():
self.source_process.start()
self.journalist_process.start()
- self.driver = self._create_webdriver()
+ for tick in range(30):
+ try:
+ requests.get(self.source_location)
+ requests.get(self.journalist_location)
+ except:
+ time.sleep(1)
+ else:
+ break
+
+ if not hasattr(self, 'override_driver'):
+ self.driver = self._create_webdriver(self._prepare_webdriver())
+
+ # Polls the DOM to wait for elements. To read more about why
+ # this is necessary:
+ #
+ # http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html
+ #
+ # A value of 5 is known to not be enough in some cases, when
+ # the machine hosting the tests is slow, reason why it was
+        # raised to 10. Setting the value to 60 or more would surely
+        # cover even the slowest of machines. However it also means
+        # that a test failing to find the desired element in the DOM
+        # will only report failure after 60 seconds, which is painful
+        # for quick debugging.
+ #
+ self.driver.implicitly_wait(10)
# Set window size and position explicitly to avoid potential bugs due
# to discrepancies between environments.
- self.driver.set_window_position(0, 0);
- self.driver.set_window_size(1024, 768);
-
- # Poll the DOM briefly to wait for elements. It appears .click() does
- # not always do a good job waiting for the page to load, or perhaps
- # Firefox takes too long to render it (#399)
- self.driver.implicitly_wait(5)
+ self.driver.set_window_position(0, 0)
+ self.driver.set_window_size(1024, 768)
self.secret_message = 'blah blah blah'
- def tearDown(self):
+ def teardown(self):
+ self.patcher.stop()
env.teardown()
- self.driver.quit()
+ if not hasattr(self, 'override_driver'):
+ self.driver.quit()
self.source_process.terminate()
self.journalist_process.terminate()
@@ -126,3 +180,20 @@ def wait_for(self, function_with_assertion, timeout=5):
time.sleep(0.1)
# one more try, which will raise any errors if they are outstanding
return function_with_assertion()
+
+ def _alert_wait(self):
+ WebDriverWait(self.driver, 10).until(
+ expected_conditions.alert_is_present(),
+ 'Timed out waiting for confirmation popup.')
+
+ def _alert_accept(self):
+ self.driver.switch_to.alert.accept()
+ WebDriverWait(self.driver, 10).until(
+ alert_is_not_present(),
+ 'Timed out waiting for confirmation popup to disappear.')
+
+ def _alert_dismiss(self):
+ self.driver.switch_to.alert.dismiss()
+ WebDriverWait(self.driver, 10).until(
+ alert_is_not_present(),
+ 'Timed out waiting for confirmation popup to disappear.')
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -1,17 +1,20 @@
+import pytest
import urllib2
+import re
import tempfile
-import zipfile
import gzip
-import datetime
from selenium.common.exceptions import NoSuchElementException
import tests.utils.db_helper as db_helper
+import crypto_util
from db import Journalist
+from step_helpers import screenshots
class JournalistNavigationSteps():
+ @screenshots
def _get_submission_content(self, file_url, raw_content):
if not file_url.endswith(".gz.gpg"):
return str(raw_content)
@@ -25,7 +28,7 @@ def _get_submission_content(self, file_url, raw_content):
return content
- def _login_user(self, username, password, token):
+ def _try_login_user(self, username, password, token):
self.driver.get(self.journalist_location + "/login")
username_field = self.driver.find_element_by_css_selector(
'input[name="username"]')
@@ -43,50 +46,85 @@ def _login_user(self, username, password, token):
'button[type=submit]')
submit_button.click()
+ @screenshots
+ def _login_user(self, username, password, token):
+ self._try_login_user(username, password, token)
# Successful login should redirect to the index
- self.assertEquals(self.driver.current_url,
- self.journalist_location + '/')
+ assert self.driver.current_url == self.journalist_location + '/'
+ @screenshots
def _journalist_logs_in(self):
# Create a test user for logging in
self.user, self.user_pw = db_helper.init_journalist()
self._login_user(self.user.username, self.user_pw, 'mocked')
headline = self.driver.find_element_by_css_selector('span.headline')
- self.assertIn('Sources', headline.text)
+ if not hasattr(self, 'accept_languages'):
+ assert 'Sources' in headline.text
+
+ def _journalist_visits_col(self):
+ self.driver.find_element_by_css_selector(
+ '#un-starred-source-link-1').click()
+ def _journalist_selects_first_doc(self):
+ self.driver.find_elements_by_name('doc_names_selected')[0].click()
+
+ def _journalist_clicks_delete_selected_javascript(self):
+ self.driver.find_element_by_id('delete-selected').click()
+ self._alert_wait()
+
+ def _journalist_verifies_deletion_of_one_submission_javascript(self):
+ self._journalist_selects_first_doc()
+ self._journalist_clicks_delete_selected_javascript()
+ self._alert_dismiss()
+ selected_count = len(self.driver.find_elements_by_name(
+ 'doc_names_selected'))
+ assert selected_count > 0
+ self._journalist_clicks_delete_selected_javascript()
+ self._alert_accept()
+ assert selected_count > len(self.driver.find_elements_by_name(
+ 'doc_names_selected'))
+
+ @screenshots
def _admin_logs_in(self):
self.admin, self.admin_pw = db_helper.init_journalist(is_admin=True)
self._login_user(self.admin.username, self.admin_pw, 'mocked')
- # Admin user should log in to the same interface as a normal user,
- # since there may be users who wish to be both journalists and admins.
- headline = self.driver.find_element_by_css_selector('span.headline')
- self.assertIn('Sources', headline.text)
+ if not hasattr(self, 'accept_languages'):
+ # Admin user should log in to the same interface as a
+ # normal user, since there may be users who wish to be
+ # both journalists and admins.
+ headline = self.driver.find_element_by_css_selector(
+ 'span.headline')
+ assert 'Sources' in headline.text
- # Admin user should have a link that take them to the admin page
- links = self.driver.find_elements_by_tag_name('a')
- self.assertIn('Admin', [el.text for el in links])
+            # Admin user should have a link that takes them to the admin page
+ links = self.driver.find_elements_by_tag_name('a')
+ assert 'Admin' in [el.text for el in links]
+ @screenshots
def _admin_visits_admin_interface(self):
- admin_interface_link = self.driver.find_element_by_link_text('Admin')
+ admin_interface_link = self.driver.find_element_by_id(
+ 'link-admin-index')
admin_interface_link.click()
+ if not hasattr(self, 'accept_languages'):
+ h1s = self.driver.find_elements_by_tag_name('h1')
+ assert "Admin Interface" in [el.text for el in h1s]
- h1s = self.driver.find_elements_by_tag_name('h1')
- self.assertIn("Admin Interface", [el.text for el in h1s])
-
- def _add_user(self, username, password, is_admin=False):
+ @screenshots
+ def _add_user(self, username, is_admin=False, hotp=None):
username_field = self.driver.find_element_by_css_selector(
'input[name="username"]')
username_field.send_keys(username)
- password_field = self.driver.find_element_by_css_selector(
- 'input[name="password"]')
- password_field.send_keys(password)
-
- password_again_field = self.driver.find_element_by_css_selector(
- 'input[name="password_again"]')
- password_again_field.send_keys(password)
+ if hotp:
+ hotp_checkbox = self.driver.find_element_by_css_selector(
+ 'input[name="is_hotp"]')
+ print(str(hotp_checkbox.__dict__))
+ hotp_checkbox.click()
+ hotp_secret = self.driver.find_element_by_css_selector(
+ 'input[name="otp_secret"]')
+ hotp_secret.send_keys(hotp)
if is_admin:
# TODO implement (checkbox is unchecked by default)
@@ -96,25 +134,31 @@ def _add_user(self, username, password, is_admin=False):
'button[type=submit]')
submit_button.click()
+ @screenshots
def _admin_adds_a_user(self):
add_user_btn = self.driver.find_element_by_css_selector(
'button#add-user')
add_user_btn.click()
- # The add user page has a form with an "ADD USER" button
- btns = self.driver.find_elements_by_tag_name('button')
- self.assertIn('ADD USER', [el.text for el in btns])
+ if not hasattr(self, 'accept_languages'):
+ # The add user page has a form with an "ADD USER" button
+ btns = self.driver.find_elements_by_tag_name('button')
+ assert 'ADD USER' in [el.text for el in btns]
- self.new_user = dict(
- username='dellsberg',
- password='pentagonpapers')
+ password = self.driver.find_element_by_css_selector('#password') \
+ .text.strip()
- self._add_user(self.new_user['username'], self.new_user['password'])
+ self.new_user = dict(
+ username='dellsberg',
+ password=password,
+ )
+ self._add_user(self.new_user['username'])
- # Clicking submit on the add user form should redirect to the Google
- # Authenticator page
- h1s = self.driver.find_elements_by_tag_name('h1')
- self.assertIn("Enable Google Authenticator", [el.text for el in h1s])
+ if not hasattr(self, 'accept_languages'):
+ # Clicking submit on the add user form should redirect to
+ # the Google Authenticator page
+ h1s = self.driver.find_elements_by_tag_name('h1')
+ assert "Enable Google Authenticator" in [el.text for el in h1s]
# Retrieve the saved user object from the db and keep it around for
# further testing
@@ -129,31 +173,36 @@ def _admin_adds_a_user(self):
'button[type=submit]')
submit_button.click()
- # Successfully verifying the code should redirect to the admin
- # interface, and flash a message indicating success
- flashed_msgs = self.driver.find_elements_by_css_selector('.flash')
- self.assertIn(("Two-factor token successfully verified for user"
- " {}!").format(self.new_user['username']),
- [el.text for el in flashed_msgs])
+ if not hasattr(self, 'accept_languages'):
+ # Successfully verifying the code should redirect to the admin
+ # interface, and flash a message indicating success
+ flashed_msgs = self.driver.find_elements_by_css_selector('.flash')
+ assert (("Two-factor token successfully verified for user"
+ " {}!").format(self.new_user['username']) in
+ [el.text for el in flashed_msgs])
+ @screenshots
def _logout(self):
# Click the logout link
- logout_link = self.driver.find_element_by_link_text('Logout')
+ logout_link = self.driver.find_element_by_id('link-logout')
logout_link.click()
# Logging out should redirect back to the login page
- self.wait_for(
- lambda: self.assertIn("Login to access the journalist interface",
- self.driver.page_source)
- )
+ def login_page():
+ assert ("Login to access the journalist interface" in
+ self.driver.page_source)
+ self.wait_for(login_page)
+ @screenshots
def _check_login_with_otp(self, otp):
self._logout()
self._login_user(self.new_user['username'],
self.new_user['password'], otp)
- # Test that the new user was logged in successfully
- self.assertIn('Sources', self.driver.page_source)
+ if not hasattr(self, 'accept_languages'):
+ # Test that the new user was logged in successfully
+ assert 'Sources' in self.driver.page_source
+ @screenshots
def _new_user_can_log_in(self):
# Log the admin user out
self._logout()
@@ -163,86 +212,90 @@ def _new_user_can_log_in(self):
self.new_user['password'],
'mocked')
- # Test that the new user was logged in successfully
- self.assertIn('Sources', self.driver.page_source)
+ if not hasattr(self, 'accept_languages'):
+ # Test that the new user was logged in successfully
+ assert 'Sources' in self.driver.page_source
# The new user was not an admin, so they should not have the admin
# interface link available
- self.assertRaises(NoSuchElementException,
- self.driver.find_element_by_link_text,
- 'Admin')
+ with pytest.raises(NoSuchElementException):
+ self.driver.find_element_by_id('link-admin-index')
+ @screenshots
def _edit_account(self):
- edit_account_link = self.driver.find_element_by_link_text(
- 'Edit Account')
+ edit_account_link = self.driver.find_element_by_id(
+ 'link-edit-account')
edit_account_link.click()
# The header says "Edit your account"
h1s = self.driver.find_elements_by_tag_name('h1')[0]
- self.assertEqual('Edit your account', h1s.text)
+ assert 'Edit your account' == h1s.text
# There's no link back to the admin interface.
- with self.assertRaises(NoSuchElementException):
- self.driver.find_element_by_partial_link_text('Back to admin interface')
+ with pytest.raises(NoSuchElementException):
+ self.driver.find_element_by_partial_link_text(
+ 'Back to admin interface')
# There's no field to change your username.
- with self.assertRaises(NoSuchElementException):
+ with pytest.raises(NoSuchElementException):
self.driver.find_element_by_css_selector('#username')
# There's no checkbox to change the administrator status of your
# account.
- with self.assertRaises(NoSuchElementException):
- username_field = self.driver.find_element_by_css_selector('#is_admin')
+ with pytest.raises(NoSuchElementException):
+ self.driver.find_element_by_css_selector('#is-admin')
# 2FA reset buttons at the bottom point to the user URLs for reset.
totp_reset_button = self.driver.find_elements_by_css_selector(
'#reset-two-factor-totp')[0]
- self.assertRegexpMatches(totp_reset_button.get_attribute('action'),
- '/account/reset-2fa-totp')
+ assert ('/account/reset-2fa-totp' in
+ totp_reset_button.get_attribute('action'))
hotp_reset_button = self.driver.find_elements_by_css_selector(
'#reset-two-factor-hotp')[0]
- self.assertRegexpMatches(hotp_reset_button.get_attribute('action'),
- '/account/reset-2fa-hotp')
+ assert ('/account/reset-2fa-hotp' in
+ hotp_reset_button.get_attribute('action'))
+ @screenshots
def _edit_user(self, username):
user = Journalist.query.filter_by(username=username).one()
new_user_edit_links = filter(
lambda el: el.get_attribute('data-username') == username,
self.driver.find_elements_by_tag_name('a'))
- self.assertEquals(len(new_user_edit_links), 1)
+ assert 1 == len(new_user_edit_links)
new_user_edit_links[0].click()
# The header says "Edit user "username"".
h1s = self.driver.find_elements_by_tag_name('h1')[0]
- self.assertEqual('Edit user "{}"'.format(username), h1s.text)
+ assert 'Edit user "{}"'.format(username) == h1s.text
# There's a convenient link back to the admin interface.
admin_interface_link = self.driver.find_element_by_partial_link_text(
'Back to admin interface')
- self.assertRegexpMatches(admin_interface_link.get_attribute('href'),
- '/admin$')
+ assert re.search('/admin$', admin_interface_link.get_attribute('href'))
# There's a field to change the user's username and it's already filled
# out with the user's username.
username_field = self.driver.find_element_by_css_selector('#username')
- self.assertEqual(username_field.get_attribute('placeholder'), username)
+ assert username_field.get_attribute('placeholder') == username
# There's a checkbox to change the administrator status of the user and
# it's already checked appropriately to reflect the current status of
# our user.
- username_field = self.driver.find_element_by_css_selector('#is_admin')
- self.assertEqual(bool(username_field.get_attribute('checked')),
- user.is_admin)
+ username_field = self.driver.find_element_by_css_selector('#is-admin')
+ assert (bool(username_field.get_attribute('checked')) ==
+ user.is_admin)
# 2FA reset buttons at the bottom point to the admin URLs for
# resettting 2FA and include the correct user id in the hidden uid.
totp_reset_button = self.driver.find_elements_by_css_selector(
'#reset-two-factor-totp')[0]
- self.assertRegexpMatches(totp_reset_button.get_attribute('action'),
- '/admin/reset-2fa-totp')
+ assert '/admin/reset-2fa-totp' in totp_reset_button.get_attribute(
+ 'action')
totp_reset_uid = totp_reset_button.find_element_by_name('uid')
- self.assertEqual(int(totp_reset_uid.get_attribute('value')), user.id)
- self.assertFalse(totp_reset_uid.is_displayed())
+ assert int(totp_reset_uid.get_attribute('value')) == user.id
+ assert totp_reset_uid.is_displayed() is False
hotp_reset_button = self.driver.find_elements_by_css_selector(
'#reset-two-factor-hotp')[0]
- self.assertRegexpMatches(hotp_reset_button.get_attribute('action'),
- '/admin/reset-2fa-hotp')
+ assert '/admin/reset-2fa-hotp' in hotp_reset_button.get_attribute(
+ 'action')
+
hotp_reset_uid = hotp_reset_button.find_element_by_name('uid')
- self.assertEqual(int(hotp_reset_uid.get_attribute('value')), user.id)
- self.assertFalse(hotp_reset_uid.is_displayed())
+ assert int(hotp_reset_uid.get_attribute('value')) == user.id
+ assert hotp_reset_uid.is_displayed() is False
+ @screenshots
def _admin_can_edit_new_user(self):
# Log the new user out
self._logout()
@@ -250,21 +303,23 @@ def _admin_can_edit_new_user(self):
self._login_user(self.admin.username, self.admin_pw, 'mocked')
# Go to the admin interface
- admin_interface_link = self.driver.find_element_by_link_text('Admin')
+ admin_interface_link = self.driver.find_element_by_id(
+ 'link-admin-index')
admin_interface_link.click()
# Click the "edit user" link for the new user
# self._edit_user(self.new_user['username'])
new_user_edit_links = filter(
- lambda el: el.get_attribute('data-username') == self.new_user['username'],
+ lambda el: (el.get_attribute('data-username') ==
+ self.new_user['username']),
self.driver.find_elements_by_tag_name('a'))
- self.assertEquals(len(new_user_edit_links), 1)
+ assert len(new_user_edit_links) == 1
new_user_edit_links[0].click()
- self.wait_for(
- lambda: self.assertIn('Edit user "{}"'.format(
- self.new_user['username']),
- self.driver.page_source)
- )
+
+ def can_edit_user():
+ assert ('"{}"'.format(self.new_user['username']) in
+ self.driver.page_source)
+ self.wait_for(can_edit_user)
new_username = self.new_user['username'] + "2"
@@ -275,10 +330,9 @@ def _admin_can_edit_new_user(self):
'button[type=submit]')
update_user_btn.click()
- self.wait_for(
- lambda: self.assertIn('Edit user "{}"'.format(new_username),
- self.driver.page_source)
- )
+ def can_edit_user():
+ assert ('"{}"'.format(new_username) in self.driver.page_source)
+ self.wait_for(can_edit_user)
# Update self.new_user with the new username for the future tests
self.new_user['username'] = new_username
@@ -288,94 +342,92 @@ def _admin_can_edit_new_user(self):
self._login_user(self.new_user['username'],
self.new_user['password'],
'mocked')
- self.wait_for(
- lambda: self.assertIn('Sources', self.driver.page_source)
- )
+ if not hasattr(self, 'accept_languages'):
+ def found_sources():
+ assert 'Sources' in self.driver.page_source
+ self.wait_for(found_sources)
# Log the admin user back in
self._logout()
self._login_user(self.admin.username, self.admin_pw, 'mocked')
# Go to the admin interface
- admin_interface_link = self.driver.find_element_by_link_text('Admin')
+ admin_interface_link = self.driver.find_element_by_id(
+ 'link-admin-index')
admin_interface_link.click()
+
# Edit the new user's password
self._edit_user(self.new_user['username'])
+ new_password = self.driver.find_element_by_css_selector('#password') \
+ .text.strip()
+ self.new_user['password'] = new_password
- new_password = self.new_user['password'] + "2"
- password_field = self.driver.find_element_by_css_selector(
- 'input[name="password"]')
- password_field.send_keys(new_password)
- password_again_field = self.driver.find_element_by_css_selector(
- 'input[name="password_again"]')
- password_again_field.send_keys(new_password)
- update_user_btn = self.driver.find_element_by_css_selector(
- 'button#update')
- update_user_btn.click()
+ reset_pw_btn = self.driver.find_element_by_css_selector(
+ '#reset-password')
+ reset_pw_btn.click()
- # Wait until page refreshes to avoid causing a broken pipe error (#623)
- self.wait_for(
- lambda: self.assertIn('Edit user "{}"'.format(new_username),
- self.driver.page_source)
- )
+ def update_password_success():
+ assert ('The password was successfully updated' in
+ self.driver.page_source)
- # Update self.new_user with the new password
- # TODO dry
- self.new_user['password'] = new_password
+ # Wait until page refreshes to avoid causing a broken pipe error (#623)
+ self.wait_for(update_password_success)
# Log the new user in with their new password
self._logout()
self._login_user(self.new_user['username'],
self.new_user['password'],
'mocked')
- self.wait_for(
- lambda: self.assertIn('Sources', self.driver.page_source)
- )
+ self.wait_for(found_sources)
+ @screenshots
def _journalist_checks_messages(self):
self.driver.get(self.journalist_location)
# There should be 1 collection in the list of collections
code_names = self.driver.find_elements_by_class_name('code-name')
- self.assertEquals(1, len(code_names))
+ assert 1 == len(code_names)
# There should be a "1 unread" span in the sole collection entry
unread_span = self.driver.find_element_by_css_selector('span.unread')
- self.assertIn("1 unread", unread_span.text)
+ assert "1 unread" in unread_span.text
+ @screenshots
def _journalist_stars_and_unstars_single_message(self):
# Message begins unstarred
- with self.assertRaises(NoSuchElementException):
+ with pytest.raises(NoSuchElementException):
self.driver.find_element_by_id('starred-source-link-1')
# Journalist stars the message
self.driver.find_element_by_class_name('button-star').click()
starred = self.driver.find_elements_by_id('starred-source-link-1')
- self.assertEquals(1, len(starred))
+ assert 1 == len(starred)
# Journalist unstars the message
self.driver.find_element_by_class_name('button-star').click()
- with self.assertRaises(NoSuchElementException):
+ with pytest.raises(NoSuchElementException):
self.driver.find_element_by_id('starred-source-link-1')
+ @screenshots
def _journalist_selects_all_sources_then_selects_none(self):
self.driver.find_element_by_id('select_all').click()
checkboxes = self.driver.find_elements_by_id('checkbox')
for checkbox in checkboxes:
- self.assertTrue(checkbox.is_selected())
+ assert checkbox.is_selected()
self.driver.find_element_by_id('select_none').click()
checkboxes = self.driver.find_elements_by_id('checkbox')
for checkbox in checkboxes:
- self.assertFalse(checkbox.is_selected())
+ assert checkbox.is_selected() is False
+ @screenshots
def _journalist_downloads_message(self):
self.driver.find_element_by_css_selector(
'#un-starred-source-link-1').click()
submissions = self.driver.find_elements_by_css_selector(
'#submissions a')
- self.assertEqual(1, len(submissions))
+ assert 1 == len(submissions)
file_url = submissions[0].get_attribute('href')
@@ -400,12 +452,145 @@ def cookie_string_from_selenium_cookies(cookies):
decrypted_submission = self.gpg.decrypt(raw_content)
submission = self._get_submission_content(file_url,
decrypted_submission)
- self.assertEqual(self.secret_message, submission)
+ assert self.secret_message == submission
def _journalist_sends_reply_to_source(self):
- self.driver.find_element_by_id('reply-text-field').send_keys('Nice docs')
+ self.driver.find_element_by_id('reply-text-field').send_keys(
+ 'Nice docs')
self.driver.find_element_by_id('reply-button').click()
- self.assertIn("Thanks! Your reply has been stored.",
- self.driver.page_source)
+ assert "Thanks! Your reply has been stored." in self.driver.page_source
+
+ def _visit_edit_account(self):
+ edit_account_link = self.driver.find_element_by_id(
+ 'link-edit-account')
+ edit_account_link.click()
+
+ def _visit_edit_hotp_secret(self):
+ hotp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-hotp')[0]
+ assert ('/account/reset-2fa-hotp' in
+ hotp_reset_button.get_attribute('action'))
+
+ hotp_reset_button.click()
+
+ def _set_hotp_secret(self):
+ hotp_secret_field = self.driver.find_elements_by_css_selector(
+ 'input[name="otp_secret"]')[0]
+ hotp_secret_field.send_keys('123456')
+ submit_button = self.driver.find_element_by_css_selector(
+ 'button[type=submit]')
+ submit_button.click()
+
+ def _visit_edit_totp_secret(self):
+ totp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-totp')[0]
+ assert ('/account/reset-2fa-totp' in
+ totp_reset_button.get_attribute('action'))
+ totp_reset_button.click()
+
+ def _admin_visits_add_user(self):
+ add_user_btn = self.driver.find_element_by_css_selector(
+ 'button#add-user')
+ add_user_btn.click()
+
+ def _admin_visits_edit_user(self):
+ new_user_edit_links = filter(
+ lambda el: (el.get_attribute('data-username') ==
+ self.new_user['username']),
+ self.driver.find_elements_by_tag_name('a'))
+ assert len(new_user_edit_links) == 1
+ new_user_edit_links[0].click()
+
+ def can_edit_user():
+ assert ('"{}"'.format(self.new_user['username']) in
+ self.driver.page_source)
+ self.wait_for(can_edit_user)
+
+ def _admin_visits_reset_2fa_hotp(self):
+ hotp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-hotp')[0]
+ assert ('/admin/reset-2fa-hotp' in
+ hotp_reset_button.get_attribute('action'))
+ hotp_reset_button.click()
+
+ def _admin_visits_reset_2fa_totp(self):
+ totp_reset_button = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-totp')[0]
+ assert ('/admin/reset-2fa-totp' in
+ totp_reset_button.get_attribute('action'))
+ totp_reset_button.click()
+
+ def _admin_creates_a_user(self, hotp):
+ add_user_btn = self.driver.find_element_by_css_selector(
+ 'button#add-user')
+ add_user_btn.click()
+
+ self.new_user = dict(
+ username='dellsberg',
+ password='pentagonpapers')
+
+ self._add_user(self.new_user['username'],
+ is_admin=False,
+ hotp=hotp)
+
+ def _journalist_delete_all(self):
+ for checkbox in self.driver.find_elements_by_name(
+ 'doc_names_selected'):
+ checkbox.click()
+ self.driver.find_element_by_id('delete-selected').click()
+
+ def _journalist_confirm_delete_all(self):
+ self.wait_for(
+ lambda: self.driver.find_element_by_id('confirm-delete'))
+ confirm_btn = self.driver.find_element_by_id('confirm-delete')
+ confirm_btn.click()
+
+ def _source_delete_key(self):
+ filesystem_id = crypto_util.hash_codename(self.source_name)
+ crypto_util.delete_reply_keypair(filesystem_id)
+
+ def _journalist_continues_after_flagging(self):
+ self.driver.find_element_by_id('continue-to-list').click()
+
+ def _journalist_delete_none(self):
+ self.driver.find_element_by_id('delete-selected').click()
+
+ def _journalist_delete_all_javascript(self):
+ self.driver.find_element_by_id('select_all').click()
+ self.driver.find_element_by_id('delete-selected').click()
+ self._alert_wait()
+
+ def _journalist_delete_one(self):
+ self.driver.find_elements_by_name('doc_names_selected')[0].click()
+ self.driver.find_element_by_id('delete-selected').click()
+
+ def _journalist_flags_source(self):
+ self.driver.find_element_by_id('flag-button').click()
+
+ def _journalist_visits_admin(self):
+ self.driver.get(self.journalist_location + "/admin")
+
+ def _journalist_fail_login(self):
+ self.user, self.user_pw = db_helper.init_journalist()
+ self._try_login_user(self.user.username, 'worse', 'mocked')
+
+ def _journalist_fail_login_many(self):
+ self.user, self.user_pw = db_helper.init_journalist()
+ for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD + 1):
+ self._try_login_user(self.user.username, 'worse', 'mocked')
+
+ def _admin_enters_journalist_account_details_hotp(self, username,
+ hotp_secret):
+ username_field = self.driver.find_element_by_css_selector(
+ 'input[name="username"]')
+ username_field.send_keys(username)
+
+ hotp_secret_field = self.driver.find_element_by_css_selector(
+ 'input[name="otp_secret"]')
+ hotp_secret_field.send_keys(hotp_secret)
+
+ hotp_checkbox = self.driver.find_element_by_css_selector(
+ 'input[name="is_hotp"]')
+ hotp_checkbox.click()
diff --git a/securedrop/tests/functional/make_account_changes.py b/securedrop/tests/functional/make_account_changes.py
--- a/securedrop/tests/functional/make_account_changes.py
+++ b/securedrop/tests/functional/make_account_changes.py
@@ -3,8 +3,12 @@
from functional_test import FunctionalTest
from journalist_navigation_steps import JournalistNavigationSteps
+from step_helpers import screenshots
+
class MakeAccountChanges(FunctionalTest, JournalistNavigationSteps, TestCase):
+
+ @screenshots
def test_admin_edit_account_html_template_rendering(self):
"""The edit_account.html template is used both when an admin is editing
a user's account, and when a user is editing their own account. While
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -1,109 +1,123 @@
import tempfile
from selenium.webdriver.common.action_chains import ActionChains
-from selenium.webdriver.common.by import By
+from step_helpers import screenshots
class SourceNavigationSteps():
+ @screenshots
def _source_visits_source_homepage(self):
self.driver.get(self.source_location)
- self.assertEqual("SecureDrop | Protecting Journalists and Sources",
- self.driver.title)
+ if not hasattr(self, 'accept_languages'):
+ assert ("SecureDrop | Protecting Journalists and Sources" ==
+ self.driver.title)
+ @screenshots
def _source_chooses_to_submit_documents(self):
# First move the cursor to a known position in case it happens to
# be hovering over one of the buttons we are testing below.
- header_image = self.driver.find_element_by_id('header')
+ header_image = self.driver.find_element_by_css_selector('.header')
ActionChains(self.driver).move_to_element(header_image).perform()
# It's the source's first time visiting this SecureDrop site, so they
# choose to "Submit Documents".
- submit_button = self.driver.find_element_by_id('submit-documents-button')
+ submit_button = self.driver.find_element_by_id(
+ 'submit-documents-button')
submit_button_icon = self.driver.find_element_by_css_selector(
'a#submit-documents-button > img.off-hover')
- self.assertTrue(submit_button_icon.is_displayed())
+ assert submit_button_icon.is_displayed()
# The source hovers their cursor over the button, and the visual style
# of the button changes to encourage them to click it.
ActionChains(self.driver).move_to_element(submit_button).perform()
- ## Let's make sure toggling the icon image with the hover state is working.
- self.assertFalse(submit_button_icon.is_displayed())
+ # Let's make sure toggling the icon image with the hover state
+ # is working.
+ assert submit_button_icon.is_displayed() is False
submit_button_hover_icon = self.driver.find_element_by_css_selector(
'a#submit-documents-button > img.on-hover')
- self.assertTrue(submit_button_hover_icon.is_displayed())
+ assert submit_button_hover_icon.is_displayed()
# The source clicks the submit button.
submit_button.click()
codename = self.driver.find_element_by_css_selector('#codename')
- self.assertTrue(len(codename.text) > 0)
+ assert len(codename.text) > 0
self.source_name = codename.text
+ @screenshots
def _source_chooses_to_login(self):
self.driver.find_element_by_id('login-button').click()
- logins = self.driver.find_elements_by_id('login-with-existing-codename')
+ logins = self.driver.find_elements_by_id(
+ 'login-with-existing-codename')
- self.assertTrue(len(logins) > 0)
+ assert len(logins) > 0
+ @screenshots
def _source_hits_cancel_at_login_page(self):
self.driver.find_element_by_id('cancel').click()
self.driver.get(self.source_location)
- self.assertEqual("SecureDrop | Protecting Journalists and Sources",
- self.driver.title)
+ assert ("SecureDrop | Protecting Journalists and Sources" ==
+ self.driver.title)
+ @screenshots
def _source_proceeds_to_login(self):
- codename_input = self.driver.find_element_by_id('login-with-existing-codename')
+ codename_input = self.driver.find_element_by_id(
+ 'login-with-existing-codename')
codename_input.send_keys(self.source_name)
continue_button = self.driver.find_element_by_id('login')
continue_button.click()
- self.assertEqual("SecureDrop | Protecting Journalists and Sources",
- self.driver.title)
+ assert ("SecureDrop | Protecting Journalists and Sources" ==
+ self.driver.title)
+ @screenshots
def _source_hits_cancel_at_submit_page(self):
self.driver.find_element_by_id('cancel').click()
- headline = self.driver.find_element_by_class_name('headline')
- self.assertEqual('Submit Materials', headline.text)
+ if not hasattr(self, 'accept_languages'):
+ headline = self.driver.find_element_by_class_name('headline')
+ assert 'Submit Materials' == headline.text
+ @screenshots
def _source_continues_to_submit_page(self):
continue_button = self.driver.find_element_by_id('continue-button')
continue_button_icon = self.driver.find_element_by_css_selector(
'button#continue-button > img.off-hover')
- self.assertTrue(continue_button_icon.is_displayed())
+ assert continue_button_icon.is_displayed()
- ## Hover over the continue button test toggle the icon images with the
- ## hover state.
+ # Hover over the continue button test toggle the icon images
+ # with the hover state.
ActionChains(self.driver).move_to_element(continue_button).perform()
- self.assertFalse(continue_button_icon.is_displayed())
+ assert continue_button_icon.is_displayed() is False
continue_button_hover_icon = self.driver.find_element_by_css_selector(
'button#continue-button img.on-hover'
)
- self.assertTrue(continue_button_hover_icon.is_displayed())
+ assert continue_button_hover_icon.is_displayed()
continue_button.click()
- headline = self.driver.find_element_by_class_name('headline')
- self.assertEqual('Submit Materials', headline.text)
+ if not hasattr(self, 'accept_languages'):
+ headline = self.driver.find_element_by_class_name('headline')
+ assert 'Submit Materials' == headline.text
+ @screenshots
def _source_submits_a_file(self):
with tempfile.NamedTemporaryFile() as file:
file.write(self.secret_message)
file.seek(0)
filename = file.name
- filebasename = filename.split('/')[-1]
file_upload_box = self.driver.find_element_by_css_selector(
'[name=fh]')
@@ -112,53 +126,71 @@ def _source_submits_a_file(self):
submit_button = self.driver.find_element_by_id('submit-doc-button')
ActionChains(self.driver).move_to_element(submit_button).perform()
- toggled_submit_button_icon = self.driver.find_element_by_css_selector(
- 'button#submit-doc-button img.on-hover'
- )
- self.assertTrue(toggled_submit_button_icon.is_displayed())
+ toggled_submit_button_icon = (
+ self.driver.find_element_by_css_selector(
+ 'button#submit-doc-button img.on-hover'))
+ assert toggled_submit_button_icon.is_displayed()
submit_button.click()
- notification = self.driver.find_element_by_css_selector(
- '.success')
- expected_notification = 'Thank you for sending this information to us'
- self.assertIn(expected_notification, notification.text)
+ if not hasattr(self, 'accept_languages'):
+ notification = self.driver.find_element_by_css_selector(
+ '.success')
+ expected_notification = (
+ 'Thank you for sending this information to us')
+ assert expected_notification in notification.text
+ @screenshots
def _source_submits_a_message(self):
text_box = self.driver.find_element_by_css_selector('[name=msg]')
- text_box.send_keys(self.secret_message) # send_keys = type into text box
+ # send_keys = type into text box
+ text_box.send_keys(self.secret_message)
submit_button = self.driver.find_element_by_id('submit-doc-button')
submit_button.click()
- notification = self.driver.find_element_by_css_selector(
- '.success')
- self.assertIn('Thank you for sending this information to us',
- notification.text)
+ if not hasattr(self, 'accept_languages'):
+ notification = self.driver.find_element_by_css_selector(
+ '.success')
+ assert 'Thank' in notification.text
+ @screenshots
def _source_deletes_a_journalist_reply(self):
# Get the reply filename so we can use IDs to select the delete buttons
- reply_filename_element = self.driver.find_element_by_name('reply_filename')
+ reply_filename_element = self.driver.find_element_by_name(
+ 'reply_filename')
reply_filename = reply_filename_element.get_attribute('value')
delete_button_id = 'delete-reply-{}'.format(reply_filename)
delete_button = self.driver.find_element_by_id(delete_button_id)
delete_button.click()
- confirm_button_id = 'confirm-delete-reply-button-{}'.format(reply_filename)
+ confirm_button_id = 'confirm-delete-reply-button-{}'.format(
+ reply_filename)
confirm_button = self.driver.find_element_by_id(confirm_button_id)
- self.assertTrue(confirm_button.is_displayed())
+ assert confirm_button.is_displayed()
confirm_button.click()
- notification = self.driver.find_element_by_class_name('notification')
- self.assertIn('Reply deleted', notification.text)
+ if not hasattr(self, 'accept_languages'):
+ notification = self.driver.find_element_by_class_name(
+ 'notification')
+ assert 'Reply deleted' in notification.text
+ @screenshots
def _source_logs_out(self):
- logout_button = self.driver.find_element_by_id('logout').click()
- notification = self.driver.find_element_by_css_selector('.important')
- self.assertIn('Thank you for exiting your session!', notification.text)
+ self.driver.find_element_by_id('logout').click()
+ assert self.driver.find_element_by_css_selector('.important')
def _source_not_found(self):
self.driver.get(self.source_location + "/unlikely")
- message = self.driver.find_element_by_id('page_not_found')
- self.assertTrue(message.is_displayed())
+ message = self.driver.find_element_by_id('page-not-found')
+ assert message.is_displayed()
+
+ def _source_visits_use_tor(self):
+ self.driver.get(self.source_location + "/use-tor")
+
+ def _source_tor2web_warning(self):
+ self.driver.get(self.source_location + "/tor2web-warning")
+
+ def _source_why_journalist_key(self):
+ self.driver.get(self.source_location + "/why-journalist-key")
diff --git a/securedrop/tests/functional/step_helpers.py b/securedrop/tests/functional/step_helpers.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/step_helpers.py
@@ -0,0 +1,36 @@
+import os
+from os.path import abspath, dirname, join, realpath
+import inspect
+import traceback
+
+LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log'))
+screenshots_enabled = os.environ.get('SCREENSHOTS_ENABLED')
+
+
+# screenshots is a decorator that records an image before and after
+# the steps described in this file
+def screenshots(f):
+ def wrapper(*args, **kwargs):
+ curframe = inspect.currentframe()
+ calframe = inspect.getouterframes(curframe, 2)
+
+ locals = calframe[1][0].f_locals
+ if "testfunction" in locals:
+ fun = calframe[1][0].f_locals["testfunction"]
+ class_name = fun.__self__.__class__.__name__
+ else:
+ class_name = calframe[1][0].f_locals["self"].__class__.__name__
+
+ stack = [x for x in traceback.extract_stack()
+ if '/tests/functional' in x[0]]
+ path = ('-'.join([stack[0][0].split('/')[-1], class_name] +
+                         [x[2] for x in stack if x[2] != 'wrapper']))
+ if screenshots_enabled:
+ image_path = join(LOG_DIR, '%s-before.png' % path)
+ args[0].driver.save_screenshot(image_path)
+ result = f(*args, **kwargs)
+ if screenshots_enabled:
+ image_path = join(LOG_DIR, '%s-after.png' % path)
+ args[0].driver.save_screenshot(image_path)
+ return result
+ return wrapper
diff --git a/securedrop/tests/functional/submission_not_in_memory.py b/securedrop/tests/functional/submission_not_in_memory.py
--- a/securedrop/tests/functional/submission_not_in_memory.py
+++ b/securedrop/tests/functional/submission_not_in_memory.py
@@ -1,11 +1,11 @@
from unittest import TestCase
from functional_test import FunctionalTest
import subprocess
-import tempfile
from source_navigation_steps import SourceNavigationSteps
import os
import getpass
import re
+from step_helpers import screenshots
class SubmissionNotInMemoryTest(TestCase, FunctionalTest,
@@ -35,6 +35,7 @@ def _memory_dump(self, pid):
def _num_strings_in(self, needle, haystack):
return sum(1 for _ in re.finditer(re.escape(needle), haystack))
+ @screenshots
def test_message_is_not_retained_in_memory(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -47,8 +48,9 @@ def test_message_is_not_retained_in_memory(self):
secrets_in_memory = self._num_strings_in(self.secret_message,
memory_dump)
- self.assertLess(secrets_in_memory, 1)
+ assert secrets_in_memory < 1
+ @screenshots
def test_file_upload_is_not_retained_in_memory(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -61,4 +63,4 @@ def test_file_upload_is_not_retained_in_memory(self):
secrets_in_memory = self._num_strings_in(self.secret_message,
memory_dump)
- self.assertLess(secrets_in_memory, 1)
+ assert secrets_in_memory < 1
diff --git a/securedrop/tests/functional/test_admin_interface.py b/securedrop/tests/functional/test_admin_interface.py
--- a/securedrop/tests/functional/test_admin_interface.py
+++ b/securedrop/tests/functional/test_admin_interface.py
@@ -1,19 +1,13 @@
import functional_test
import journalist_navigation_steps
-import unittest
+from step_helpers import screenshots
-class AdminInterface(
- unittest.TestCase,
+class TestAdminInterface(
functional_test.FunctionalTest,
journalist_navigation_steps.JournalistNavigationSteps):
- def setUp(self):
- functional_test.FunctionalTest.setUp(self)
-
- def tearDown(self):
- functional_test.FunctionalTest.tearDown(self)
-
+ @screenshots
def test_admin_interface(self):
self._admin_logs_in()
self._admin_visits_admin_interface()
diff --git a/securedrop/tests/functional/test_journalist.py b/securedrop/tests/functional/test_journalist.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_journalist.py
@@ -0,0 +1,36 @@
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+import source_navigation_steps
+import journalist_navigation_steps
+import functional_test
+
+
+class TestJournalist(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps,
+ journalist_navigation_steps.JournalistNavigationSteps):
+
+ def test_journalist_verifies_deletion_of_one_submission_javascript(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_verifies_deletion_of_one_submission_javascript()
diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py
--- a/securedrop/tests/functional/test_source_notfound.py
+++ b/securedrop/tests/functional/test_source_notfound.py
@@ -1,19 +1,10 @@
-import unittest
-
import source_navigation_steps
import functional_test
-class SourceInterfaceBannerWarnings(
- unittest.TestCase,
+class TestSourceInterfaceBannerWarnings(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps):
- def setUp(self):
- functional_test.FunctionalTest.setUp(self)
-
- def tearDown(self):
- functional_test.FunctionalTest.tearDown(self)
-
def test_not_found(self):
self._source_not_found()
diff --git a/securedrop/tests/functional/test_source_warnings.py b/securedrop/tests/functional/test_source_warnings.py
--- a/securedrop/tests/functional/test_source_warnings.py
+++ b/securedrop/tests/functional/test_source_warnings.py
@@ -1,25 +1,16 @@
-from selenium import webdriver
-import unittest
-
import source_navigation_steps
import functional_test
-class SourceInterfaceBannerWarnings(
- unittest.TestCase,
+class TestSourceInterfaceBannerWarnings(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps):
- def setUp(self):
- functional_test.FunctionalTest.setUp(self)
-
- def tearDown(self):
- functional_test.FunctionalTest.tearDown(self)
-
def test_warning_appears_if_tor_browser_not_in_use(self):
self.driver.get(self.source_location)
- warning_banner = self.driver.find_element_by_class_name('use-tor-browser')
+ warning_banner = self.driver.find_element_by_class_name(
+ 'use-tor-browser')
- self.assertIn("We recommend using Tor Browser to access SecureDrop",
- warning_banner.text)
+ assert ("We recommend using Tor Browser to access SecureDrop" in
+ warning_banner.text)
diff --git a/securedrop/tests/functional/test_submit_and_retrieve_file.py b/securedrop/tests/functional/test_submit_and_retrieve_file.py
--- a/securedrop/tests/functional/test_submit_and_retrieve_file.py
+++ b/securedrop/tests/functional/test_submit_and_retrieve_file.py
@@ -1,21 +1,15 @@
-import unittest
import source_navigation_steps
import journalist_navigation_steps
import functional_test
+from step_helpers import screenshots
-class SubmitAndRetrieveFile(
- unittest.TestCase,
+class TestSubmitAndRetrieveFile(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps,
journalist_navigation_steps.JournalistNavigationSteps):
- def setUp(self):
- functional_test.FunctionalTest.setUp(self)
-
- def tearDown(self):
- functional_test.FunctionalTest.tearDown(self)
-
+ @screenshots
def test_submit_and_retrieve_happy_path(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -33,11 +27,13 @@ def test_submit_and_retrieve_happy_path(self):
self._source_proceeds_to_login()
self._source_deletes_a_journalist_reply()
+ @screenshots
def test_source_cancels_at_login_page(self):
self._source_visits_source_homepage()
self._source_chooses_to_login()
self._source_hits_cancel_at_login_page()
+ @screenshots
def test_source_cancels_at_submit_page(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
diff --git a/securedrop/tests/functional/test_submit_and_retrieve_message.py b/securedrop/tests/functional/test_submit_and_retrieve_message.py
--- a/securedrop/tests/functional/test_submit_and_retrieve_message.py
+++ b/securedrop/tests/functional/test_submit_and_retrieve_message.py
@@ -1,22 +1,15 @@
import functional_test
import source_navigation_steps
import journalist_navigation_steps
-import unittest
-import urllib2
+from step_helpers import screenshots
-class SubmitAndRetrieveMessage(
- unittest.TestCase,
+class TestSubmitAndRetrieveMessage(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps,
journalist_navigation_steps.JournalistNavigationSteps):
- def setUp(self):
- functional_test.FunctionalTest.setUp(self)
-
- def tearDown(self):
- functional_test.FunctionalTest.tearDown(self)
-
+ @screenshots
def test_submit_and_retrieve_happy_path(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
diff --git a/securedrop/tests/i18n/babel.cfg b/securedrop/tests/i18n/babel.cfg
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/i18n/babel.cfg
@@ -0,0 +1,4 @@
+[python: **/tests/i18n/*.py]
+
+[jinja2: **/tests/i18n/*.html]
+extensions=jinja2.ext.autoescape,jinja2.ext.with_
diff --git a/securedrop/tests/i18n/code.py b/securedrop/tests/i18n/code.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/i18n/code.py
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+print(gettext('code hello i18n'))
diff --git a/securedrop/tests/i18n/template.html b/securedrop/tests/i18n/template.html
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/i18n/template.html
@@ -0,0 +1 @@
+<button type="submit">{{ gettext('template hello i18n') }}</button>
diff --git a/securedrop/tests/log/.gitignore b/securedrop/tests/log/.gitignore
--- a/securedrop/tests/log/.gitignore
+++ b/securedrop/tests/log/.gitignore
@@ -1 +1,2 @@
*.log
+*.png
diff --git a/securedrop/tests/pages-layout/.gitignore b/securedrop/tests/pages-layout/.gitignore
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/pages-layout/.gitignore
@@ -0,0 +1 @@
+screenshots
diff --git a/securedrop/tests/pages-layout/__init__.py b/securedrop/tests/pages-layout/__init__.py
new file mode 100644
diff --git a/securedrop/tests/pages-layout/functional_test.py b/securedrop/tests/pages-layout/functional_test.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/pages-layout/functional_test.py
@@ -0,0 +1,79 @@
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+from datetime import datetime
+import os
+from os.path import abspath, dirname, realpath
+import pytest
+
+from selenium.webdriver.common.action_chains import ActionChains
+from selenium.webdriver.common.keys import Keys
+
+from selenium import webdriver
+from selenium.webdriver.firefox import firefox_binary
+
+from tests.functional import functional_test
+
+
+def list_locales():
+ d = os.path.join(dirname(__file__), '..', '..', 'translations')
+ locales = ['en_US']
+ if os.path.isdir(d):
+ files = os.listdir(d)
+ locales.extend([f for f in files if f != 'messages.pot'])
+ return locales
+
+
+class FunctionalTest(functional_test.FunctionalTest):
+
+ @pytest.fixture(autouse=True, params=list_locales())
+ def webdriver_fixture(self, request):
+ self.accept_languages = request.param
+ self.log_dir = abspath(
+ os.path.join(dirname(realpath(__file__)),
+ 'screenshots', self.accept_languages))
+ os.system("mkdir -p " + self.log_dir)
+ firefox = self._prepare_webdriver()
+ profile = webdriver.FirefoxProfile()
+ profile.set_preference("intl.accept_languages", self.accept_languages)
+ self.override_driver = True
+ self.driver = self._create_webdriver(firefox, profile)
+ self._javascript_toggle()
+
+ yield None
+
+ self.driver.quit()
+
+ def _screenshot(self, filename):
+ self.driver.save_screenshot(os.path.join(self.log_dir, filename))
+
+ def _javascript_toggle(self):
+ # the following is a noop for some reason, workaround it
+ # profile.set_preference("javascript.enabled", False)
+ # https://stackoverflow.com/a/36782979/837471
+ self.driver.get("about:config")
+ actions = ActionChains(self.driver)
+ actions.send_keys(Keys.RETURN)
+ actions.send_keys("javascript.enabled")
+ actions.perform()
+ actions.send_keys(Keys.TAB)
+ actions.send_keys(Keys.RETURN)
+ actions.perform()
+
+ def _save_alert(self, filename):
+ fd = open(os.path.join(self.log_dir, filename), 'wb')
+ fd.write(self.driver.switch_to.alert.text.encode('utf-8'))
diff --git a/securedrop/tests/pages-layout/test_journalist.py b/securedrop/tests/pages-layout/test_journalist.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/pages-layout/test_journalist.py
@@ -0,0 +1,296 @@
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+from tests.functional import journalist_navigation_steps
+from tests.functional import source_navigation_steps
+import functional_test
+import pytest
+
+import db
+
+
[email protected]
+def hardening(request):
+ hardening = db.LOGIN_HARDENING
+
+ def finalizer():
+ db.LOGIN_HARDENING = hardening
+ request.addfinalizer(finalizer)
+ db.LOGIN_HARDENING = True
+ return None
+
[email protected]
+class TestJournalistLayout(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps,
+ journalist_navigation_steps.JournalistNavigationSteps):
+
+ def test_account_edit_hotp_secret(self):
+ self._journalist_logs_in()
+ self._visit_edit_account()
+ self._visit_edit_hotp_secret()
+ self._screenshot('journalist-account_edit_hotp_secret.png')
+
+ def test_account_new_two_factor_hotp(self):
+ self._journalist_logs_in()
+ self._visit_edit_account()
+ self._visit_edit_hotp_secret()
+ self._set_hotp_secret()
+ self._screenshot('journalist-account_new_two_factor_hotp.png')
+
+ def test_account_new_two_factor_totp(self):
+ self._journalist_logs_in()
+ self._visit_edit_account()
+ self._visit_edit_totp_secret()
+ self._screenshot('journalist-account_new_two_factor_totp.png')
+
+ def test_admin_add_user_hotp(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_visits_add_user()
+ self._admin_enters_journalist_account_details_hotp(
+ 'journalist2',
+ 'c4 26 43 52 69 13 02 49 9f 6a a5 33 96 46 d9 05 42 a3 4f ae'
+ )
+ self._screenshot('journalist-admin_add_user_hotp.png')
+
+ def test_admin_add_user_totp(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_visits_add_user()
+ self._screenshot('journalist-admin_add_user_totp.png')
+
+ def test_admin_edit_hotp_secret(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_adds_a_user()
+ self._admin_visits_edit_user()
+ self._admin_visits_reset_2fa_hotp()
+ self._screenshot('journalist-admin_edit_hotp_secret.png')
+
+ def test_admin_edit_totp_secret(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_adds_a_user()
+ self._admin_visits_edit_user()
+ self._admin_visits_reset_2fa_totp()
+ self._screenshot('journalist-admin_edit_totp_secret.png')
+
+ def test_login(self):
+ self.driver.get(self.journalist_location + "/login")
+ self._screenshot('journalist-login.png')
+
+ def test_admin(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_adds_a_user()
+ self._screenshot('journalist-admin.png')
+
+ def test_admin_new_user_two_factor_hotp(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_creates_a_user(hotp='123456')
+ self._screenshot('journalist-admin_new_user_two_factor_hotp.png')
+
+ def test_admin_new_user_two_factor_totp(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_creates_a_user(hotp=None)
+ self._screenshot('journalist-admin_new_user_two_factor_totp.png')
+
+ def test_col_no_documents(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_delete_all()
+ self._journalist_confirm_delete_all()
+ self._screenshot('journalist-col_no_document.png')
+
+ def test_col_has_no_key(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._source_delete_key()
+ self._journalist_visits_col()
+ self._screenshot('journalist-col_has_no_key.png')
+
+ def test_col_flagged(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._source_delete_key()
+ self._journalist_visits_col()
+ self._journalist_flags_source()
+ self._journalist_continues_after_flagging()
+ self._screenshot('journalist-col_flagged.png')
+
+ def test_col(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._screenshot('journalist-col.png')
+
+ def test_col_javascript(self):
+ self._javascript_toggle()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._screenshot('journalist-col_javascript.png')
+
+ def test_delete_none(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_delete_none()
+ self._screenshot('journalist-delete_none.png')
+
+ def test_delete_one_javascript(self):
+ self._javascript_toggle()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_selects_first_doc()
+ self._journalist_clicks_delete_selected_javascript()
+ self._save_alert('journalist-delete_one_javascript.txt')
+ self._alert_accept()
+
+ def test_delete_all_javascript(self):
+ self._javascript_toggle()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_delete_all_javascript()
+ self._save_alert('journalist-delete_all_javascript.txt')
+ self._alert_accept()
+
+ def test_delete_one(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_delete_one()
+ self._screenshot('journalist-delete_one.png')
+
+ def test_delete_all(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_visits_col()
+ self._journalist_delete_all()
+ self._screenshot('journalist-delete_all.png')
+
+ def test_edit_account_user(self):
+ self._journalist_logs_in()
+ self._visit_edit_account()
+ self._screenshot('journalist-edit_account_user.png')
+
+ def test_edit_account_admin(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._admin_adds_a_user()
+ self._admin_visits_edit_user()
+ self._screenshot('journalist-edit_account_admin.png')
+
+ def test_flag(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._source_delete_key()
+ self._journalist_visits_col()
+ self._journalist_flags_source()
+ self._screenshot('journalist-flag.png')
+
+ def test_index_no_documents(self):
+ self._journalist_logs_in()
+ self._screenshot('journalist-index_no_documents.png')
+
+ def test_index(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._screenshot('journalist-index.png')
+
+ def test_index_javascript(self):
+ self._javascript_toggle()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._screenshot('journalist-index_javascript.png')
+
+ def test_fail_to_visit_admin(self):
+ self._journalist_visits_admin()
+ self._screenshot('journalist-code-fail_to_visit_admin.png')
+
+ def test_fail_login(self, hardening):
+ self._journalist_fail_login()
+ self._screenshot('journalist-code-fail_login.png')
+
+ def test_fail_login_many(self, hardening):
+ self._journalist_fail_login_many()
+ self._screenshot('journalist-code-fail_login_many.png')
diff --git a/securedrop/tests/pages-layout/test_source.py b/securedrop/tests/pages-layout/test_source.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/pages-layout/test_source.py
@@ -0,0 +1,86 @@
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+from tests.functional import journalist_navigation_steps
+from tests.functional import source_navigation_steps
+import functional_test
+import pytest
+
+
[email protected]
+class TestSourceLayout(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps,
+ journalist_navigation_steps.JournalistNavigationSteps):
+
+ def test_index(self):
+ self._source_visits_source_homepage()
+ self._screenshot('source-index.png')
+
+ def test_index_javascript(self):
+ self._javascript_toggle()
+ self._source_visits_source_homepage()
+ self._screenshot('source-index_javascript.png')
+
+ def test_lookup(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._screenshot('source-lookup.png')
+
+ def test_login(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._screenshot('source-login.png')
+
+ def test_use_tor_browser(self):
+ self._source_visits_use_tor()
+ self._screenshot('source-use_tor_browser.png')
+
+ def test_generate(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._screenshot('source-generate.png')
+
+ def test_logout_flashed_message(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._screenshot('source-logout_flashed_message.png')
+
+ def test_next_submission_flashed_message(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_submits_a_message()
+ self._screenshot('source-next_submission_flashed_message.png')
+
+ def test_notfound(self):
+ self._source_not_found()
+ self._screenshot('source-notfound.png')
+
+ def test_tor2web_warning(self):
+ self._source_tor2web_warning()
+ self._screenshot('source-tor2web_warning.png')
+
+ def test_why_journalist_key(self):
+ self._source_why_journalist_key()
+ self._screenshot('source-why_journalist_key.png')
diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -48,8 +48,8 @@ def setUp(self):
# Add a test user to the journalist interface and log them in
# print Journalist.query.all()
- self.user_pw = "longpassword"
- self.user = Journalist(username="foo",
+ self.user_pw = "corret horse battery staple haha cultural reference"
+ self.user = Journalist(username="some-username",
password=self.user_pw)
db_session.add(self.user)
db_session.commit()
@@ -66,7 +66,7 @@ def test_submit_message(self):
with self.source_app as source_app:
resp = source_app.get('/generate')
resp = source_app.post('/create', follow_redirects=True)
- sid = g.sid
+ filesystem_id = g.filesystem_id
# redirected to submission form
resp = self.source_app.post('/submit', data=dict(
msg=test_msg,
@@ -109,7 +109,7 @@ def test_submit_message(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='confirm_delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name
))
@@ -123,7 +123,7 @@ def test_submit_message(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name,
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -139,7 +139,9 @@ def test_submit_message(self):
# since file deletion is handled by a polling worker, this test needs
# to wait for the worker to get the job and execute it
utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(sid, doc_name)))
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id, doc_name))
+ )
)
def test_submit_file(self):
@@ -151,7 +153,7 @@ def test_submit_file(self):
with self.source_app as source_app:
resp = source_app.get('/generate')
resp = source_app.post('/create', follow_redirects=True)
- sid = g.sid
+ filesystem_id = g.filesystem_id
# redirected to submission form
resp = self.source_app.post('/submit', data=dict(
msg="",
@@ -197,7 +199,7 @@ def test_submit_file(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='confirm_delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name
))
@@ -211,7 +213,7 @@ def test_submit_file(self):
'ul > li > input[name="doc_names_selected"]')[0]['value']
resp = self.journalist_app.post('/bulk', data=dict(
action='delete',
- sid=sid,
+ filesystem_id=filesystem_id,
doc_names_selected=doc_name,
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -227,7 +229,9 @@ def test_submit_file(self):
# since file deletion is handled by a polling worker, this test needs
# to wait for the worker to get the job and execute it
utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(sid, doc_name)))
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id, doc_name))
+ )
)
def test_reply_normal(self):
@@ -290,7 +294,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
resp = source_app.get('/generate')
resp = source_app.post('/create', follow_redirects=True)
codename = session['codename']
- sid = g.sid
+ filesystem_id = g.filesystem_id
# redirected to submission form
resp = source_app.post('/submit', data=dict(
msg=test_msg,
@@ -318,7 +322,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
with self.journalist_app as journalist_app:
resp = journalist_app.post('/flag', data=dict(
- sid=sid))
+ filesystem_id=filesystem_id))
self.assertEqual(resp.status_code, 200)
with self.source_app as source_app:
@@ -332,12 +336,14 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Block up to 15s for the reply keypair, so we can test sending a reply
utils.async.wait_for_assertion(
- lambda: self.assertNotEqual(crypto_util.getkey(sid), None), 15)
+ lambda: self.assertNotEqual(crypto_util.getkey(filesystem_id),
+ None),
+ 15)
# Create 2 replies to test deleting on journalist and source interface
for i in range(2):
resp = self.journalist_app.post('/reply', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
msg=test_reply
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -355,11 +361,11 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Download the reply and verify that it can be decrypted with the
# journalist's key as well as the source's reply key
- sid = soup.select('input[name="sid"]')[0]['value']
+ filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
checkbox_values = [
soup.select('input[name="doc_names_selected"]')[1]['value']]
resp = self.journalist_app.post('/bulk', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
action='download',
doc_names_selected=checkbox_values
), follow_redirects=True)
@@ -368,7 +374,8 @@ def helper_test_reply(self, test_reply, expected_success=True):
zf = zipfile.ZipFile(StringIO(resp.data), 'r')
data = zf.read(zf.namelist()[0])
self._can_decrypt_with_key(data, config.JOURNALIST_KEY)
- self._can_decrypt_with_key(data, crypto_util.getkey(sid), codename)
+ self._can_decrypt_with_key(data, crypto_util.getkey(filesystem_id),
+ codename)
# Test deleting reply on the journalist interface
last_reply_number = len(
@@ -394,7 +401,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
msgid = soup.select(
'form.message > input[name="reply_filename"]')[0]['value']
resp = source_app.post('/delete', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
reply_filename=msgid
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
@@ -403,7 +410,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Make sure the reply is deleted from the filesystem
utils.async.wait_for_assertion(
lambda: self.assertFalse(os.path.exists(
- store.path(sid, msgid))))
+ store.path(filesystem_id, msgid))))
source_app.get('/logout')
@@ -426,10 +433,10 @@ def test_delete_collection(self):
# find the delete form and extract the post parameters
soup = BeautifulSoup(resp.data, 'html.parser')
- delete_form_inputs = soup.select('form#delete_collection')[0]('input')
- sid = delete_form_inputs[1]['value']
+ delete_form_inputs = soup.select('form#delete-collection')[0]('input')
+ filesystem_id = delete_form_inputs[1]['value']
col_name = delete_form_inputs[2]['value']
- resp = self.journalist_app.post('/col/delete/' + sid,
+ resp = self.journalist_app.post('/col/delete/' + filesystem_id,
follow_redirects=True)
self.assertEquals(resp.status_code, 200)
@@ -439,7 +446,7 @@ def test_delete_collection(self):
# Make sure the collection is deleted from the filesystem
utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(sid)))
+ lambda: self.assertFalse(os.path.exists(store.path(filesystem_id)))
)
def test_delete_collections(self):
@@ -470,7 +477,8 @@ def test_delete_collections(self):
# Make sure the collections are deleted from the filesystem
utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(sid)) for sid in checkbox_values])))
+ any([os.path.exists(store.path(filesystem_id))
+ for filesystem_id in checkbox_values])))
def test_filenames(self):
"""Test pretty, sequential filenames when source uploads messages
@@ -533,10 +541,9 @@ def test_user_change_password(self):
their password"""
# change password
- self.journalist_app.post('/account', data=dict(
- password='newlongpassword',
- password_again='newlongpassword'
- ))
+ new_pw = 'another correct horse battery staply long password'
+ self.journalist_app.post('/account/new-password',
+ data=dict(password=new_pw))
# logout
self.journalist_app.get('/logout')
@@ -544,7 +551,7 @@ def test_user_change_password(self):
# login with new credentials should redirect to index page
resp = self.journalist_app.post('/login', data=dict(
username=self.user.username,
- password='newlongpassword',
+ password=new_pw,
token='mocked',
follow_redirects=True))
self.assertEqual(resp.status_code, 302)
@@ -587,13 +594,13 @@ def helper_filenames_submit(self):
), follow_redirects=True)
def helper_filenames_delete(self, soup, i):
- sid = soup.select('input[name="sid"]')[0]['value']
+ filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
checkbox_values = [
soup.select('input[name="doc_names_selected"]')[i]['value']]
# delete
resp = self.journalist_app.post('/bulk', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
action='confirm_delete',
doc_names_selected=checkbox_values
), follow_redirects=True)
@@ -605,7 +612,7 @@ def helper_filenames_delete(self, soup, i):
# confirm delete
resp = self.journalist_app.post('/bulk', data=dict(
- sid=sid,
+ filesystem_id=filesystem_id,
action='delete',
doc_names_selected=checkbox_values
), follow_redirects=True)
@@ -614,5 +621,5 @@ def helper_filenames_delete(self, soup, i):
# Make sure the files were deleted from the filesystem
utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(sid, doc_name))
+ any([os.path.exists(store.path(filesystem_id, doc_name))
for doc_name in checkbox_values])))
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -23,6 +23,9 @@
# Smugly seed the RNG for deterministic testing
random.seed('Β―\_(γ)_/Β―')
+VALID_PASSWORD = 'correct horse battery staple generic passphrase hooray'
+VALID_PASSWORD_2 = 'another correct horse battery staple generic passphrase'
+
class TestJournalistApp(TestCase):
@@ -44,10 +47,14 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
+ @patch('crypto_util.genrandomid', side_effect=['bad', VALID_PASSWORD])
+ def test_make_password(self, mocked_pw_gen):
+ assert journalist._make_password() == VALID_PASSWORD
+
@patch('journalist.app.logger.error')
def test_reply_error_logging(self, mocked_error_logger):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
exception_class = StaleDataError
@@ -55,7 +62,8 @@ def test_reply_error_logging(self, mocked_error_logger):
with patch('db.db_session.commit',
side_effect=exception_class(exception_msg)):
- self.client.post(url_for('reply'), data={'sid': sid, 'msg': '_'})
+ self.client.post(url_for('reply'),
+ data={'filesystem_id': filesystem_id, 'msg': '_'})
# Notice the "potentially sensitive" exception_msg is not present in
# the log event.
@@ -66,13 +74,14 @@ def test_reply_error_logging(self, mocked_error_logger):
def test_reply_error_flashed_message(self):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
exception_class = StaleDataError
with patch('db.db_session.commit', side_effect=exception_class()):
- self.client.post(url_for('reply'), data={'sid': sid, 'msg': '_'})
+ self.client.post(url_for('reply'),
+ data={'filesystem_id': filesystem_id, 'msg': '_'})
self.assertMessageFlashed(
'An unexpected error occurred! Please check '
@@ -80,22 +89,24 @@ def test_reply_error_flashed_message(self):
def test_empty_replies_are_rejected(self):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
resp = self.client.post(url_for('reply'),
- data={'sid': sid, 'msg': ''},
+ data={'filesystem_id': filesystem_id,
+ 'msg': ''},
follow_redirects=True)
self.assertIn("You cannot send an empty reply!", resp.data)
def test_nonempty_replies_are_accepted(self):
source, _ = utils.db_helper.init_source()
- sid = source.filesystem_id
+ filesystem_id = source.filesystem_id
self._login_user()
resp = self.client.post(url_for('reply'),
- data={'sid': sid, 'msg': '_'},
+ data={'filesystem_id': filesystem_id,
+ 'msg': '_'},
follow_redirects=True)
self.assertNotIn("You cannot send an empty reply!", resp.data)
@@ -163,8 +174,8 @@ def test_admin_has_link_to_edit_account_page_in_index_page(self):
password=self.admin_pw,
token='mocked'),
follow_redirects=True)
- edit_account_link = '<a href="{}">{}</a>'.format(
- url_for('edit_account'), "Edit Account")
+ edit_account_link = '<a href="{}" id="link-edit-account">'.format(
+ url_for('edit_account'))
self.assertIn(edit_account_link, resp.data)
def test_user_has_link_to_edit_account_page_in_index_page(self):
@@ -173,8 +184,8 @@ def test_user_has_link_to_edit_account_page_in_index_page(self):
password=self.user_pw,
token='mocked'),
follow_redirects=True)
- edit_account_link = '<a href="{}">{}</a>'.format(
- url_for('edit_account'), "Edit Account")
+ edit_account_link = '<a href="{}" id="link-edit-account">'.format(
+ url_for('edit_account'))
self.assertIn(edit_account_link, resp.data)
def test_admin_has_link_to_admin_index_page_in_index_page(self):
@@ -183,8 +194,8 @@ def test_admin_has_link_to_admin_index_page_in_index_page(self):
password=self.admin_pw,
token='mocked'),
follow_redirects=True)
- admin_link = '<a href="{}">{}</a>'.format(
- url_for('admin_index'), "Admin")
+ admin_link = '<a href="{}" id="link-admin-index">'.format(
+ url_for('admin_index'))
self.assertIn(admin_link, resp.data)
def test_user_lacks_link_to_admin_index_page_in_index_page(self):
@@ -193,8 +204,8 @@ def test_user_lacks_link_to_admin_index_page_in_index_page(self):
password=self.user_pw,
token='mocked'),
follow_redirects=True)
- admin_link = '<a href="{}">{}</a>'.format(
- url_for('admin_index'), "Admin")
+ admin_link = '<a href="{}" id="link-admin-index">'.format(
+ url_for('admin_index'))
self.assertNotIn(admin_link, resp.data)
# WARNING: we are purposely doing something that would not work in
@@ -256,112 +267,119 @@ def test_admin_deletes_invalid_user_404(self):
def test_admin_edits_user_password_success_response(self):
self._login_admin()
- self.client.post(
- url_for('admin_edit_user', user_id=self.user.id),
- data=dict(username=self.user.username, is_admin=False,
- password='validlongpassword',
- password_again='validlongpassword'))
-
- self.assertMessageFlashed("Account successfully updated!", 'success')
+ resp = self.client.post(
+ url_for('admin_new_password', user_id=self.user.id),
+ data=dict(password=VALID_PASSWORD_2),
+ follow_redirects=True)
- def test_user_edits_password_success_reponse(self):
- self._login_user()
- self.client.post(url_for('edit_account'),
- data=dict(password='validlongpassword',
- password_again='validlongpassword'))
- self.assertMessageFlashed("Account successfully updated!", 'success')
+ text = resp.data.decode('utf-8')
+ assert 'The password was successfully updated!' in text
+ assert VALID_PASSWORD_2 in text
- def test_admin_edits_user_password_mismatch_warning(self):
+ def test_admin_edits_user_password_error_response(self):
self._login_admin()
- self.client.post(
- url_for('admin_edit_user', user_id=self.user.id),
- data=dict(username=self.user.username, is_admin=False,
- password='not', password_again='thesame'),
+ with patch('db.db_session.commit', side_effect=Exception()):
+ resp = self.client.post(
+ url_for('admin_new_password', user_id=self.user.id),
+ data=dict(password=VALID_PASSWORD_2),
+ follow_redirects=True)
+
+ assert ('There was an error, and the new password might not have '
+ 'been saved correctly.') in resp.data.decode('utf-8')
+
+ def test_user_edits_password_success_reponse(self):
+ self._login_user()
+ resp = self.client.post(
+ url_for('new_password'),
+ data=dict(password=VALID_PASSWORD_2),
follow_redirects=True)
- self.assertMessageFlashed("Passwords didn't match!", "error")
+ text = resp.data.decode('utf-8')
+ assert "The password was successfully updated!" in text
+ assert VALID_PASSWORD_2 in text
- def test_user_edits_password_mismatch_redirect(self):
+ def test_user_edits_password_error_reponse(self):
self._login_user()
- resp = self.client.post(url_for('edit_account'), data=dict(
- password='not',
- password_again='thesame'))
- self.assertRedirects(resp, url_for('edit_account'))
- def test_admin_add_user_password_mismatch_warning(self):
- self._login_admin()
- resp = self.client.post(url_for('admin_add_user'),
- data=dict(username='dellsberg',
- password='not',
- password_again='thesame',
- is_admin=False))
- self.assertIn('Passwords didn', resp.data)
+ with patch('db.db_session.commit', side_effect=Exception()):
+ resp = self.client.post(
+ url_for('new_password'),
+ data=dict(password=VALID_PASSWORD_2),
+ follow_redirects=True)
+
+ assert ('There was an error, and the new password might not have '
+ 'been saved correctly.') in resp.data.decode('utf-8')
def test_admin_add_user_when_username_already_in_use(self):
self._login_admin()
resp = self.client.post(url_for('admin_add_user'),
data=dict(username=self.admin.username,
- password='testtesttest',
- password_again='testtesttest',
+ password=VALID_PASSWORD,
is_admin=False))
self.assertIn('That username is already in use', resp.data)
def test_max_password_length(self):
"""Creating a Journalist with a password that is greater than the
maximum password length should raise an exception"""
- overly_long_password = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
+ overly_long_password = VALID_PASSWORD + \
+ 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
with self.assertRaises(InvalidPasswordLength):
Journalist(username="My Password is Too Big!",
password=overly_long_password)
def test_min_password_length(self):
"""Creating a Journalist with a password that is smaller than the
- minimum password length should raise an exception"""
+ minimum password length should raise an exception. This uses the
+ magic number 7 below to get around the "diceware-like" requirement
+ that may cause a failure before the length check.
+ """
+ password = ('a ' * 7)[0:(Journalist.MIN_PASSWORD_LEN - 1)]
with self.assertRaises(InvalidPasswordLength):
Journalist(username="My Password is Too Small!",
- password='tiny')
+ password=password)
def test_admin_edits_user_password_too_long_warning(self):
self._login_admin()
- overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
+ overly_long_password = VALID_PASSWORD + \
+ 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
- self.client.post(
- url_for('admin_edit_user', user_id=self.user.id),
+ resp = self.client.post(
+ url_for('admin_new_password', user_id=self.user.id),
data=dict(username=self.user.username, is_admin=False,
- password=overly_long_password,
- password_again=overly_long_password),
+ password=overly_long_password),
follow_redirects=True)
- self.assertMessageFlashed('Your password must be between {} and {} '
- 'characters.'.format(
- Journalist.MIN_PASSWORD_LEN,
- Journalist.MAX_PASSWORD_LEN), 'error')
+ print resp.data.decode('utf-8')
+ self.assertMessageFlashed('You submitted a bad password! '
+ 'Password not changed.', 'error')
def test_user_edits_password_too_long_warning(self):
self._login_user()
- overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
+ overly_long_password = VALID_PASSWORD + \
+ 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
- self.client.post(url_for('edit_account'),
- data=dict(password=overly_long_password,
- password_again=overly_long_password),
+ self.client.post(url_for('new_password'),
+ data=dict(password=overly_long_password),
follow_redirects=True)
- self.assertMessageFlashed('Your password must be between {} and {} '
- 'characters.'.format(
- Journalist.MIN_PASSWORD_LEN,
- Journalist.MAX_PASSWORD_LEN), 'error')
+ self.assertMessageFlashed('You submitted a bad password! '
+ 'Password not changed.', 'error')
def test_admin_add_user_password_too_long_warning(self):
self._login_admin()
- overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
- resp = self.client.post(
+ overly_long_password = VALID_PASSWORD + \
+ 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
+ self.client.post(
url_for('admin_add_user'),
- data=dict(username='dellsberg', password=overly_long_password,
- password_again=overly_long_password, is_admin=False))
+ data=dict(username='dellsberg',
+ password=overly_long_password,
+ is_admin=False))
- self.assertIn('Your password must be between', resp.data)
+ self.assertMessageFlashed('There was an error with the autogenerated '
+ 'password. User not created. '
+ 'Please try again.', 'error')
def test_admin_edits_user_invalid_username(self):
"""Test expected error message when admin attempts to change a user's
@@ -371,8 +389,7 @@ def test_admin_edits_user_invalid_username(self):
self.client.post(
url_for('admin_edit_user', user_id=self.user.id),
- data=dict(username=new_username, is_admin=False,
- password='', password_again=''))
+ data=dict(username=new_username, is_admin=False))
self.assertMessageFlashed('Username "{}" is already taken!'.format(
new_username), 'error')
@@ -516,10 +533,11 @@ def test_admin_add_user(self):
resp = self.client.post(url_for('admin_add_user'),
data=dict(username='dellsberg',
- password='pentagonpapers',
- password_again='pentagonpapers',
+ password=VALID_PASSWORD,
is_admin=False))
+
self.assertRedirects(resp, url_for('admin_new_user_two_factor',
uid=max_journalist_pk+1))
@@ -527,10 +545,19 @@ def test_admin_add_user_without_username(self):
self._login_admin()
resp = self.client.post(url_for('admin_add_user'),
data=dict(username='',
+ password=VALID_PASSWORD,
+ is_admin=False))
+ self.assertIn('Invalid username', resp.data)
+
+ def test_admin_add_user_too_short_username(self):
+ self._login_admin()
+ username = 'a' * (Journalist.MIN_USERNAME_LEN - 1)
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username=username,
password='pentagonpapers',
password_again='pentagonpapers',
is_admin=False))
- self.assertIn('Missing username', resp.data)
+ self.assertIn('Invalid username', resp.data)
@patch('journalist.app.logger.error')
@patch('journalist.Journalist',
@@ -542,8 +569,7 @@ def test_admin_add_user_integrity_error(self,
self.client.post(url_for('admin_add_user'),
data=dict(username='username',
- password='pentagonpapers',
- password_again='pentagonpapers',
+ password=VALID_PASSWORD,
is_admin=False))
mocked_error_logger.assert_called_once_with(
@@ -578,8 +604,9 @@ def test_admin_page_restriction_http_posts(self):
self.assertStatus(resp, 302)
def test_user_authorization_for_gets(self):
- urls = [url_for('index'), url_for('col', sid='1'),
- url_for('download_single_submission', sid='1', fn='1'),
+ urls = [url_for('index'), url_for('col', filesystem_id='1'),
+ url_for('download_single_submission',
+ filesystem_id='1', fn='1'),
url_for('edit_account')]
for url in urls:
@@ -587,8 +614,10 @@ def test_user_authorization_for_gets(self):
self.assertStatus(resp, 302)
def test_user_authorization_for_posts(self):
- urls = [url_for('add_star', sid='1'), url_for('remove_star', sid='1'),
- url_for('col_process'), url_for('col_delete_single', sid='1'),
+ urls = [url_for('add_star', filesystem_id='1'),
+ url_for('remove_star', filesystem_id='1'),
+ url_for('col_process'),
+ url_for('col_delete_single', filesystem_id='1'),
url_for('reply'), url_for('generate_code'), url_for('bulk'),
url_for('account_new_two_factor'),
url_for('account_reset_two_factor_totp'),
@@ -599,31 +628,32 @@ def test_user_authorization_for_posts(self):
def test_invalid_user_password_change(self):
self._login_user()
- res = self.client.post(url_for('edit_account'), data=dict(
- password='not',
- password_again='thesame'))
+ res = self.client.post(url_for('new_password'),
+ data=dict(password='badpw'))
self.assertRedirects(res, url_for('edit_account'))
def test_too_long_user_password_change(self):
self._login_user()
- overly_long_password = 'a' * (Journalist.MAX_PASSWORD_LEN + 1)
- self.client.post(url_for('edit_account'), data=dict(
- password=overly_long_password,
- password_again=overly_long_password),
+ overly_long_password = VALID_PASSWORD + \
+ 'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
+
+ self.client.post(url_for('new_password'),
+ data=dict(password=overly_long_password),
follow_redirects=True)
- self.assertMessageFlashed('Your password must be between {} and {} '
- 'characters.'.format(
- Journalist.MIN_PASSWORD_LEN,
- Journalist.MAX_PASSWORD_LEN), 'error')
+ self.assertMessageFlashed('You submitted a bad password! Password not '
+ 'changed.', 'error')
def test_valid_user_password_change(self):
self._login_user()
- self.client.post(url_for('edit_account'), data=dict(
- password='validlongpassword',
- password_again='validlongpassword'))
- self.assertMessageFlashed("Account successfully updated!", 'success')
+ resp = self.client.post(
+ url_for('new_password'),
+ data=dict(password=VALID_PASSWORD_2),
+ follow_redirects=True)
+
+ assert 'The password was successfully updated!' in \
+ resp.data.decode('utf-8')
def test_regenerate_totp(self):
self._login_user()
@@ -729,7 +759,7 @@ def test_download_selected_submissions_from_source(self):
self._login_user()
resp = self.client.post(
'/bulk', data=dict(action='download',
- sid=source.filesystem_id,
+ filesystem_id=source.filesystem_id,
doc_names_selected=selected_fnames))
# The download request was succesful, and the app returned a zipfile
@@ -897,7 +927,8 @@ def test_download_all_selected_sources(self):
def test_add_star_redirects_to_index(self):
source, _ = utils.db_helper.init_source()
self._login_user()
- resp = self.client.post(url_for('add_star', sid=source.filesystem_id))
+ resp = self.client.post(url_for('add_star',
+ filesystem_id=source.filesystem_id))
self.assertRedirects(resp, url_for('index'))
@@ -957,9 +988,9 @@ def test_col_process_returns_404_with_bad_action(self, abort):
@patch("journalist.make_star_true")
@patch("journalist.db_session")
def test_col_star_call_db_(self, db_session, make_star_true):
- journalist.col_star(['sid'])
+ journalist.col_star(['filesystem_id'])
- make_star_true.assert_called_with('sid')
+ make_star_true.assert_called_with('filesystem_id')
@patch("journalist.db_session")
def test_col_un_star_call_db(self, db_session):
@@ -1032,44 +1063,44 @@ def setUp(self):
@patch('journalist.url_for')
@patch('journalist.redirect')
def test_add_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.add_star('sid')
+ redirect_template = journalist.add_star('filesystem_id')
self.assertEqual(redirect_template, redirect(url_for('index')))
@patch('journalist.db_session')
def test_add_star_makes_commits(self, db_session):
- journalist.add_star('sid')
+ journalist.add_star('filesystem_id')
db_session.commit.assert_called_with()
@patch('journalist.make_star_true')
def test_single_delegates_to_make_star_true(self, make_star_true):
- sid = 'sid'
+ filesystem_id = 'filesystem_id'
- journalist.add_star(sid)
+ journalist.add_star(filesystem_id)
- make_star_true.assert_called_with(sid)
+ make_star_true.assert_called_with(filesystem_id)
@patch('journalist.url_for')
@patch('journalist.redirect')
def test_remove_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.remove_star('sid')
+ redirect_template = journalist.remove_star('filesystem_id')
self.assertEqual(redirect_template, redirect(url_for('index')))
@patch('journalist.db_session')
def test_remove_star_makes_commits(self, db_session):
- journalist.remove_star('sid')
+ journalist.remove_star('filesystem_id')
db_session.commit.assert_called_with()
@patch('journalist.make_star_false')
def test_remove_star_delegates_to_make_star_false(self, make_star_false):
- sid = 'sid'
+ filesystem_id = 'filesystem_id'
- journalist.remove_star(sid)
+ journalist.remove_star(filesystem_id)
- make_star_false.assert_called_with(sid)
+ make_star_false.assert_called_with(filesystem_id)
@classmethod
def tearDownClass(cls):
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -1,19 +1,42 @@
# -*- coding: utf-8 -*-
+import argparse
+import os
+from os.path import abspath, dirname, exists, getmtime, join, realpath
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
+import logging
import manage
import mock
+import pytest
from StringIO import StringIO
+import subprocess
import sys
+import time
import unittest
-
+import version
import utils
+from db import Journalist
+
-class TestManagePy(unittest.TestCase):
+class TestManagePy(object):
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
+ def test_not_verbose(self, caplog):
+ args = manage.get_args().parse_args(['run'])
+ manage.setup_verbosity(args)
+ manage.log.debug('INVISIBLE')
+ assert 'INVISIBLE' not in caplog.text()
+
+ def test_verbose(self, caplog):
+ args = manage.get_args().parse_args(['--verbose', 'run'])
+ manage.setup_verbosity(args)
+ manage.log.debug('VISIBLE')
+ assert 'VISIBLE' in caplog.text()
+
class TestManagementCommand(unittest.TestCase):
def setUp(self):
@@ -22,11 +45,29 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
- @mock.patch("__builtin__.raw_input", return_value='N')
- @mock.patch("manage.getpass", return_value='testtesttest')
+ @mock.patch("__builtin__.raw_input", return_value='jen')
+ def test_get_username_success(self, mock_stdin):
+ assert manage._get_username() == 'jen'
+
+ @mock.patch("__builtin__.raw_input",
+ side_effect=['a' * (Journalist.MIN_USERNAME_LEN - 1), 'jen'])
+ def test_get_username_fail(self, mock_stdin):
+ assert manage._get_username() == 'jen'
+
+ @mock.patch("__builtin__.raw_input", return_value='y')
+ def test_get_yubikey_usage_yes(self, mock_stdin):
+ assert manage._get_yubikey_usage()
+
+ @mock.patch("__builtin__.raw_input", return_value='n')
+ def test_get_yubikey_usage_no(self, mock_stdin):
+ assert not manage._get_yubikey_usage()
+
+ @mock.patch("manage._get_username", return_value='foo-bar-baz')
+ @mock.patch("manage._get_yubikey_usage", return_value=False)
@mock.patch("sys.stdout", new_callable=StringIO)
- def test_exception_handling_when_duplicate_username(self, mock_raw_input,
- mock_getpass,
+ def test_exception_handling_when_duplicate_username(self,
+ mock_username,
+ mock_yubikey,
mock_stdout):
"""Regression test for duplicate username logic in manage.py"""
@@ -40,3 +81,146 @@ def test_exception_handling_when_duplicate_username(self, mock_raw_input,
self.assertEqual(return_value, 1)
self.assertIn('ERROR: That username is already taken!',
sys.stdout.getvalue())
+
+
+class TestManage(object):
+
+ def setup(self):
+ self.dir = abspath(dirname(realpath(__file__)))
+ utils.env.setup()
+
+ def teardown(self):
+ utils.env.teardown()
+
+ @mock.patch("__builtin__.raw_input", return_value='foo-bar-baz')
+ def test_get_username(self, mock_get_usernam):
+ assert manage._get_username() == 'foo-bar-baz'
+
+ def test_translate_compile_code_and_template(self):
+ source = [
+ join(self.dir, 'i18n/code.py'),
+ join(self.dir, 'i18n/template.html'),
+ ]
+ kwargs = {
+ 'translations_dir': config.TEMP_DIR,
+ 'mapping': join(self.dir, 'i18n/babel.cfg'),
+ 'source': source,
+ 'extract_update': True,
+ 'compile': True,
+ 'verbose': logging.DEBUG,
+ 'version': version.__version__,
+ }
+ args = argparse.Namespace(**kwargs)
+ manage.setup_verbosity(args)
+ manage.translate(args)
+ messages_file = join(config.TEMP_DIR, 'messages.pot')
+ assert exists(messages_file)
+ pot = open(messages_file).read()
+ assert 'code hello i18n' in pot
+ assert 'template hello i18n' in pot
+
+ locale = 'en_US'
+ locale_dir = join(config.TEMP_DIR, locale)
+ manage.sh("pybabel init -i {} -d {} -l {}".format(
+ messages_file,
+ config.TEMP_DIR,
+ locale,
+ ))
+ mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo')
+ assert not exists(mo_file)
+ manage.translate(args)
+ assert exists(mo_file)
+ mo = open(mo_file).read()
+ assert 'code hello i18n' in mo
+ assert 'template hello i18n' in mo
+
+ def test_translate_compile_arg(self):
+ source = [
+ join(self.dir, 'i18n/code.py'),
+ ]
+ kwargs = {
+ 'translations_dir': config.TEMP_DIR,
+ 'mapping': join(self.dir, 'i18n/babel.cfg'),
+ 'source': source,
+ 'extract_update': True,
+ 'compile': False,
+ 'verbose': logging.DEBUG,
+ 'version': version.__version__,
+ }
+ args = argparse.Namespace(**kwargs)
+ manage.setup_verbosity(args)
+ manage.translate(args)
+ messages_file = join(config.TEMP_DIR, 'messages.pot')
+ assert exists(messages_file)
+ pot = open(messages_file).read()
+ assert 'code hello i18n' in pot
+
+ locale = 'en_US'
+ locale_dir = join(config.TEMP_DIR, locale)
+ po_file = join(locale_dir, 'LC_MESSAGES/messages.po')
+ manage.sh("pybabel init -i {} -d {} -l {}".format(
+ messages_file,
+ config.TEMP_DIR,
+ locale,
+ ))
+ assert exists(po_file)
+ # pretend this happened a few seconds ago
+ few_seconds_ago = time.time() - 60
+ os.utime(po_file, (few_seconds_ago, few_seconds_ago))
+
+ mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo')
+
+ #
+ # Extract+update but do not compile
+ #
+ old_po_mtime = getmtime(po_file)
+ assert not exists(mo_file)
+ manage.translate(args)
+ assert not exists(mo_file)
+ current_po_mtime = getmtime(po_file)
+ assert old_po_mtime < current_po_mtime
+
+ #
+ # Compile but do not extract+update
+ #
+ source = [
+ join(self.dir, 'i18n/code.py'),
+ join(self.dir, 'i18n/template.html'),
+ ]
+ kwargs['extract_update'] = False
+ kwargs['compile'] = True
+ args = argparse.Namespace(**kwargs)
+ old_po_mtime = current_po_mtime
+ manage.translate(args)
+ assert old_po_mtime == current_po_mtime
+ mo = open(mo_file).read()
+ assert 'code hello i18n' in mo
+ assert 'template hello i18n' not in mo
+
+
+class TestSh(object):
+
+ def test_sh(self):
+ assert 'A' == manage.sh("echo -n A")
+ with pytest.raises(Exception) as excinfo:
+ manage.sh("exit 123")
+ assert excinfo.value.returncode == 123
+
+ def test_sh_progress(self, caplog):
+ manage.sh("echo AB ; sleep 5 ; echo C")
+ records = caplog.records()
+ assert ':sh: ' in records[0].message
+ assert 'AB' == records[1].message
+ assert 'C' == records[2].message
+
+ def test_sh_input(self, caplog):
+ assert 'abc' == manage.sh("cat", 'abc')
+
+ def test_sh_fail(self, caplog):
+ with pytest.raises(subprocess.CalledProcessError) as excinfo:
+ manage.sh("/bin/echo -n AB ; /bin/echo C ; exit 111")
+ assert excinfo.value.returncode == 111
+ for record in caplog.records():
+ if record.levelname == 'ERROR':
+ assert ('replay full' in record.message or
+ 'ABC\n' == record.message)
diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py
--- a/securedrop/tests/test_secure_tempfile.py
+++ b/securedrop/tests/test_secure_tempfile.py
@@ -4,7 +4,7 @@
from gnupg._util import _is_stream
-os.environ['SECUREDROP_ENV'] = 'test'
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import secure_tempfile
import utils
@@ -19,17 +19,11 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
- def test_write_then_read_twice(self):
- self.f.write(self.msg)
- self.f.read()
-
- self.assertEqual(self.f.read(), '')
-
def test_read_before_writing(self):
with self.assertRaisesRegexp(AssertionError,
'You must write before reading!'):
self.f.read()
-
+
def test_write_then_read_once(self):
self.f.write(self.msg)
@@ -58,7 +52,7 @@ def test_write_then_read_then_write(self):
def test_read_write_unicode(self):
         unicode_msg = u'鬼神 Kill Em All 1989'
self.f.write(unicode_msg)
-
+
self.assertEqual(self.f.read().decode('utf-8'), unicode_msg)
def test_file_seems_encrypted(self):
@@ -80,7 +74,8 @@ def test_file_is_removed_from_disk(self):
self.assertFalse(os.path.exists(fp))
def test_SecureTemporaryFile_is_a_STREAMLIKE_TYPE(self):
- self.assertTrue(_is_stream(secure_tempfile.SecureTemporaryFile('/tmp')))
+ self.assertTrue(_is_stream(
+ secure_tempfile.SecureTemporaryFile('/tmp')))
def test_buffered_read(self):
msg = self.msg * 1000
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -2,9 +2,7 @@
from cStringIO import StringIO
import gzip
from mock import patch, ANY
-import os
import re
-import unittest
from bs4 import BeautifulSoup
from flask import session, escape
@@ -15,7 +13,6 @@
import version
import utils
import json
-import config
class TestSourceApp(TestCase):
@@ -46,7 +43,8 @@ def _find_codename(self, html):
"""Find a source codename (diceware passphrase) in HTML"""
# Codenames may contain HTML escape characters, and the wordlist
# contains various symbols.
- codename_re = r'<p [^>]*id="codename"[^>]*>(?P<codename>[a-z0-9 &#;?:=@_.*+()\'"$%!-]+)</p>'
+ codename_re = (r'<p [^>]*id="codename"[^>]*>'
+ r'(?P<codename>[a-z0-9 &#;?:=@_.*+()\'"$%!-]+)</p>')
codename_match = re.search(codename_re, html)
self.assertIsNotNone(codename_match)
return codename_match.group('codename')
@@ -56,7 +54,8 @@ def test_generate(self):
resp = c.get('/generate')
self.assertEqual(resp.status_code, 200)
session_codename = session['codename']
- self.assertIn("This codename is what you will use in future visits", resp.data)
+ self.assertIn("This codename is what you will use in future visits",
+ resp.data)
codename = self._find_codename(resp.data)
self.assertEqual(len(codename.split()), Source.NUM_WORDS)
# codename is also stored in the session - make sure it matches the
@@ -87,7 +86,6 @@ def test_generate_already_logged_in(self):
def test_create(self):
with self.client as c:
resp = c.get('/generate')
- codename = session['codename']
resp = c.post('/create', follow_redirects=True)
self.assertTrue(session['logged_in'])
# should be redirected to /lookup
@@ -123,14 +121,15 @@ def test_login_and_logout(self):
with self.client as c:
resp = c.post('/login', data=dict(codename='invalid'),
- follow_redirects=True)
+ follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- self.assertIn('Sorry, that is not a recognized codename.', resp.data)
+ self.assertIn('Sorry, that is not a recognized codename.',
+ resp.data)
self.assertNotIn('logged_in', session)
with self.client as c:
resp = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
+ follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertTrue(session['logged_in'])
resp = c.get('/logout', follow_redirects=True)
@@ -138,7 +137,8 @@ def test_login_and_logout(self):
self.assertIn('Thank you for exiting your session!', resp.data)
def test_login_with_whitespace(self):
- """Test that codenames with leading or trailing whitespace still work"""
+ """
+ Test that codenames with leading or trailing whitespace still work"""
def login_test(codename):
resp = self.client.get('/login')
self.assertEqual(resp.status_code, 200)
@@ -146,7 +146,7 @@ def login_test(codename):
with self.client as c:
resp = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
+ follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertIn("Submit Materials", resp.data)
self.assertTrue(session['logged_in'])
@@ -239,7 +239,7 @@ def test_submit_both(self):
def test_delete_all(self):
journalist, _ = utils.db_helper.init_journalist()
source, codename = utils.db_helper.init_source()
- replies = utils.db_helper.reply(journalist, source, 1)
+ utils.db_helper.reply(journalist, source, 1)
with self.client as c:
resp = c.post('/login', data=dict(codename=codename),
follow_redirects=True)
@@ -287,7 +287,8 @@ def test_metadata_route(self):
resp = self.client.get('/metadata')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.headers.get('Content-Type'), 'application/json')
- self.assertEqual(json.loads(resp.data.decode('utf-8')).get('sd_version'), version.__version__)
+ self.assertEqual(json.loads(resp.data.decode('utf-8')).get(
+ 'sd_version'), version.__version__)
@patch('crypto_util.hash_codename')
def test_login_with_overly_long_codename(self, mock_hash_codename):
@@ -298,7 +299,8 @@ def test_login_with_overly_long_codename(self, mock_hash_codename):
resp = c.post('/login', data=dict(codename=overly_long_codename),
follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- self.assertIn("Sorry, that is not a recognized codename.", resp.data)
+ self.assertIn("Sorry, that is not a recognized codename.",
+ resp.data)
self.assertFalse(mock_hash_codename.called,
"Called hash_codename for codename w/ invalid "
"length")
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -3,10 +3,9 @@
import unittest
import zipfile
-import crypto_util
-os.environ['SECUREDROP_ENV'] = 'test'
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-from db import db_session, Source
+from db import db_session
import mock
import store
import utils
@@ -34,8 +33,9 @@ def test_verify_in_store_dir(self):
def test_verify_store_dir_not_absolute(self):
STORE_DIR = config.STORE_DIR
try:
- with self.assertRaisesRegexp(store.PathException,
- 'config.STORE_DIR\(\S*\) is not absolute'):
+ with self.assertRaisesRegexp(
+ store.PathException,
+ 'config.STORE_DIR\(\S*\) is not absolute'):
config.STORE_DIR = '.'
store.verify('something')
finally:
@@ -64,8 +64,9 @@ def test_rename_valid_submission(self):
new_journalist_filename = 'nestor_makhno'
expected_filename = old_filename.replace(old_journalist_filename,
new_journalist_filename)
- actual_filename = store.rename_submission(source.filesystem_id, old_filename,
- new_journalist_filename)
+ actual_filename = store.rename_submission(
+ source.filesystem_id, old_filename,
+ new_journalist_filename)
self.assertEquals(actual_filename, expected_filename)
@mock.patch('store.subprocess.check_call')
diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
--- a/securedrop/tests/test_template_filters.py
+++ b/securedrop/tests/test_template_filters.py
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
-import os
import unittest
import template_filters
+
class TestTemplateFilters(unittest.TestCase):
def test_datetimeformat_default_fmt(self):
diff --git a/securedrop/tests/utils/__init__.py b/securedrop/tests/utils/__init__.py
--- a/securedrop/tests/utils/__init__.py
+++ b/securedrop/tests/utils/__init__.py
@@ -1,3 +1,3 @@
-import async
-import db_helper
-import env
+import async # noqa
+import db_helper # noqa
+import env # noqa
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
--- a/securedrop/tests/utils/db_helper.py
+++ b/securedrop/tests/utils/db_helper.py
@@ -5,13 +5,14 @@
import mock
import os
-os.environ['SECUREDROP_ENV'] = 'test'
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
import db
import store
-## db.{Journalist, Reply}
+# db.{Journalist, Reply}
+
def init_journalist(is_admin=False):
"""Initialize a journalist into the database. Return their
@@ -88,7 +89,7 @@ def mark_downloaded(*submissions):
db.db_session.commit()
-## db.{Source,Submission}
+# db.{Source,Submission}
def init_source():
"""Initialize a source: create their database record, the
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -5,10 +5,11 @@
from os.path import abspath, dirname, exists, isdir, join, realpath
import shutil
import subprocess
+import threading
import gnupg
-os.environ['SECUREDROP_ENV'] = 'test'
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
from db import init_db, db_session
@@ -59,9 +60,19 @@ def setup():
def teardown():
+ # make sure threads launched by tests complete before
+ # teardown, otherwise they may fail because resources
+ # they need disappear
+ for t in threading.enumerate():
+ if t.is_alive() and not isinstance(t, threading._MainThread):
+ t.join()
db_session.remove()
try:
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
except OSError as exc:
+ os.system("find " + config.SECUREDROP_DATA_ROOT) # REMOVE ME, see #844
if 'No such file or directory' not in exc:
raise
+ except:
+ os.system("find " + config.SECUREDROP_DATA_ROOT) # REMOVE ME, see #844
+ raise
diff --git a/testinfra/app-code/test_redis_worker.py b/testinfra/app-code/test_redis_worker.py
--- a/testinfra/app-code/test_redis_worker.py
+++ b/testinfra/app-code/test_redis_worker.py
@@ -18,27 +18,27 @@
'environment=HOME="/tmp/python-gnupg"',
])
def test_redis_worker_configuration(File, config_line):
- """
- Ensure SecureDrop Redis worker config for supervisor service
- management is configured correctly.
- """
- f = File('/etc/supervisor/conf.d/securedrop_worker.conf')
- # Config lines may have special characters such as [] which will
- # throw off the regex matching, so let's escape those chars.
- regex = re.escape(config_line)
- assert f.contains('^{}$'.format(regex))
+ """
+ Ensure SecureDrop Redis worker config for supervisor service
+ management is configured correctly.
+ """
+ f = File('/etc/supervisor/conf.d/securedrop_worker.conf')
+ # Config lines may have special characters such as [] which will
+ # throw off the regex matching, so let's escape those chars.
+ regex = re.escape(config_line)
+ assert f.contains('^{}$'.format(regex))
def test_redis_worker_config_file(File):
- """
- Ensure SecureDrop Redis worker config for supervisor service
- management has proper ownership and mode.
+ """
+ Ensure SecureDrop Redis worker config for supervisor service
+ management has proper ownership and mode.
- Using separate test so that the parametrization doesn't rerun
- the file mode checks, which would be useless.
- """
- f = File('/etc/supervisor/conf.d/securedrop_worker.conf')
- assert f.is_file
- assert oct(f.mode) == '0644'
- assert f.user == "root"
- assert f.group == "root"
+ Using separate test so that the parametrization doesn't rerun
+ the file mode checks, which would be useless.
+ """
+ f = File('/etc/supervisor/conf.d/securedrop_worker.conf')
+ assert f.is_file
+ assert oct(f.mode) == '0644'
+ assert f.user == "root"
+ assert f.group == "root"
diff --git a/testinfra/app-code/test_securedrop_app_code.py b/testinfra/app-code/test_securedrop_app_code.py
--- a/testinfra/app-code/test_securedrop_app_code.py
+++ b/testinfra/app-code/test_securedrop_app_code.py
@@ -35,6 +35,18 @@ def test_securedrop_application_apt_dependencies(Package, package):
assert Package(package).is_installed
+def test_securedrop_application_test_locale(File, Sudo):
+ """
+ Ensure SecureDrop DEFAULT_LOCALE is present.
+ """
+ securedrop_config = File("{}/config.py".format(
+ securedrop_test_vars.securedrop_code))
+ with Sudo():
+ assert securedrop_config.is_file
+ assert securedrop_config.contains("^DEFAULT_LOCALE")
+ assert securedrop_config.content.count("DEFAULT_LOCALE") == 1
+
+
def test_securedrop_application_test_journalist_key(File, Sudo):
"""
Ensure the SecureDrop Application GPG public key file is present.
@@ -63,11 +75,13 @@ def test_securedrop_application_test_journalist_key(File, Sudo):
assert securedrop_config.user == "root"
assert securedrop_config.group == "root"
else:
- assert securedrop_config.user == securedrop_test_vars.securedrop_user
- assert securedrop_config.group == securedrop_test_vars.securedrop_user
+ assert securedrop_config.user == \
+ securedrop_test_vars.securedrop_user
+ assert securedrop_config.group == \
+ securedrop_test_vars.securedrop_user
assert oct(securedrop_config.mode) == "0600"
assert securedrop_config.contains(
- "^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$")
+ "^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$")
def test_securedrop_application_sqlite_db(File, Sudo):
diff --git a/testinfra/app/apache/test_apache_journalist_interface.py b/testinfra/app/apache/test_apache_journalist_interface.py
--- a/testinfra/app/apache/test_apache_journalist_interface.py
+++ b/testinfra/app/apache/test_apache_journalist_interface.py
@@ -16,6 +16,7 @@
'Header unset Etag',
]
+
# Test is not DRY; haven't figured out how to parametrize on
# multiple inputs, so explicitly redeclaring test logic.
@pytest.mark.parametrize("header", wanted_apache_headers)
@@ -31,6 +32,7 @@ def test_apache_headers_journalist_interface(File, header):
header_regex = "^{}$".format(re.escape(header))
assert re.search(header_regex, f.content, re.M)
+
# Block of directory declarations for Apache vhost is common
# to both Source and Journalist interfaces. Hardcoding these values
# across multiple test files to speed up development; they should be
@@ -75,8 +77,10 @@ def test_apache_headers_journalist_interface(File, header):
# declare journalist-specific apache configs
@pytest.mark.parametrize("apache_opt", [
- "<VirtualHost {}:8080>".format(securedrop_test_vars.apache_listening_address),
- "WSGIDaemonProcess journalist processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format(securedrop_test_vars.securedrop_code),
+ "<VirtualHost {}:8080>".format(
+ securedrop_test_vars.apache_listening_address),
+ "WSGIDaemonProcess journalist processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format( # noqa
+ securedrop_test_vars.securedrop_code),
'WSGIProcessGroup journalist',
'WSGIScriptAlias / /var/www/journalist.wsgi',
'Header set Cache-Control "no-store"',
@@ -129,8 +133,8 @@ def test_apache_logging_journalist_interface(File, Command, Sudo):
The actions of Journalists are logged by the system, so that an Admin can
investigate incidents and track access.
- Logs were broken for some period of time, logging only "combined" to the logfile,
- rather than the combined LogFormat intended.
+ Logs were broken for some period of time, logging only "combined" to
+ the logfile, rather than the combined LogFormat intended.
"""
# Sudo is necessary because /var/log/apache2 is mode 0750.
with Sudo():
@@ -142,7 +146,7 @@ def test_apache_logging_journalist_interface(File, Command, Sudo):
# validate the log entry.
Command.check_output("curl http://127.0.0.1:8080")
- assert f.size > 0 # Make sure something was logged.
+ assert f.size > 0 # Make sure something was logged.
# LogFormat declaration was missing, so track regressions that log
# just the string "combined" and nothing else.
assert not f.contains("^combined$")
diff --git a/testinfra/app/apache/test_apache_service.py b/testinfra/app/apache/test_apache_service.py
--- a/testinfra/app/apache/test_apache_service.py
+++ b/testinfra/app/apache/test_apache_service.py
@@ -1,5 +1,4 @@
import pytest
-import re
securedrop_test_vars = pytest.securedrop_test_vars
@@ -65,5 +64,6 @@ def test_apache_listening(Socket, Sudo, port):
"""
# Sudo is necessary to read from /proc/net/tcp.
with Sudo():
- s = Socket("tcp://{}:{}".format(securedrop_test_vars.apache_listening_address, port))
+ s = Socket("tcp://{}:{}".format(
+ securedrop_test_vars.apache_listening_address, port))
assert s.is_listening
diff --git a/testinfra/app/apache/test_apache_source_interface.py b/testinfra/app/apache/test_apache_source_interface.py
--- a/testinfra/app/apache/test_apache_source_interface.py
+++ b/testinfra/app/apache/test_apache_source_interface.py
@@ -20,8 +20,10 @@ def test_apache_headers_source_interface(File, header):
@pytest.mark.parametrize("apache_opt", [
- "<VirtualHost {}:80>".format(securedrop_test_vars.apache_listening_address),
- "WSGIDaemonProcess source processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format(securedrop_test_vars.securedrop_code),
+ "<VirtualHost {}:80>".format(
+ securedrop_test_vars.apache_listening_address),
+ "WSGIDaemonProcess source processes=2 threads=30 display-name=%{{GROUP}} python-path={}".format( # noqa
+ securedrop_test_vars.securedrop_code),
'WSGIProcessGroup source',
'WSGIScriptAlias / /var/www/source.wsgi',
'Header set Cache-Control "no-store"',
diff --git a/testinfra/app/apache/test_apache_system_config.py b/testinfra/app/apache/test_apache_system_config.py
--- a/testinfra/app/apache/test_apache_system_config.py
+++ b/testinfra/app/apache/test_apache_system_config.py
@@ -4,6 +4,7 @@
securedrop_test_vars = pytest.securedrop_test_vars
+
@pytest.mark.parametrize("package", [
"apache2-mpm-worker",
"libapache2-mod-wsgi",
@@ -131,5 +132,37 @@ def test_apache_modules_absent(Command, Sudo, apache_module):
"""
with Sudo():
c = Command("/usr/sbin/a2query -m {}".format(apache_module))
- assert "No module matches {} (disabled".format(apache_module) in c.stderr
+ assert "No module matches {} (disabled".format(apache_module) in \
+ c.stderr
assert c.rc == 32
+
+
[email protected]("logfile",
+ securedrop_test_vars.allowed_apache_logfiles)
+def test_apache_logfiles_present(File, Command, Sudo, logfile):
+ """"
+ Ensure that whitelisted Apache log files for the Source and Journalist
+ Interfaces are present. In staging, we permit a "source-error" log,
+ but on prod even that is not allowed. A separate test will confirm
+ absence of unwanted logfiles by comparing the file count in the
+ Apache log directory.
+ """
+ # We need elevated privileges to read files inside /var/log/apache2
+ with Sudo():
+ f = File(logfile)
+ assert f.is_file
+ assert f.user == "root"
+
+
+def test_apache_logfiles_no_extras(Command, Sudo):
+ """
+ Ensure that no unwanted Apache logfiles are present. Complements the
+ `test_apache_logfiles_present` config test. Here, we confirm that the
+ total number of Apache logfiles exactly matches the number permitted
+ on the Application Server, whether staging or prod.
+ """
+ # We need elevated privileges to read files inside /var/log/apache2
+ with Sudo():
+ c = Command("find /var/log/apache2 -mindepth 1 | wc -l")
+ assert int(c.stdout) == \
+ len(securedrop_test_vars.allowed_apache_logfiles)
diff --git a/testinfra/app/test_apparmor.py b/testinfra/app/test_apparmor.py
--- a/testinfra/app/test_apparmor.py
+++ b/testinfra/app/test_apparmor.py
@@ -1,4 +1,3 @@
-import os
import pytest
@@ -10,50 +9,67 @@ def test_apparmor_pkg(Package, pkg):
""" Apparmor package dependencies """
assert Package(pkg).is_installed
+
def test_apparmor_enabled(Command, Sudo):
""" Check that apparmor is enabled """
with Sudo():
assert Command("aa-status --enabled").rc == 0
+
apache2_capabilities = [
'dac_override',
'kill',
'net_bind_service',
'sys_ptrace'
]
+
+
@pytest.mark.parametrize('cap', apache2_capabilities)
def test_apparmor_apache_capabilities(Command, cap):
""" check for exact list of expected app-armor capabilities for apache2 """
- c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' /etc/apparmor.d/usr.sbin.apache2")
+ c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' "
+ "/etc/apparmor.d/usr.sbin.apache2")
assert cap in c.stdout
+
def test_apparmor_apache_exact_capabilities(Command):
""" ensure no extra capabilities are defined for apache2 """
- c = Command.check_output("grep -ic capability /etc/apparmor.d/usr.sbin.apache2")
+ c = Command.check_output("grep -ic capability "
+ "/etc/apparmor.d/usr.sbin.apache2")
assert str(len(apache2_capabilities)) == c
+
tor_capabilities = ['setgid']
+
+
@pytest.mark.parametrize('cap', tor_capabilities)
def test_apparmor_tor_capabilities(Command, cap):
""" check for exact list of expected app-armor capabilities for tor """
- c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && say $1\' /etc/apparmor.d/usr.sbin.tor")
+ c = Command("perl -nE \'/^\s+capability\s+(\w+),$/ && "
+ "say $1\' /etc/apparmor.d/usr.sbin.tor")
assert cap in c.stdout
-def test_apparmor_apache_exact_capabilities(Command):
+
+def test_apparmor_tor_exact_capabilities(Command):
""" ensure no extra capabilities are defined for tor """
- c = Command.check_output("grep -ic capability /etc/apparmor.d/usr.sbin.tor")
+ c = Command.check_output("grep -ic capability "
+ "/etc/apparmor.d/usr.sbin.tor")
assert str(len(tor_capabilities)) == c
+
enforced_profiles = [
'ntpd',
'apache2',
'tcpdump',
'tor']
+
+
@pytest.mark.parametrize('profile', enforced_profiles)
def test_apparmor_ensure_not_disabled(File, Sudo, profile):
- """ Explicitly check that enforced profiles are NOT in /etc/apparmor.d/disable
- Polling aa-status only checks the last config that was loaded, this ensures
- it wont be disabled on reboot.
+ """ Explicitly check that enforced profiles are NOT in
+ /etc/apparmor.d/disable
+ Polling aa-status only checks the last config that was loaded,
+ this ensures it wont be disabled on reboot.
"""
f = File("/etc/apparmor.d/disabled/usr.sbin.{}".format(profile))
with Sudo():
@@ -64,7 +80,8 @@ def test_apparmor_ensure_not_disabled(File, Sudo, profile):
def test_app_apparmor_complain(Command, Sudo, complain_pkg):
""" Ensure app-armor profiles are in complain mode for staging """
with Sudo():
- awk = "awk '/[0-9]+ profiles.*complain./{flag=1;next}/^[0-9]+.*/{flag=0}flag'"
+ awk = ("awk '/[0-9]+ profiles.*complain."
+ "/{flag=1;next}/^[0-9]+.*/{flag=0}flag'")
c = Command.check_output("aa-status | {}".format(awk))
assert complain_pkg in c
@@ -75,22 +92,28 @@ def test_app_apparmor_complain_count(Command, Sudo):
c = Command.check_output("aa-status --complaining")
assert c == str(len(sdvars.apparmor_complain))
+
@pytest.mark.parametrize('aa_enforced', sdvars.apparmor_enforce)
def test_apparmor_enforced(Command, Sudo, aa_enforced):
- awk = "awk '/[0-9]+ profiles.*enforce./{flag=1;next}/^[0-9]+.*/{flag=0}flag'"
+ awk = ("awk '/[0-9]+ profiles.*enforce./"
+ "{flag=1;next}/^[0-9]+.*/{flag=0}flag'")
with Sudo():
c = Command.check_output("aa-status | {}".format(awk))
assert aa_enforced in c
+
def test_apparmor_total_profiles(Command, Sudo):
- """ ensure number of total profiles is sum of enforced and complaining profiles """
+ """ Ensure number of total profiles is sum of enforced and
+ complaining profiles """
with Sudo():
- total_expected = str((len(sdvars.apparmor_enforce)
- + len(sdvars.apparmor_complain)))
+ total_expected = str((len(sdvars.apparmor_enforce)
+ + len(sdvars.apparmor_complain)))
assert Command.check_output("aa-status --profiled") == total_expected
+
def test_aastatus_unconfined(Command, Sudo):
- """ Ensure that there are no processes that are unconfined but have a profile """
+ """ Ensure that there are no processes that are unconfined but have
+ a profile """
unconfined_chk = "0 processes are unconfined but have a profile defined"
with Sudo():
assert unconfined_chk in Command("aa-status").stdout
diff --git a/testinfra/app/test_appenv.py b/testinfra/app/test_appenv.py
--- a/testinfra/app/test_appenv.py
+++ b/testinfra/app/test_appenv.py
@@ -1,8 +1,8 @@
import pytest
-import os
sdvars = pytest.securedrop_test_vars
+
@pytest.mark.parametrize('exp_pip_pkg', sdvars.pip_deps)
def test_app_pip_deps(PipPackage, exp_pip_pkg):
""" Ensure pip dependencies are installed """
@@ -21,11 +21,13 @@ def test_app_wsgi(File, Sudo):
assert f.contains("^import logging$")
assert f.contains("^logging\.basicConfig(stream=sys\.stderr)$")
+
def test_pidfile(File):
""" ensure there are no pid files """
assert not File('/tmp/journalist.pid').exists
assert not File('/tmp/source.pid').exists
+
@pytest.mark.parametrize('app_dir', sdvars.app_directories)
def test_app_directories(File, Sudo, app_dir):
""" ensure securedrop app directories exist with correct permissions """
@@ -36,15 +38,19 @@ def test_app_directories(File, Sudo, app_dir):
assert f.group == sdvars.securedrop_user
assert oct(f.mode) == "0700"
+
def test_app_code_pkg(Package):
""" ensure securedrop-app-code package is installed """
assert Package("securedrop-app-code").is_installed
+
def test_gpg_key_in_keyring(Command, Sudo):
""" ensure test gpg key is present in app keyring """
with Sudo(sdvars.securedrop_user):
- c = Command("gpg --homedir /var/lib/securedrop/keys --list-keys 28271441")
- assert "pub 4096R/28271441 2013-10-12" in c.stdout
+ c = Command("gpg --homedir /var/lib/securedrop/keys "
+ "--list-keys 28271441")
+ assert "pub 4096R/28271441 2013-10-12" in c.stdout
+
def test_ensure_logo(File, Sudo):
""" ensure default logo header file exists """
@@ -54,13 +60,16 @@ def test_ensure_logo(File, Sudo):
assert f.user == sdvars.securedrop_user
assert f.group == sdvars.securedrop_user
+
def test_securedrop_tmp_clean_cron(Command, Sudo):
""" Ensure securedrop tmp clean cron job in place """
with Sudo():
cronlist = Command("crontab -l").stdout
- cronjob = "@daily {}/manage.py clean-tmp".format(sdvars.securedrop_code)
+ cronjob = "@daily {}/manage.py clean-tmp".format(
+ sdvars.securedrop_code)
assert cronjob in cronlist
+
def test_app_workerlog_dir(File, Sudo):
""" ensure directory for worker logs is present """
f = File('/var/log/securedrop_worker')
diff --git a/testinfra/app/test_network.py b/testinfra/app/test_network.py
--- a/testinfra/app/test_network.py
+++ b/testinfra/app/test_network.py
@@ -12,21 +12,22 @@ def test_app_iptables_rules(SystemInfo, Command, Sudo):
# Build a dict of variables to pass to jinja for iptables comparison
kwargs = dict(
mon_ip=securedrop_test_vars.mon_ip,
- default_interface = Command.check_output("ip r | head -n 1 | awk '{ print $5 }'"),
- tor_user_id = Command.check_output("id -u debian-tor"),
- securedrop_user_id = Command.check_output("id -u www-data"),
- ssh_group_gid = Command.check_output("getent group ssh | cut -d: -f3"),
- dns_server = securedrop_test_vars.dns_server)
+ default_interface=Command.check_output("ip r | head -n 1 | "
+ "awk '{ print $5 }'"),
+ tor_user_id=Command.check_output("id -u debian-tor"),
+ securedrop_user_id=Command.check_output("id -u www-data"),
+ ssh_group_gid=Command.check_output("getent group ssh | cut -d: -f3"),
+ dns_server=securedrop_test_vars.dns_server)
# Build iptables scrape cmd, purge comments + counters
iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'"
environment = os.environ.get("CI_SD_ENV", "staging")
iptables_file = "{}/iptables-app-{}.j2".format(
- os.path.dirname(os.path.abspath(__file__)),
- environment)
+ os.path.dirname(os.path.abspath(__file__)),
+ environment)
# template out a local iptables jinja file
- jinja_iptables = Template(open(iptables_file,'r').read())
+ jinja_iptables = Template(open(iptables_file, 'r').read())
iptables_expected = jinja_iptables.render(**kwargs)
with Sudo():
@@ -35,7 +36,7 @@ def test_app_iptables_rules(SystemInfo, Command, Sudo):
# print diff comparison (only shows up in pytests if test fails or
# verbosity turned way up)
for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'),
- iptables.split('\n')):
+ iptables.split('\n')):
print(iptablesdiff)
# Conduct the string comparison of the expected and actual iptables
# ruleset
diff --git a/testinfra/app/test_ossec.py b/testinfra/app/test_ossec.py
--- a/testinfra/app/test_ossec.py
+++ b/testinfra/app/test_ossec.py
@@ -3,19 +3,11 @@
sdvars = pytest.securedrop_test_vars
-# Currently failing in CI under remote hosts
-# Looks like vagrant is currently appending hostname to local IP
[email protected]
-def test_hosts_files(File, SystemInfo):
- """ Ensure host localhost is mapping to servername """
- f = File('/etc/hosts')
- assert f.contains('^127.0.0.1\.*mon-{0}$'.format(env))
def test_hosts_files(File, SystemInfo):
""" Ensure host files mapping are in place """
f = File('/etc/hosts')
- hostname = SystemInfo.hostname
mon_ip = sdvars.mon_ip
mon_host = sdvars.monitor_hostname
@@ -24,10 +16,12 @@ def test_hosts_files(File, SystemInfo):
mon_ip,
mon_host))
+
def test_hosts_duplicate(Command):
""" Regression test for duplicate entries """
assert Command.check_output("uniq --repeated /etc/hosts") == ""
+
def test_ossec_agent_installed(Package):
""" Check that ossec-agent package is present """
assert Package("securedrop-ossec-agent").is_installed
diff --git a/testinfra/common/test_cron_apt.py b/testinfra/common/test_cron_apt.py
--- a/testinfra/common/test_cron_apt.py
+++ b/testinfra/common/test_cron_apt.py
@@ -32,7 +32,6 @@ def test_cron_apt_config(File):
assert f.contains('^EXITON=error$')
-
@pytest.mark.parametrize('repo', [
'deb http://security.ubuntu.com/ubuntu trusty-security main',
'deb-src http://security.ubuntu.com/ubuntu trusty-security main',
@@ -54,7 +53,6 @@ def test_cron_apt_repo_list(File, repo):
assert f.contains(repo_regex)
-
def test_cron_apt_repo_config_update(File):
"""
Ensure cron-apt updates repos from the security.list config.
@@ -65,8 +63,8 @@ def test_cron_apt_repo_config_update(File):
assert f.user == "root"
assert oct(f.mode) == "0644"
repo_config = str('update -o quiet=2'
- ' -o Dir::Etc::SourceList=/etc/apt/security.list'
- ' -o Dir::Etc::SourceParts=""')
+ ' -o Dir::Etc::SourceList=/etc/apt/security.list'
+ ' -o Dir::Etc::SourceParts=""')
assert f.contains('^{}$'.format(repo_config))
@@ -80,9 +78,9 @@ def test_cron_apt_repo_config_upgrade(File):
assert oct(f.mode) == "0644"
assert f.contains('^autoclean -y$')
repo_config = str('dist-upgrade -y -o APT::Get::Show-Upgraded=true'
- ' -o Dir::Etc::SourceList=/etc/apt/security.list'
- ' -o Dpkg::Options::=--force-confdef'
- ' -o Dpkg::Options::=--force-confold')
+ ' -o Dir::Etc::SourceList=/etc/apt/security.list'
+ ' -o Dpkg::Options::=--force-confdef'
+ ' -o Dpkg::Options::=--force-confold')
assert f.contains(re.escape(repo_config))
@@ -95,15 +93,12 @@ def test_cron_apt_config_deprecated(File):
@pytest.mark.parametrize('cron_job', [
- { 'job': '0 4 * * * root /usr/bin/test -x /usr/sbin/cron-apt && /usr/sbin/cron-apt && /sbin/reboot',
- 'state': 'present',
- },
- { 'job': '0 4 * * * root /usr/bin/test -x /usr/sbin/cron-apt && /usr/sbin/cron-apt',
- 'state': 'absent',
- },
- { 'job': '0 5 * * * root /sbin/reboot',
- 'state': 'absent',
- },
+ {'job': '0 4 * * * root /usr/bin/test -x /usr/sbin/cron-apt && /usr/sbin/cron-apt && /sbin/reboot', # noqa
+ 'state': 'present'},
+ {'job': '0 4 * * * root /usr/bin/test -x /usr/sbin/cron-apt && /usr/sbin/cron-apt', # noqa
+ 'state': 'absent'},
+ {'job': '0 5 * * * root /sbin/reboot',
+ 'state': 'absent'},
])
def test_cron_apt_cron_jobs(File, cron_job):
"""
@@ -136,5 +131,6 @@ def test_cron_apt_all_packages_updated(Command):
assert c.rc == 0
# Staging hosts will have locally built deb packages, marked as held.
# Staging and development will have a version-locked Firefox pinned for
- # Selenium compatibility; if the holds are working, they shouldn't be upgraded.
+ # Selenium compatibility; if the holds are working, they shouldn't be
+ # upgraded.
assert "No packages will be installed, upgraded, or removed." in c.stdout
diff --git a/testinfra/common/test_fpf_apt_repo.py b/testinfra/common/test_fpf_apt_repo.py
--- a/testinfra/common/test_fpf_apt_repo.py
+++ b/testinfra/common/test_fpf_apt_repo.py
@@ -13,18 +13,18 @@ def test_fpf_apt_repo_present(File):
is tested separately.
"""
f = File('/etc/apt/sources.list.d/apt_freedom_press.list')
- assert f.contains('^deb \[arch=amd64\] https:\/\/apt\.freedom\.press trusty main$')
+ assert f.contains('^deb \[arch=amd64\] https:\/\/apt\.freedom\.press '
+ 'trusty main$')
def test_fpf_apt_repo_fingerprint(Command):
"""
Ensure the FPF apt repo has the correct fingerprint on the associated
- signing pubkey. The key changed in October 2016, so test for the
- newest fingerprint, which is installed on systems via the
+ signing pubkey. The key changed in October 2016, so test for the
+ newest fingerprint, which is installed on systems via the
`securedrop-keyring` package.
"""
-
c = Command('apt-key finger')
fpf_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/securedrop-keyring.gpg
@@ -36,7 +36,8 @@ def test_fpf_apt_repo_fingerprint(Command):
assert c.rc == 0
assert fpf_gpg_pub_key_info in c.stdout
- fpf_gpg_pub_key_fingerprint_expired = 'B89A 29DB 2128 160B 8E4B 1B4C BADD E0C7 FC9F 6818'
+ fpf_gpg_pub_key_fingerprint_expired = ('B89A 29DB 2128 160B 8E4B '
+ '1B4C BADD E0C7 FC9F 6818')
fpf_gpg_pub_key_info_expired = """pub 4096R/FC9F6818 2014-10-26 [expired: 2016-10-27]
Key fingerprint = #{fpf_gpg_pub_key_fingerprint_expired}
uid Freedom of the Press Foundation Master Signing Key"""
diff --git a/testinfra/common/test_grsecurity.py b/testinfra/common/test_grsecurity.py
--- a/testinfra/common/test_grsecurity.py
+++ b/testinfra/common/test_grsecurity.py
@@ -13,7 +13,7 @@ def test_ssh_motd_disabled(File):
assert not f.contains("pam\.motd")
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
@pytest.mark.parametrize("package", [
'paxctl',
@@ -28,7 +28,7 @@ def test_grsecurity_apt_packages(Package, package):
assert Package(package).is_installed
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
@pytest.mark.parametrize("package", [
'linux-signed-image-generic-lts-utopic',
@@ -42,7 +42,7 @@ def test_generic_kernels_absent(Command, package):
"""
Ensure the default Ubuntu-provided kernel packages are absent.
In the past, conflicting version numbers have caused machines
- to reboot into a non-grsec kernel due to poor handling of
+ to reboot into a non-grsec kernel due to poor handling of
GRUB_DEFAULT logic. Removing the vendor-provided kernel packages
prevents accidental boots into non-grsec kernels.
"""
@@ -55,7 +55,7 @@ def test_generic_kernels_absent(Command, package):
assert c.stderr == error_text
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
def test_grsecurity_lock_file(File):
"""
@@ -68,7 +68,7 @@ def test_grsecurity_lock_file(File):
assert f.size == 0
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
def test_grsecurity_kernel_is_running(Command):
"""
@@ -79,7 +79,7 @@ def test_grsecurity_kernel_is_running(Command):
assert c.stdout == '3.14.79-grsec'
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
@pytest.mark.parametrize('sysctl_opt', [
('kernel.grsecurity.grsec_lock', 1),
@@ -94,7 +94,8 @@ def test_grsecurity_sysctl_options(Sysctl, Sudo, sysctl_opt):
with Sudo():
assert Sysctl(sysctl_opt[0]) == sysctl_opt[1]
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
+
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
@pytest.mark.parametrize('paxtest_check', [
"Executable anonymous mapping",
@@ -127,10 +128,10 @@ def test_grsecurity_paxtest(Command, Sudo, paxtest_check):
assert c.rc == 0
assert "Vulnerable" not in c.stdout
regex = "^{}\s*:\sKilled$".format(re.escape(paxtest_check))
+ assert re.search(regex, c.stdout)
-
[email protected](os.environ.get('FPF_CI','false') == "true",
[email protected](os.environ.get('FPF_CI', 'false') == "true",
reason="Not needed in CI environment")
def test_grub_pc_marked_manual(Command):
"""
@@ -151,7 +152,7 @@ def test_apt_autoremove(Command):
assert "The following packages will be REMOVED" not in c.stdout
[email protected](os.environ.get('FPF_GRSEC','true') == "false",
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
reason="Need to skip in environment w/o grsec")
@pytest.mark.parametrize("binary", [
"/usr/sbin/grub-probe",
diff --git a/testinfra/common/test_platform.py b/testinfra/common/test_platform.py
--- a/testinfra/common/test_platform.py
+++ b/testinfra/common/test_platform.py
@@ -1,6 +1,3 @@
-import pytest
-
-
def test_ansible_version(host):
"""
Check that a supported version of Ansible is being used.
@@ -13,6 +10,7 @@ def test_ansible_version(host):
c = localhost.check_output("ansible --version")
assert c.startswith("ansible 2.")
+
def test_platform(SystemInfo):
"""
SecureDrop requires Ubuntu Trusty 14.04 LTS. The shelf life
diff --git a/testinfra/common/test_system_hardening.py b/testinfra/common/test_system_hardening.py
--- a/testinfra/common/test_system_hardening.py
+++ b/testinfra/common/test_system_hardening.py
@@ -64,7 +64,7 @@ def test_blacklisted_kernel_modules(Command, File, Sudo, kernel_module):
@pytest.mark.skipif(hostenv.startswith('mon'),
- reason="Monitor Server does not have swap disabled yet.")
+ reason="Monitor Server does not have swap disabled yet.")
def test_swap_disabled(Command):
"""
Ensure swap space is disabled. Prohibit writing memory to swapfiles
diff --git a/testinfra/common/test_tor_config.py b/testinfra/common/test_tor_config.py
--- a/testinfra/common/test_tor_config.py
+++ b/testinfra/common/test_tor_config.py
@@ -3,13 +3,15 @@
sdvars = pytest.securedrop_test_vars
+
def test_tor_apt_repo(File):
"""
Ensure the Tor Project apt repository is configured.
The version of Tor in the Trusty repos is not up to date.
"""
f = File('/etc/apt/sources.list.d/deb_torproject_org_torproject_org.list')
- repo_regex = re.escape('deb http://deb.torproject.org/torproject.org trusty main')
+ repo_regex = re.escape('deb http://deb.torproject.org/torproject.org '
+ 'trusty main')
assert f.contains(repo_regex)
@@ -36,7 +38,8 @@ def test_tor_service_running(Command, File, Sudo):
# script, so let's just shell out and verify the running and enabled
# states explicitly.
with Sudo():
- assert Command.check_output("service tor status") == " * tor is running"
+ assert Command.check_output("service tor status") == \
+ " * tor is running"
tor_enabled = Command.check_output("find /etc/rc?.d -name S??tor")
assert tor_enabled != ""
diff --git a/testinfra/common/test_tor_hidden_services.py b/testinfra/common/test_tor_hidden_services.py
--- a/testinfra/common/test_tor_hidden_services.py
+++ b/testinfra/common/test_tor_hidden_services.py
@@ -32,7 +32,8 @@ def test_tor_service_hostnames(File, Sudo, tor_service):
ths_hostname_regex = "[a-z0-9]{16}\.onion"
with Sudo():
- f = File("/var/lib/tor/services/{}/hostname".format(tor_service['name']))
+ f = File("/var/lib/tor/services/{}/hostname".format(
+ tor_service['name']))
assert f.is_file
assert oct(f.mode) == "0600"
assert f.user == "debian-tor"
@@ -43,8 +44,10 @@ def test_tor_service_hostnames(File, Sudo, tor_service):
if tor_service['authenticated']:
# HidServAuth regex is approximately [a-zA-Z0-9/+], but validating
- # the entire entry is sane, and we don't need to nitpick the charset.
- aths_hostname_regex = ths_hostname_regex+" .{22} # client: "+tor_service['client']
+ # the entire entry is sane, and we don't need to nitpick the
+ # charset.
+ aths_hostname_regex = ths_hostname_regex + " .{22} # client: " + \
+ tor_service['client']
assert re.search("^{}$".format(aths_hostname_regex), f.content)
else:
assert re.search("^{}$".format(ths_hostname_regex), f.content)
@@ -67,7 +70,7 @@ def test_tor_services_config(File, tor_service):
"""
f = File("/etc/tor/torrc")
dir_regex = "HiddenServiceDir /var/lib/tor/services/{}".format(
- tor_service['name'])
+ tor_service['name'])
# We need at least one port, but it may be used for both config values.
# On the Journalist Interface, we reuse the "80" remote port but map it to
# a different local port, so Apache can listen on several sockets.
diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py
--- a/testinfra/common/test_user_config.py
+++ b/testinfra/common/test_user_config.py
@@ -27,8 +27,11 @@ def test_sudoers_config(File, Sudo):
assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M)
assert re.search('^Defaults\s+env_reset$', sudoers_config, re.M)
assert re.search('^Defaults\s+mail_badpass$', sudoers_config, re.M)
- assert re.search('Defaults\s+secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"', sudoers_config, re.M)
- assert re.search('^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$', sudoers_config, re.M)
+ assert re.search('Defaults\s+secure_path="/usr/local/sbin:'
+ '/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"',
+ sudoers_config, re.M)
+ assert re.search('^%sudo\s+ALL=\(ALL\)\s+NOPASSWD:\s+ALL$',
+ sudoers_config, re.M)
assert re.search('Defaults:%sudo\s+!requiretty', sudoers_config, re.M)
@@ -42,7 +45,8 @@ def test_sudoers_tmux_env(File):
f = File('/etc/profile.d/securedrop_additions.sh')
non_interactive_str = re.escape('[[ $- != *i* ]] && return')
- tmux_check = re.escape('test -z "$TMUX" && (tmux attach || tmux new-session)')
+ tmux_check = re.escape('test -z "$TMUX" && (tmux attach ||'
+ ' tmux new-session)')
assert f.contains("^{}$".format(non_interactive_str))
assert f.contains("^if which tmux >\/dev\/null 2>&1; then$")
@@ -55,15 +59,15 @@ def test_tmux_installed(Package):
"""
Ensure the `tmux` package is present, since it's required for the user env.
When running an interactive SSH session over Tor, tmux should be started
- automatically, to prevent problems if the connection is broken unexpectedly,
- as sometimes happens over Tor. The Admin will be able to reconnect to the
- running tmux session and review command output.
+ automatically, to prevent problems if the connection is broken
+ unexpectedly, as sometimes happens over Tor. The Admin will be able to
+ reconnect to the running tmux session and review command output.
"""
assert Package("tmux").is_installed
@pytest.mark.skipif(hostenv == 'travis',
- reason="Bashrc tests dont make sense on Travis")
+ reason="Bashrc tests dont make sense on Travis")
def test_sudoers_tmux_env_deprecated(File):
"""
Previous version of the Ansible config set the tmux config
diff --git a/testinfra/development/test_development_application_settings.py b/testinfra/development/test_development_application_settings.py
--- a/testinfra/development/test_development_application_settings.py
+++ b/testinfra/development/test_development_application_settings.py
@@ -5,6 +5,7 @@
sd_test_vars = pytest.securedrop_test_vars
+
@pytest.mark.parametrize('package', [
"securedrop-app-code",
"apache2-mpm-worker",
@@ -23,7 +24,8 @@ def test_development_lacks_deb_packages(Command, package):
assert c.rc == 1
assert c.stdout == ""
stderr = c.stderr.rstrip()
- assert stderr == "dpkg-query: no packages found matching {}".format(package)
+ assert stderr == "dpkg-query: no packages found matching {}".format(
+ package)
def test_development_apparmor_no_complain_mode(Command, Sudo):
@@ -106,8 +108,10 @@ def test_development_clean_tmp_cron_job(Command, Sudo):
with Sudo():
c = Command.check_output('crontab -l')
- assert "@daily {}/manage.py clean-tmp".format(sd_test_vars.securedrop_code) in c
- assert "@daily {}/manage.py clean_tmp".format(sd_test_vars.securedrop_code) not in c
+ assert "@daily {}/manage.py clean-tmp".format(
+ sd_test_vars.securedrop_code) in c
+ assert "@daily {}/manage.py clean_tmp".format(
+ sd_test_vars.securedrop_code) not in c
assert "clean_tmp".format(sd_test_vars.securedrop_code) not in c
# Make sure that the only cron lines are a comment and the actual job.
# We don't want any duplicates.
diff --git a/testinfra/development/test_development_environment.py b/testinfra/development/test_development_environment.py
--- a/testinfra/development/test_development_environment.py
+++ b/testinfra/development/test_development_environment.py
@@ -1,7 +1,7 @@
import pytest
-import os
import getpass
+
def test_development_app_dependencies(Package):
"""
Ensure development apt dependencies are installed.
@@ -38,19 +38,23 @@ def test_development_app_dependencies(Package):
('selenium', '2.53.6'),
('six', '1.10.0'),
])
-def test_development_pip_dependencies(Command, pip_package, version):
+def test_development_pip_dependencies(Command, Sudo, pip_package, version):
"""
Declare SecureDrop app pip requirements. On the development VM,
the pip dependencies should be installed directly via pip, rather
than relying on the deb packages with pip-wheel inclusions.
Versions here are intentionally hardcoded to track changes.
"""
- c = Command('pip freeze')
- assert "{}=={}".format(pip_package, version) in c.stdout.rstrip()
+ # Using elevated privileges to list the Python packages, since
+ # the playbooks use sudo to install the pip packages system-wide.
+ # In Travis, lack of sudo here hides a number of dependencies.
+ with Sudo():
+ c = Command('pip freeze')
+ assert "{}=={}".format(pip_package, version) in c.stdout.rstrip()
@pytest.mark.skipif(getpass.getuser() != 'vagrant',
- reason="vagrant bashrc checks dont make sense in CI")
+ reason="vagrant bashrc checks dont make sense in CI")
def test_development_securedrop_env_var(File):
"""
Ensure that the SECUREDROP_ENV var is set to "dev".
diff --git a/testinfra/development/test_development_networking.py b/testinfra/development/test_development_networking.py
--- a/testinfra/development/test_development_networking.py
+++ b/testinfra/development/test_development_networking.py
@@ -3,6 +3,7 @@
hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
+
@pytest.mark.skipif(hostenv == 'travis',
reason="Custom networking in Travis")
def test_development_iptables_rules(Command, Sudo):
@@ -50,12 +51,12 @@ def test_development_redis_worker(Socket):
# aren't configured to run by default, e.g. on boot. Nor
# do the app tests cause them to be run. So, we shouldn't
# really expected them to be running.
-## check for source interface flask port listening
-#describe port(8080) do
+# check for source interface flask port listening
+# describe port(8080) do
# it { should be_listening.on('0.0.0.0').with('tcp') }
-#end
+# end
#
-## check for journalist interface flask port listening
-#describe port(8081) do
+# check for journalist interface flask port listening
+# describe port(8081) do
# it { should be_listening.on('0.0.0.0').with('tcp') }
-#end
+# end
diff --git a/testinfra/development/test_xvfb.py b/testinfra/development/test_xvfb.py
--- a/testinfra/development/test_xvfb.py
+++ b/testinfra/development/test_xvfb.py
@@ -1,15 +1,30 @@
-import pytest
+import os
[email protected]('dependency', [
- 'firefox',
- 'xvfb',
-])
-def test_xvfb_apt_dependencies(Package, dependency):
+def test_xvfb_is_installed(Package):
"""
Ensure apt requirements for Xvfb are present.
"""
- assert Package(dependency).is_installed
+ assert Package("xvfb").is_installed
+
+
+def test_firefox_is_installed(Package, Command):
+ """
+ The app test suite requires a very specific version of Firefox, for
+ compatibility with Selenium. Make sure to check the explicit
+ version of Firefox, not just that any version of Firefox is installed.
+
+ In Travis, the Firefox installation is handled via the travis.yml
+ file, and so it won't show as installed via dpkg.
+ """
+ if "TRAVIS" not in os.environ:
+ p = Package("firefox")
+ assert p.is_installed
+
+ c = Command("firefox --version")
+ # Reminder: the rstrip is only necessary for local-context actions,
+ # e.g. in Travis, but it's a fine practice in all contexts.
+ assert c.stdout.rstrip() == "Mozilla Firefox 46.0.1"
def test_xvfb_service_config(File, Sudo):
@@ -55,7 +70,7 @@ def test_xvfb_service_config(File, Sudo):
esac
exit 0
-""".lstrip().rstrip()
+""".lstrip().rstrip() # noqa
with Sudo():
assert f.contains('^XVFB=/usr/bin/Xvfb$')
assert f.contains('^XVFBARGS=":1 -screen 0 1024x768x24 '
@@ -106,7 +121,7 @@ def test_xvfb_service_running(Process, Sudo):
with Sudo():
p = Process.get(user="root", comm="Xvfb")
wanted_args = str('/usr/bin/Xvfb :1 -screen 0 1024x768x24 '
- '-ac +extension GLX +render -noreset')
+ '-ac +extension GLX +render -noreset')
assert p.args == wanted_args
# We only expect a single process, no children.
workers = Process.filter(ppid=p.pid)
diff --git a/testinfra/functional/test_tor_interfaces.py b/testinfra/functional/test_tor_interfaces.py
--- a/testinfra/functional/test_tor_interfaces.py
+++ b/testinfra/functional/test_tor_interfaces.py
@@ -4,6 +4,7 @@
sdvars = pytest.securedrop_test_vars
+
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
@@ -13,13 +14,16 @@ def test_www(Command, site):
"""
# Extract Onion URL from saved onion file, fetched back from app-staging.
- onion_url_filepath = os.path.join(os.path.dirname(__file__),
- "../../install_files/ansible-base/{}".format(site['file']))
- onion_url_raw = open(onion_url_filepath,'ro').read()
+ onion_url_filepath = os.path.join(
+ os.path.dirname(__file__),
+ "../../install_files/ansible-base/{}".format(site['file'])
+ )
+ onion_url_raw = open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
- curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(onion_url)
+ curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format(
+ onion_url)
curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor)
site_scrape = Command.check_output(curl_tor)
diff --git a/testinfra/mon/test_network.py b/testinfra/mon/test_network.py
--- a/testinfra/mon/test_network.py
+++ b/testinfra/mon/test_network.py
@@ -13,21 +13,22 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo):
# Build a dict of variables to pass to jinja for iptables comparison
kwargs = dict(
app_ip=app_ip,
- default_interface = Command.check_output("ip r | head -n 1 | awk '{ print $5 }'"),
- tor_user_id = Command.check_output("id -u debian-tor"),
- ssh_group_gid = Command.check_output("getent group ssh | cut -d: -f3"),
- postfix_user_id = Command.check_output("id -u postfix"),
- dns_server = securedrop_test_vars.dns_server)
+ default_interface=Command.check_output(
+ "ip r | head -n 1 | awk '{ print $5 }'"),
+ tor_user_id=Command.check_output("id -u debian-tor"),
+ ssh_group_gid=Command.check_output("getent group ssh | cut -d: -f3"),
+ postfix_user_id=Command.check_output("id -u postfix"),
+ dns_server=securedrop_test_vars.dns_server)
# Build iptables scrape cmd, purge comments + counters
iptables = "iptables-save | sed 's/ \[[0-9]*\:[0-9]*\]//g' | egrep -v '^#'"
environment = os.environ.get("CI_SD_ENV", "staging")
iptables_file = "{}/iptables-mon-{}.j2".format(
- os.path.dirname(os.path.abspath(__file__)),
- environment)
+ os.path.dirname(os.path.abspath(__file__)),
+ environment)
# template out a local iptables jinja file
- jinja_iptables = Template(open(iptables_file,'r').read())
+ jinja_iptables = Template(open(iptables_file, 'r').read())
iptables_expected = jinja_iptables.render(**kwargs)
with Sudo():
@@ -36,7 +37,7 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo):
# print diff comparison (only shows up in pytests if test fails or
# verbosity turned way up)
for iptablesdiff in difflib.context_diff(iptables_expected.split('\n'),
- iptables.split('\n')):
+ iptables.split('\n')):
print(iptablesdiff)
# Conduct the string comparison of the expected and actual iptables
# ruleset
@@ -44,15 +45,21 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo):
@pytest.mark.parametrize('ossec_service', [
- dict(host="0.0.0.0", proto="tcp", port=22),
- dict(host="127.0.0.1", proto="tcp", port=25),
- dict(host="0.0.0.0", proto="udp", port=1514),
+ dict(host="0.0.0.0", proto="tcp", port=22, listening=True),
+ dict(host="0.0.0.0", proto="udp", port=1514, listening=True),
+ dict(host="0.0.0.0", proto="tcp", port=1515, listening=False),
])
def test_listening_ports(Socket, Sudo, ossec_service):
"""
Ensure the OSSEC-related services are listening on the
- expected sockets. Services to check include ossec, mail, and ssh.
+ expected sockets. Services to check include ossec-remoted
+ and ossec-authd. Helper services such as postfix are checked
+ separately.
+
+ Note that the SSH check will fail if run against a prod host, due
+ to the SSH-over-Tor strategy. We can port the parametrized values
+ to config test YAML vars at that point.
"""
socket = "{proto}://{host}:{port}".format(**ossec_service)
with Sudo():
- assert Socket(socket).is_listening
+ assert Socket(socket).is_listening == ossec_service['listening']
diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py
--- a/testinfra/mon/test_ossec.py
+++ b/testinfra/mon/test_ossec.py
@@ -1,9 +1,9 @@
-import re
import pytest
securedrop_test_vars = pytest.securedrop_test_vars
+
@pytest.mark.parametrize('package', [
'mailutils',
'ossec-server',
@@ -19,84 +19,24 @@ def test_ossec_package(Package, package):
assert Package(package).is_installed
[email protected]('header', [
- '/^X-Originating-IP:/ IGNORE',
- '/^X-Mailer:/ IGNORE',
- '/^Mime-Version:/ IGNORE',
- '/^User-Agent:/ IGNORE',
- '/^Received:/ IGNORE',
-])
-def test_postfix_headers(File, header):
- """
- Ensure postfix header filters are set correctly. Common mail headers
- are stripped by default to avoid leaking metadata about the instance.
- Message body is always encrypted prior to sending.
- """
- f = File("/etc/postfix/header_checks")
- assert f.is_file
- assert oct(f.mode) == "0644"
- regex = '^{}$'.format(re.escape(header))
- assert re.search(regex, f.content, re.M)
-
-
[email protected]('setting', [
- 'relayhost = [smtp.gmail.com]:587',
- 'smtp_sasl_auth_enable = yes',
- 'smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd',
- 'smtp_sasl_security_options = noanonymous',
- 'smtp_use_tls = yes',
- 'smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache',
- 'smtp_tls_security_level = secure',
- 'smtp_tls_CApath = /etc/ssl/certs',
- 'smtp_tls_ciphers = high',
- 'smtp_tls_protocols = TLSv1.2 TLSv1.1 TLSv1 !SSLv3 !SSLv2',
- 'myhostname = ossec.server',
- 'myorigin = $myhostname',
- 'smtpd_banner = $myhostname ESMTP $mail_name (Ubuntu)',
- 'biff = no',
- 'append_dot_mydomain = no',
- 'readme_directory = no',
- 'smtp_header_checks = regexp:/etc/postfix/header_checks',
- 'mailbox_command = /usr/bin/procmail',
- 'inet_interfaces = loopback-only',
- 'alias_maps = hash:/etc/aliases',
- 'alias_database = hash:/etc/aliases',
- 'mydestination = $myhostname, localhost.localdomain , localhost',
- 'mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128',
- 'mailbox_size_limit = 0',
- 'recipient_delimiter = +',
-])
-def test_postfix_settings(File, setting):
- """
- Check all postfix configuration lines. There are technically multiple
- configuration paths regarding the TLS settings, particularly the
- fingerprint verification logic, but only the base default config is tested
- currently.
- """
- f = File("/etc/postfix/main.cf")
- assert f.is_file
- assert f.user == 'root'
- assert oct(f.mode) == "0644"
- regex = '^{}$'.format(re.escape(setting))
- assert re.search(regex, f.content, re.M)
-
-
def test_ossec_connectivity(Command, Sudo):
"""
Ensure ossec-server machine has active connection to the ossec-agent.
The ossec service will report all available agents, and we can inspect
that list to make sure it's the host we expect.
"""
- desired_output = "{}-{} is available.".format(securedrop_test_vars.app_hostname,
- securedrop_test_vars.app_ip)
+ desired_output = "{}-{} is available.".format(
+ securedrop_test_vars.app_hostname,
+ securedrop_test_vars.app_ip)
with Sudo():
c = Command.check_output("/var/ossec/bin/list_agents -a")
assert c == desired_output
-def test_ossec_gnupg(File, Sudo):
+
+def test_ossec_gnupg_homedir(File, Sudo):
""" ensure ossec gpg homedir exists """
with Sudo():
- f = File(OSSEC_GNUPG)
+ f = File("/var/ossec/.gnupg")
assert f.is_directory
assert f.user == "ossec"
assert oct(f.mode) == "0700"
@@ -123,10 +63,11 @@ def test_ossec_pubkey_in_keyring(Command, Sudo):
"""
ossec_gpg_pubkey_info = """pub 4096R/EDDDC102 2014-10-15
uid Test/Development (DO NOT USE IN PRODUCTION) (Admin's OSSEC Alert GPG key) <[email protected]>
-sub 4096R/97D2EB39 2014-10-15"""
+sub 4096R/97D2EB39 2014-10-15""" # noqa
with Sudo("ossec"):
- c = Command.check_output("gpg --homedir /var/ossec/.gnupg --list-keys EDDDC102")
- assert c == ossec_gpg_pubkey_info
+ c = Command.check_output("gpg --homedir /var/ossec/.gnupg "
+ "--list-keys EDDDC102")
+ assert c == ossec_gpg_pubkey_info
# Permissions don't match between Ansible and OSSEC deb packages postinst.
@@ -146,8 +87,8 @@ def test_ossec_keyfiles(File, Sudo, keyfile):
with Sudo():
f = File(keyfile)
assert f.is_file
- # The postinst scripts in the OSSEC deb packages set 440 on the keyfiles;
- # the Ansible config should be updated to do the same.
+ # The postinst scripts in the OSSEC deb packages set 440 on the
+ # keyfiles; the Ansible config should be updated to do the same.
assert oct(f.mode) == "0440"
assert f.user == "root"
assert f.group == "ossec"
@@ -209,20 +150,11 @@ def test_ossec_authd(Command, Sudo):
assert c.stdout == ""
assert c.rc != 0
-# Currently failing in CI under remote hosts
-# Looks like vagrant is currently appending hostname to local IP
[email protected]
-def test_hosts_files(File, SystemInfo):
- """ Ensure host localhost is mapping to servername """
- f = File('/etc/hosts')
- mon_host = securedrop_test_vars.monitor_hostname
- assert f.contains('^127.0.0.1.*{0}$'.format(mon_host))
def test_hosts_files(File, SystemInfo):
""" Ensure host files mapping are in place """
f = File('/etc/hosts')
- hostname = SystemInfo.hostname
app_ip = securedrop_test_vars.app_ip
app_host = securedrop_test_vars.app_hostname
@@ -248,15 +180,3 @@ def test_ossec_log_contains_no_malformed_events(File, Sudo):
def test_regression_hosts(Command):
""" Regression test to check for duplicate entries. """
assert Command.check_output("uniq --repeated /etc/hosts") == ""
-
-
-def test_postfix_generic_maps(File):
- """
- Regression test to check that generic Postfix maps are not configured
- by default. As of #1565 Admins can opt-in to overriding the FROM address
- used for sending OSSEC alerts, but by default we're preserving the old
- `[email protected]` behavior, to avoid breaking email for previously
- existing instances.
- """
- assert not File("/etc/postfix/generic").exists
- assert not File("/etc/postfix/main.cf").contains("^smtp_generic_maps")
diff --git a/testinfra/mon/test_ossec_ruleset.py b/testinfra/mon/test_ossec_ruleset.py
new file mode 100644
--- /dev/null
+++ b/testinfra/mon/test_ossec_ruleset.py
@@ -0,0 +1,29 @@
+import pytest
+import re
+
+
+alert_level_regex = re.compile(r"Level: '(\d+)'")
+rule_id_regex = re.compile(r"Rule id: '(\d+)'")
+sdvars = pytest.securedrop_test_vars
+
+
[email protected]('log_event',
+ sdvars.log_events_without_ossec_alerts)
+def test_ossec_false_positives_suppressed(Command, Sudo, log_event):
+ with Sudo():
+ c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
+ log_event["alert"]))
+ assert "Alert to be generated" not in c.stderr
+
+
[email protected]('log_event',
+ sdvars.log_events_with_ossec_alerts)
+def test_ossec_expected_alerts_are_present(Command, Sudo, log_event):
+ with Sudo():
+ c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
+ log_event["alert"]))
+ assert "Alert to be generated" in c.stderr
+ alert_level = alert_level_regex.findall(c.stderr)[0]
+ assert alert_level == log_event["level"]
+ rule_id = rule_id_regex.findall(c.stderr)[0]
+ assert rule_id == log_event["rule_id"]
diff --git a/testinfra/mon/test_postfix.py b/testinfra/mon/test_postfix.py
new file mode 100644
--- /dev/null
+++ b/testinfra/mon/test_postfix.py
@@ -0,0 +1,96 @@
+import re
+import pytest
+
+
+securedrop_test_vars = pytest.securedrop_test_vars
+
+
[email protected]('header', [
+ '/^X-Originating-IP:/ IGNORE',
+ '/^X-Mailer:/ IGNORE',
+ '/^Mime-Version:/ IGNORE',
+ '/^User-Agent:/ IGNORE',
+ '/^Received:/ IGNORE',
+])
+def test_postfix_headers(File, header):
+ """
+ Ensure postfix header filters are set correctly. Common mail headers
+ are stripped by default to avoid leaking metadata about the instance.
+ Message body is always encrypted prior to sending.
+ """
+ f = File("/etc/postfix/header_checks")
+ assert f.is_file
+ assert oct(f.mode) == "0644"
+ regex = '^{}$'.format(re.escape(header))
+ assert re.search(regex, f.content, re.M)
+
+
[email protected]('setting', [
+ 'relayhost = [smtp.gmail.com]:587',
+ 'smtp_sasl_auth_enable = yes',
+ 'smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd',
+ 'smtp_sasl_security_options = noanonymous',
+ 'smtp_use_tls = yes',
+ 'smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache',
+ 'smtp_tls_security_level = secure',
+ 'smtp_tls_CApath = /etc/ssl/certs',
+ 'smtp_tls_ciphers = high',
+ 'smtp_tls_protocols = TLSv1.2 TLSv1.1 TLSv1 !SSLv3 !SSLv2',
+ 'myhostname = ossec.server',
+ 'myorigin = $myhostname',
+ 'smtpd_banner = $myhostname ESMTP $mail_name (Ubuntu)',
+ 'biff = no',
+ 'append_dot_mydomain = no',
+ 'readme_directory = no',
+ 'smtp_header_checks = regexp:/etc/postfix/header_checks',
+ 'mailbox_command = /usr/bin/procmail',
+ 'inet_interfaces = loopback-only',
+ 'alias_maps = hash:/etc/aliases',
+ 'alias_database = hash:/etc/aliases',
+ 'mydestination = $myhostname, localhost.localdomain , localhost',
+ 'mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128',
+ 'mailbox_size_limit = 0',
+ 'recipient_delimiter = +',
+])
+def test_postfix_settings(File, setting):
+ """
+ Check all postfix configuration lines. There are technically multiple
+ configuration paths regarding the TLS settings, particularly the
+ fingerprint verification logic, but only the base default config is tested
+ currently.
+ """
+ f = File("/etc/postfix/main.cf")
+ assert f.is_file
+ assert f.user == 'root'
+ assert oct(f.mode) == "0644"
+ regex = '^{}$'.format(re.escape(setting))
+ assert re.search(regex, f.content, re.M)
+
+
+def test_postfix_generic_maps(File):
+ """
+ Regression test to check that generic Postfix maps are not configured
+ by default. As of #1565 Admins can opt-in to overriding the FROM address
+ used for sending OSSEC alerts, but by default we're preserving the old
+ `[email protected]` behavior, to avoid breaking email for previously
+ existing instances.
+ """
+ assert not File("/etc/postfix/generic").exists
+ assert not File("/etc/postfix/main.cf").contains("^smtp_generic_maps")
+
+
+def test_postfix_service(Service, Socket, Sudo):
+ """
+ Check Postfix service. Postfix is used to deliver OSSEC alerts via
+ encrypted email. On staging hosts, Postfix is disabled, due to lack
+ of SASL authentication credentials, but on prod hosts it should run.
+ """
+ # Elevated privileges are required to read Postfix service info,
+ # specifically `/var/spool/postfix/pid/master.pid`.
+ with Sudo():
+ postfix = Service("postfix")
+ assert postfix.is_running == securedrop_test_vars.postfix_enabled
+ assert postfix.is_enabled == securedrop_test_vars.postfix_enabled
+
+ socket = Socket("tcp://127.0.0.1:25")
+ assert socket.is_listening == securedrop_test_vars.postfix_enabled
diff --git a/testinfra/test.py b/testinfra/test.py
--- a/testinfra/test.py
+++ b/testinfra/test.py
@@ -33,8 +33,7 @@ def get_target_roles(target_host):
"mon-staging": ['testinfra/mon',
'testinfra/common'],
"mon-prod": ['testinfra/mon'],
- "apptestclient": ['testinfra/functional'],
- "build": ['testinfra/build']}
+ "apptestclient": ['testinfra/functional']}
try:
return target_roles[target_host]
@@ -64,10 +63,12 @@ def run_testinfra(target_host, verbose=True):
if target_host.endswith("-prod"):
os.environ['SECUREDROP_SSH_OVER_TOR'] = '1'
# Dump SSH config to tempfile so it can be passed as arg to testinfra.
- ssh_config_output = subprocess.check_output(["vagrant", "ssh-config", target_host])
- # Create temporary file to store ssh-config. Not deleting it automatically
- # because there's no sensitive info (HidServAuth is required to connect),
- # and we'll need it outside of the context-manager block that writes to it.
+ ssh_config_output = subprocess.check_output(["vagrant", "ssh-config",
+ target_host])
+ # Create temporary file to store ssh-config. Not deleting it
+ # automatically because there's no sensitive info (HidServAuth is
+ # required to connect), and we'll need it outside of the
+ # context-manager block that writes to it.
ssh_config_tmpfile = tempfile.NamedTemporaryFile(delete=False)
with ssh_config_tmpfile.file as f:
f.write(ssh_config_output)
@@ -84,12 +85,12 @@ def run_testinfra(target_host, verbose=True):
""".lstrip().rstrip()
elif os.environ.get("FPF_CI", 'false') == 'true':
- if os.environ.get("CI_SD_ENV","development") == "development":
+ if os.environ.get("CI_SD_ENV", "development") == "development":
os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = "travis"
ssh_config_path = ""
testinfra_command_template = "testinfra -vv {target_roles}"
else:
- if target_host in ["build", "apptestclient"]:
+ if target_host in ["apptestclient"]:
conn_type = "docker"
ssh_config_path = "{}/.ssh/sshconfig-securedrop-ci-{}".format(
os.environ["HOME"],
@@ -130,5 +131,6 @@ def run_testinfra(target_host, verbose=True):
# Execute config tests.
subprocess.check_call(testinfra_command)
+
if __name__ == "__main__":
run_testinfra(target_host)
| Skip staging CI run if only docs changed
# Feature request
## Description
The staging CI run, managed by CircleCI, takes 30-45 minutes, and is overkill if a contributor submits a docs-only PR change. Docs linting happens in the Travis CI run, so we'll still get docs linting checks even when skipping Circle CI.
## User Stories
As a SecureDrop developer, I don't want to wait around for CI that's meant to validate app code and config changes if only docs were edited in the PR.
Replace connect-proxy with netcat
During the development of #1070, we realized that `netcat` can be used as a SOCKS proxy for various applications. In `develop.md`, we currently recommend using `connect-proxy` when a SOCKS proxy for Tor is needed on Ubuntu. However, `netcat` has some advantages over `connect-proxy`:
1. It is installed on Ubuntu (and Mac OS X, and most Unix-y systems) by default
2. It is more actively maintained than connect-proxy (at least on Ubuntu, where the last update for netcat was in [2012](http://changelogs.ubuntu.com/changelogs/pool/universe/n/netcat/netcat_1.10-40/changelog), whereas the last update for connect-proxy was in [2009](http://changelogs.ubuntu.com/changelogs/pool/universe/c/connect-proxy/connect-proxy_1.101-1/changelog)).
In my opinion, we should just replace all references to connect-proxy with netcat.
The main thing we should test is that netcat **does not leak DNS lookups**. `connect-proxy` has a flag `-R remote` that explicitly requires it to do DNS resolution remotely (through the proxy), which is safe. Netcat does not have a comparable flag (although it does have `-n`, which might be useful -- needs investigation). However, it defaults to SOCKS5 by default, which supports [remote DNS resolution](https://www.inet.no/dante/doc/1.3.x/config/socks.html), so I think it probably does the right thing. From my [limited testing](https://github.com/freedomofpress/securedrop/pull/1070#issuecomment-123109555), `nc` on Mac OS X 10.10 does remote DNS resolution by default (or it wouldn't have been able to resolve the `.onion` addresses I was trying to SSH to).
Set up Makefile to be self-documenting
Like so.
```
help: ## Print this message
@awk 'BEGIN {FS = ":.*?## "} /^[0-9a-zA-Z_-]+:.*?## / {printf "\033[36m%16s\033[0m : %s\n", $$1, $$2}' $(MAKEFILE_LIST)
```
|
For what it's worth, I uninstalled `connect-proxy` when you opened this issue, and I haven't noticed any incompatibility with our config. As for guarding against DNS leakage, the defaults of `VerifyHostKeyDNS=no` for `ssh` and `-X 5` for `netcat` should be confirmed, and maybe hardcoded in our SSH proxy commands out of caution. The `-n` option for `netcat` does look interesting as well, but I haven't tested it yet.
This was already handled by 6cced9dcb16e0fd15841bf0aec75fa15fe64a847 in #1441, but there's a dangling reference in a comment in the Vagrantfile that should be cleaned up as well.
| 2017-09-12T18:56:37Z | [] | [] |
freedomofpress/securedrop | 2,299 | freedomofpress__securedrop-2299 | [
"1634"
] | 95d09b70783a279353180a48c53d006cbe0737e2 | diff --git a/migration_scripts/0.2.1/0.2.1_collect.py b/migration_scripts/0.2.1/0.2.1_collect.py
deleted file mode 100755
--- a/migration_scripts/0.2.1/0.2.1_collect.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/python2.7
-"""
-This script should be copied to a running SecureDrop 0.2.1 instance. When run
-(as root), it collects all of the necessary information to migrate the system
-to 0.3 and stores it in the .tar.gz file specified in the first argument.
-"""
-
-import sys
-import os
-import re
-import tarfile
-
-
-# Arbitrarily pick the source chroot jail (doesn't matter)
-securedrop_root = "/var/chroot/source/var/www/securedrop"
-
-
-def collect_config_file(backup):
- config_file = os.path.join(securedrop_root, "config.py")
- backup.add(config_file)
- return config_file
-
-
-def collect_securedrop_root(backup):
- # The store and key dirs are shared between the chroot jails in
- # 0.2.1, and are both linked from /var/securedrop
- securedrop_root = "/var/securedrop"
- backup.add(securedrop_root)
- return securedrop_root
-
-
-def collect_database(backup):
- # Copy the db file, which is only present in the journalist interface's
- # chroot jail in 0.2.1
- db_file = "/var/chroot/document/var/www/securedrop/db.sqlite"
- backup.add(db_file)
- return db_file
-
-
-def collect_custom_header_image(backup):
- # 0.2.1's deployment didn't actually use
- # config.CUSTOM_HEADER_IMAGE - it just overwrote the default
- # header image, `static/i/securedrop.png`.
- header_image = os.path.join(securedrop_root, "static/i/securedrop.png")
- backup.add(header_image)
- return header_image
-
-
-def collect_tor_files(backup):
- tor_files = [
- "/etc/tor/torrc",
- "/var/lib/tor/hidden_service/client_keys",
- "/var/chroot/source/var/lib/tor/hidden_service/private_key",
- "/var/chroot/document/var/lib/tor/hidden_service/client_keys",
- ]
- collected = []
- for tor_file in tor_files:
- # Since the 0.2.1 install process was occasionally somewaht ad
- # hoc, the SSH ATHS was not always set up. We treat that as a
- # non-fatal error and continue.
- if (not os.path.isfile(tor_file)
- and tor_file == "/var/lib/tor/hidden_service/client_keys"):
- print ("[!] Warning: expected file '{}' not found. "
- "Continuing anyway.".format(tor_file))
- continue
- backup.add(tor_file)
- collected.append(tor_file)
-
- return ', '.join(collected)
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.2.1_collect.py <backup filename>"
- sys.exit(1)
-
- backup_filename = sys.argv[1]
- if not backup_filename.endswith(".tar.gz"):
- backup_filename += ".tar.gz"
-
- backup_fns = [
- collect_config_file,
- collect_securedrop_root,
- collect_database,
- collect_custom_header_image,
- collect_tor_files
- ]
-
- print "Backing up..."
- with tarfile.open(backup_filename, 'w:gz') as backup:
- for fn in backup_fns:
- print "[+] Collected {}".format(fn(backup))
-
- print "Done!"
-
-if __name__ == "__main__":
- main()
diff --git a/migration_scripts/0.2.1/0.3_migrate.py b/migration_scripts/0.2.1/0.3_migrate.py
deleted file mode 100755
--- a/migration_scripts/0.2.1/0.3_migrate.py
+++ /dev/null
@@ -1,296 +0,0 @@
-#!/usr/bin/python2.7
-"""
-This script should be copied to a running SecureDrop 0.3 instance, along with
-the output of `0.2.1_collect.py`. When run (as root), it migrates all of the
-information from the 0.2.1 instance to create a matching 0.3 instance.
-"""
-
-import sys
-import os
-import re
-import tarfile
-import subprocess
-import sqlite3
-import shutil
-from datetime import datetime
-from operator import itemgetter
-import calendar
-import traceback
-
-
-def migrate_config_file(backup):
- print "* Migrating values from old config file..."
-
- # Back up new config just in case something goes horribly wrong
- config_fn = "/var/www/securedrop/config.py"
- shutil.copy(config_fn, config_fn + '.backup')
-
- # Substitute values in new config with values from old config
- old_config = backup.extractfile('var/chroot/source/var/www/securedrop/config.py').read()
- new_config = open(config_fn, 'r').read()
- subs = [
- (r"JOURNALIST_KEY=('.*')", r"^(JOURNALIST_KEY = )('.*')"),
- (r"SCRYPT_ID_PEPPER=('.*')", r"^(SCRYPT_ID_PEPPER = )('.*')"),
- (r"SCRYPT_GPG_PEPPER=('.*')", r"^(SCRYPT_GPG_PEPPER = )('.*')")
- ]
- for sub in subs:
- old_value_repl = r"\1{}".format(re.search(sub[0], old_config).groups()[0])
- new_config = re.sub(sub[1], old_value_repl, new_config, flags=re.MULTILINE)
-
- # Write out migrated config
- with open(config_fn, 'w') as config:
- config.write(new_config)
-
- # Restart Apache so the web application picks up the changes to config.py
- subprocess.call(["service", "apache2", "restart"])
-
-
-def extract_tree_to(tar, selector, dest):
- # http://stackoverflow.com/a/15171308/1093000
- if type(selector) is str:
- prefix = selector
- selector = lambda m: m.name.startswith(prefix)
- members = [m for m in tar.getmembers() if selector(m)]
- for m in members:
- m.name = m.name[len(prefix):]
- tar.extractall(path=dest, members=members)
-
-
-def extract_file_to(tar, src, dst):
- src_member = tar.getmember(src)
- # Hack member name to change where it gets extracted to
- src_member.name = dst
- tar.extract(src_member)
-
-
-def migrate_securedrop_root(backup):
- print "* Migrating directories from SECUREDROP_ROOT..."
- extract_tree_to(backup, "var/securedrop/", "/var/lib/securedrop")
- subprocess.call(['chown', '-R', 'www-data:www-data', "/var/lib/securedrop"])
-
-
-def migrate_database(backup):
- print "* Migrating database..."
-
- # Get the sources table from the 0.2.1 instance's db
- old_db = backup.getmember("var/chroot/document/var/www/securedrop/db.sqlite")
- old_db.name = "db.old.sqlite"
- backup.extract(old_db)
- conn = sqlite3.connect("db.old.sqlite")
- c = conn.cursor()
- sources = c.execute("SELECT * FROM sources").fetchall()
- os.remove("db.old.sqlite")
-
- # Fill in the rest of the sources. Since sources were only added to the
- # database if their codename was changed by the journalist, we need to fill
- # in the rest by examining all of the filesystem designations in the source
- # directory and re-generating the codenames.
- #
- # Note: Must be called after /var/lib/securedrop/store is populated
- from old_crypto_util import displayid
- # Generate a list of the filesystem ids that have journalist designations
- # stored in the database, since they are already known and should not be
- # generated from the filesystem id
- already_processed = set([source[0] for source in sources])
- for fs_id in os.listdir("/var/lib/securedrop/store"):
- if fs_id in already_processed:
- continue
- sources.append((fs_id, displayid(fs_id)))
-
- # Import current application's config so we can easily populate the db
- sys.path.append("/var/www/securedrop")
- import config
- from db import Source, Journalist, Submission, Reply, db_session, init_db
-
- # We need to be able to link replies to the Journalist that sent
- # them. Since this information was not recorded in 0.2.1, we
- # arbitrarily say all replies were sent by an arbitrary journalist
- # that is present on this system. Since this information is not
- # currently exposed in the UI, this does not create a problem (for
- # now).
- if len(Journalist.query.all()) == 0:
- print "!!! FATAL: You must create a journalist account before running this migration."
- print " Run ./manage.py add_admin and try again."
- sys.exit(1)
- else:
- arbitrary_journalist = Journalist.query.all()[0]
-
- # Back up current database just in case
- shutil.copy("/var/lib/securedrop/db.sqlite",
- "/var/lib/securedrop/db.sqlite.bak")
-
- # Copied from db.py to compute filesystem-safe journalist filenames
- def journalist_filename(s):
- valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in s.lower().replace(' ', '_') if c in valid_chars])
-
- # Migrate rows to new database with SQLAlchemy ORM
- for source in sources:
- migrated_source = Source(source[0], source[1])
- source_dir = os.path.join("/var/lib/securedrop/store", source[0])
-
- # It appears that there was a bug in 0.2.1 where sources with changed
- # names were not always successfully removed from the database. Skip
- # any sources that didn't have files copied for them, they were deleted
- # and are in the database erroneously.
- if not os.path.isdir(source_dir):
- continue
-
- # Can infer "flagged" state by looking for _FLAG files in store
- if "_FLAG" in os.listdir(source_dir):
- # Mark the migrated source as flagged
- migrated_source.flagged = True
- # Delete the _FLAG file
- os.remove(os.path.join(source_dir, "_FLAG"))
-
- # Sort the submissions by the date of submission so we can infer the
- # correct interaction_count for the new filenames later, and so we can
- # set source.last_updated to the time of the most recently uploaded
- # submission in the store now.
- submissions = []
- replies = []
- for fn in os.listdir(source_dir):
- append_to = submissions
- if fn.startswith('reply-'):
- append_to = replies
- append_to.append((fn, os.path.getmtime(os.path.join(source_dir, fn))))
-
- # Sort by submission time
- submissions.sort(key=itemgetter(1))
- replies.sort(key=itemgetter(1))
-
- if len(submissions) > 0:
- migrated_source.last_updated = datetime.utcfromtimestamp(submissions[-1][1])
- else:
- # The source will have the default .last_updated of utcnow(), which
- # might be a little confusing, but it's the best we can do.
- pass
-
- # Since the concept of "pending" is introduced in 0.3, it's tricky to
- # figure out how to set this value. We can't distinguish between sources
- # who created an account but never submitted anything and sources who
- # had been active, but didn't have any stored submissions or replies at
- # the time of migration.
- #
- # After having explored the options, I think the best thing to do here
- # is set pending to True if there are no submissions or replies. Sources
- # who created an account but never submitted anything won't create noise
- # in the list, and sources who are active can probably be expected to
- # log back in relatively soon and so will automatially reappear once
- # they submit something new.
- if len(submissions + replies) == 0:
- migrated_source.pending = True
- else:
- migrated_source.pending = False
-
- # Set source.interaction_count to the number of current submissions for
- # each source. This is not technicially correct, but since we can't
- # know how many submissions have been deleted it will give us a
- # reasonable, monotonically increasing basis for future increments to
- # the interaction_count.
- migrated_source.interaction_count = len(submissions) + len(replies)
-
- # Add and commit the source to the db so they will have a primary key
- # assigned to use with the ForeignKey relationship with their
- # submissions
- db_session.add(migrated_source)
- db_session.commit()
-
- # Combine everything into one list, sorted by date, so we can
- # correctly set the interaction counts for each file.
- everything = submissions + replies
- everything.sort(key=itemgetter(1))
- for count, item in enumerate(everything):
- # Rename the file to fit the new file naming scheme used by 0.3
- fn = item[0]
-
- if fn.startswith('reply-'):
- new_fn = "{0}-{1}-reply.gpg".format(count+1, journalist_filename(source[1]))
- else:
- new_fn = "{0}-{1}-{2}".format(count+1, journalist_filename(source[1]), "msg.gpg" if fn.endswith("msg.gpg") else "doc.zip.gpg")
-
- # Move to the new filename
- os.rename(os.path.join(source_dir, fn),
- os.path.join(source_dir, new_fn))
-
- # Add a database entry for this item
- db_entry = None
-
- if fn.startswith('reply-'):
- migrated_reply = Reply(arbitrary_journalist, migrated_source, new_fn)
- db_entry = migrated_reply
- else:
- migrated_submission = Submission(migrated_source, new_fn)
- # Assume that all submissions that are being migrated
- # have already been downloaded
- migrated_submission.downloaded = True
- db_entry = migrated_submission
-
- db_session.add(db_entry)
- db_session.commit()
-
- # chown the database file to the securedrop user
- subprocess.call(['chown', 'www-data:www-data', "/var/lib/securedrop/db.sqlite"])
-
-
-def migrate_custom_header_image(backup):
- print "* Migrating custom header image..."
- extract_file_to(backup,
- "var/chroot/source/var/www/securedrop/static/i/securedrop.png",
- "/var/www/securedrop/static/i/logo.png")
- subprocess.call(['chown', '-R', 'www-data:www-data', "/var/www/securedrop/static/i/logo.png"])
-
-
-def migrate_tor_files(backup):
- print "* Migrating source interface .onion..."
-
- tor_root_dir = "/var/lib/tor"
- ths_root_dir = os.path.join(tor_root_dir, "services")
-
- # For now, we're going to re-provision the monitor and SSH
- # hidden services. The only hidden service whose address
- # we want to maintain is the source interface. Modify the
- # code below to migrate other hidden services as well.
-
- # Restore source interface hidden sevice key to maintain the original
- # .onion address
- source_ths_dir = os.path.join(ths_root_dir, "source")
-
- # Delete the files created by ansible
- for fn in os.listdir(source_ths_dir):
- os.remove(os.path.join(source_ths_dir, fn))
-
- # Extract the original source interface THS key
- extract_file_to(backup,
- "var/chroot/source/var/lib/tor/hidden_service/private_key",
- os.path.join(source_ths_dir, "private_key"))
-
- # chmod the files so they're owned by debian-tor:debian-tor
- subprocess.call(['chown', '-R', 'debian-tor:debian-tor', source_ths_dir])
- # Reload Tor to trigger registering the migrated Tor Hidden Service address
- subprocess.call(['service', 'tor', 'reload'])
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.3_migrate.py <backup filename>"
- sys.exit(1)
-
- try:
- backup_fn = sys.argv[1]
- with tarfile.open(backup_fn, 'r:*') as backup:
- migrate_config_file(backup)
- migrate_securedrop_root(backup)
- migrate_database(backup)
- migrate_custom_header_image(backup)
- migrate_tor_files(backup)
- except SystemExit as e:
- pass
- except:
- print "\n!!! Something went wrong, please file an issue.\n"
- print traceback.format_exc()
- else:
- print "Done!"
-
-if __name__ == "__main__":
- main()
diff --git a/migration_scripts/0.2.1/old_crypto_util.py b/migration_scripts/0.2.1/old_crypto_util.py
deleted file mode 100644
--- a/migration_scripts/0.2.1/old_crypto_util.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
-# regenerate journalist designations from soure's filesystem id's.
-import os
-import random as badrandom
-
-# Find the absolute path relative to this file so this script can be run
-# anywhere
-SRC_DIR = os.path.dirname(os.path.realpath(__file__))
-
-nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
-adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
-
-
-def displayid(n):
- badrandom_value = badrandom.WichmannHill()
- badrandom_value.seed(n)
- return badrandom_value.choice(
- adjectives) + " " + badrandom_value.choice(nouns)
| diff --git a/install_files/securedrop-ossec-server/var/ossec/setup_gmail_test.sh b/install_files/securedrop-ossec-server/var/ossec/setup_gmail_test.sh
deleted file mode 100755
--- a/install_files/securedrop-ossec-server/var/ossec/setup_gmail_test.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-
-set -u -e
-# ------------------------------------------------------------------------------
-#
-# This script is used to setup postfix on the monitor server to use
-# gmail as the smtp server to validate gpg and ossec alerts are sent
-#
-# This script should be run after the production install scripts
-#
-# TODO:
-# - Support default test gmail account [email protected]
-#
-# ------------------------------------------------------------------------------
-#set -x
-
-echo ""
-echo "In order to test sending ossec emails via gpg we need to do the following:"
-echo ""
-echo "1. Setup postfix to use google as the smpt relay"
-echo "2. Import your public gpg key to the ossec user"
-echo ""
-echo "What Gmail email address do you want to send test alerts to?"
-read EMAIL_DISTRO
-echo "What is your email password. (Needed to auth google as smtp server)"
-read PASSWORD
-echo "Please import your public key into the ossec keystore"
-echo "gpg --homedir /var/ossec/.gnu --import <your key here>"
-
-# Install required testing tools
-apt-get install -y postfix mailutils libsasl2-2 ca-certificates libsasl2-modules
-
-# Setup postfix config
-sed -ie "/^relayhost/d" /etc/postfix/main.cf
-cat <<EOF >> /etc/postfix/main.cf
-relayhost = [smtp.gmail.com]:587
-smtp_sasl_auth_enable = yes
-smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd
-smtp_sasl_security_options = noanonymous
-smtp_tls_CAfile = /etc/postfix/cacert.pem
-smtp_use_tls = yes
-EOF
-
-
-
-# Setup gmail user auth for postfix
-echo "[smtp.gmail.com]:587 ${EMAIL_DISTRO}:${PASSWORD}" > /etc/postfix/sasl_passwd
-chmod 400 /etc/postfix/sasl_passwd
-postmap /etc/postfix/sasl_passwd
-
-# Import Thawte CA cert into postfix
-cat /etc/ssl/certs/Thawte_Premium_Server_CA.pem >> /etc/postfix/cacert.pem
-
-# Reload postfix config
-service postfix reload
-
-sed -e "s/EMAIL_DISTRO/$EMAIL_DISTRO/g" send_encrypted_alarm.sh > /var/ossec/send_encrypted_alarm.sh
-
-# Send test email
-echo "Test mail from postfix" | mail -s "Test Postfix" $EMAIL_DISTRO
| Linting/testing of our shell scripts
#1633 was a reminder that our shell scripts often only receive manual review before making it into develop, and that some basic linting--not even proper tests, which are a lot more difficult to implement and run regularly given the context in which most of these scripts are run--could go a long way.
Shellcheck seems like a good option and has [a section in the README](https://github.com/koalaman/shellcheck#travis-ci-setup) on use in Travis. Shellcheck caught #1633. I think it would be a good idea to add it to CI.
Would love to hear other ideas and thoughts on what we could do to improve (especially cheaply) the robustness of our shell script analysis.
| Running `shellcheck tail_files/install.sh` locally shows some good output, so I'm inclined to go with that solution for CI. There's a barebones approach in `bash -n tails_files/install.sh` that would have caught the error described in #1633, but shellcheck finds more, and provides more informative output. | 2017-09-13T22:48:57Z | [] | [] |
freedomofpress/securedrop | 2,328 | freedomofpress__securedrop-2328 | [
"2168"
] | 62e86d8ac2de494f10e7ed76dd02f41997d3deea | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -298,7 +298,7 @@ def admin_edit_user(user_id):
user = Journalist.query.get(user_id)
if request.method == 'POST':
- if request.form['username']:
+ if request.form.get('username', None):
new_username = request.form['username']
try:
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -316,7 +316,7 @@ def test_admin_add_user_when_username_already_in_use(self):
resp = self.client.post(url_for('admin_add_user'),
data=dict(username=self.admin.username,
password=VALID_PASSWORD,
- is_admin=False))
+ is_admin=None))
self.assertIn('That username is already in use', resp.data)
def test_max_password_length(self):
@@ -344,13 +344,12 @@ def test_admin_edits_user_password_too_long_warning(self):
overly_long_password = VALID_PASSWORD + \
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
- resp = self.client.post(
+ self.client.post(
url_for('admin_new_password', user_id=self.user.id),
- data=dict(username=self.user.username, is_admin=False,
+ data=dict(username=self.user.username, is_admin=None,
password=overly_long_password),
follow_redirects=True)
- print resp.data.decode('utf-8')
self.assertMessageFlashed('You submitted a bad password! '
'Password not changed.', 'error')
@@ -375,7 +374,7 @@ def test_admin_add_user_password_too_long_warning(self):
url_for('admin_add_user'),
data=dict(username='dellsberg',
password=overly_long_password,
- is_admin=False))
+ is_admin=None))
self.assertMessageFlashed('There was an error with the autogenerated '
'password. User not created. '
@@ -389,7 +388,7 @@ def test_admin_edits_user_invalid_username(self):
self.client.post(
url_for('admin_edit_user', user_id=self.user.id),
- data=dict(username=new_username, is_admin=False))
+ data=dict(username=new_username, is_admin=None))
self.assertMessageFlashed('Username "{}" is already taken!'.format(
new_username), 'error')
@@ -534,9 +533,7 @@ def test_admin_add_user(self):
resp = self.client.post(url_for('admin_add_user'),
data=dict(username='dellsberg',
password=VALID_PASSWORD,
- is_admin=False))
-
- print resp.data.decode('utf-8')
+ is_admin=None))
self.assertRedirects(resp, url_for('admin_new_user_two_factor',
uid=max_journalist_pk+1))
@@ -546,7 +543,7 @@ def test_admin_add_user_without_username(self):
resp = self.client.post(url_for('admin_add_user'),
data=dict(username='',
password=VALID_PASSWORD,
- is_admin=False))
+ is_admin=None))
self.assertIn('Invalid username', resp.data)
def test_admin_add_user_too_short_username(self):
@@ -556,9 +553,47 @@ def test_admin_add_user_too_short_username(self):
data=dict(username=username,
password='pentagonpapers',
password_again='pentagonpapers',
- is_admin=False))
+ is_admin=None))
self.assertIn('Invalid username', resp.data)
+ def test_admin_sets_user_to_admin(self):
+ self._login_admin()
+ new_user = 'admin-set-user-to-admin-test'
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username=new_user,
+ password=VALID_PASSWORD,
+ is_admin=None))
+ assert resp.status_code in (200, 302)
+ journo = Journalist.query.filter(Journalist.username == new_user).one()
+ assert not journo.is_admin
+
+ resp = self.client.post(url_for('admin_edit_user', user_id=journo.id),
+ data=dict(is_admin=True))
+ assert resp.status_code in (200, 302), resp.data.decode('utf-8')
+
+ # there are better ways to do this, but flake8 complains
+ journo = Journalist.query.filter(Journalist.username == new_user).one()
+ assert journo.is_admin is True
+
+ def test_admin_renames_user(self):
+ self._login_admin()
+ new_user = 'admin-renames-user-test'
+ resp = self.client.post(url_for('admin_add_user'),
+ data=dict(username=new_user,
+ password=VALID_PASSWORD,
+ is_admin=None))
+ assert resp.status_code in (200, 302)
+ journo = Journalist.query.filter(Journalist.username == new_user).one()
+
+ new_user = new_user + 'a'
+ resp = self.client.post(url_for('admin_edit_user', user_id=journo.id),
+ data=dict(username=new_user))
+ assert resp.status_code in (200, 302), resp.data.decode('utf-8')
+
+ # the following will throw an exception if new_user is not found
+ # therefore asserting it has been created
+ Journalist.query.filter(Journalist.username == new_user).one()
+
@patch('journalist.app.logger.error')
@patch('journalist.Journalist',
side_effect=IntegrityError('STATEMENT', 'PARAMETERS', None))
@@ -570,7 +605,7 @@ def test_admin_add_user_integrity_error(self,
self.client.post(url_for('admin_add_user'),
data=dict(username='username',
password=VALID_PASSWORD,
- is_admin=False))
+ is_admin=None))
mocked_error_logger.assert_called_once_with(
"Adding user 'username' failed: (__builtin__.NoneType) "
| Add integration tests for `admin_edit_user`
Integration tests need to be added to the endpoint for `admin_edit_user`.
From this comment: https://github.com/freedomofpress/securedrop/pull/1509#issuecomment-323562439
| 2017-09-17T18:03:57Z | [] | [] |
|
freedomofpress/securedrop | 2,336 | freedomofpress__securedrop-2336 | [
"2262"
] | 62e86d8ac2de494f10e7ed76dd02f41997d3deea | diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -78,6 +78,7 @@ def get_bulk_archive(selected_submissions, zip_directory=''):
# folder structure per #383
with zipfile.ZipFile(zip_file, 'w') as zip:
for source in sources:
+ fname = ""
submissions = [s for s in selected_submissions
if s.source.journalist_designation == source]
for submission in submissions:
@@ -85,9 +86,12 @@ def get_bulk_archive(selected_submissions, zip_directory=''):
submission.filename)
verify(filename)
document_number = submission.filename.split('-')[0]
+ if zip_directory == submission.source.journalist_filename:
+ fname = zip_directory
+ else:
+ fname = os.path.join(zip_directory, source)
zip.write(filename, arcname=os.path.join(
- zip_directory,
- source,
+ fname,
"%s_%s" % (document_number,
submission.source.last_updated.date()),
os.path.basename(filename)
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -774,7 +774,6 @@ def test_download_selected_submissions_from_source(self):
zipfile.ZipFile(StringIO(resp.data)).getinfo(
os.path.join(
source.journalist_filename,
- source.journalist_designation,
"%s_%s" % (filename.split('-')[0],
source.last_updated.date()),
filename
| Extra level in zip folders for single sources
## Description
There is an extra unnecessary folder for zipfiles downloaded from the journalist interface for single sources.
## Steps to Reproduce
0. Submit some files to SecureDrop
1. Load the home page of the journalist interface
2. Select one source using the checkbox
3. Select "Download"
4. Unzip the resulting file
## Expected Behavior
File structure would ideally be:
`source-name/number_date`
e.g.:
`ecstatic-potato/1_03-20-20`
## Actual Behavior
Instead there is an additional layer:
`ecstatic-potato/ecstatic-potato/1_03-20-20`
## Comment
This additional folder is confusing to users and should be removed.
Note: if you download unread or download all for multiple sources - the outer folder will not be named after the source and will be e.g. `all`:
`all/ecstatic-potato/1_03-20-20`
| (this was not discovered doing 0.4.3 release QA, this is in production)
i can verify with the following details.
## Steps to Reproduce
0. Submit some files to SecureDrop
1. Load the home page of the journalist interface
2. Click on one source
3. Select one document from source using the checkbox
4. Select "Download"
5. Unzip the resulting file
## Expected Behavior
File structure would ideally be:
```
source_name/number_date
```
e.g.:
```
ecstatic_potato/1_03-20-20
```
## Actual Behavior
Instead there is an additional layer:
```
ecstatic_potato/ecstatic potato/1_03-20-20
```
| 2017-09-18T17:13:50Z | [] | [] |
freedomofpress/securedrop | 2,355 | freedomofpress__securedrop-2355 | [
"2354"
] | c192ff8842bc79ade52547ab9b3469559138350f | diff --git a/securedrop/i18n.py b/securedrop/i18n.py
--- a/securedrop/i18n.py
+++ b/securedrop/i18n.py
@@ -22,7 +22,9 @@
import collections
import config
import os
+import re
+LOCALE_SPLIT = re.compile('(-|_)')
LOCALES = set(['en_US'])
babel = None
@@ -129,3 +131,13 @@ def get_locale2name():
locale = core.Locale.parse(l)
locale2name[l] = locale.languages[locale.language]
return locale2name
+
+
+def locale_to_rfc_5646(locale):
+ lower = locale.lower()
+ if 'hant' in lower:
+ return 'zh-Hant'
+ elif 'hans' in lower:
+ return 'zh-Hans'
+ else:
+ return LOCALE_SPLIT.split(locale)[0]
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -71,6 +71,7 @@ def setup_g():
g.locale = i18n.get_locale()
g.text_direction = i18n.get_text_direction(g.locale)
+ g.html_lang = i18n.locale_to_rfc_5646(g.locale)
g.locales = i18n.get_locale2name()
if request.method == 'POST':
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -95,7 +95,9 @@ def setup_g():
"""Store commonly used values in Flask's special g object"""
g.locale = i18n.get_locale()
g.text_direction = i18n.get_text_direction(g.locale)
+ g.html_lang = i18n.locale_to_rfc_5646(g.locale)
g.locales = i18n.get_locale2name()
+
# ignore_static here because `crypto_util.hash_codename` is scrypt (very
# time consuming), and we don't need to waste time running if we're just
# serving a static resource that won't need to access these common values.
| diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
--- a/securedrop/tests/test_i18n.py
+++ b/securedrop/tests/test_i18n.py
@@ -19,6 +19,7 @@
import argparse
import logging
import os
+import re
from flask import request, session, render_template_string, render_template
from flask_babel import gettext
@@ -32,10 +33,15 @@
import pytest
import source
import version
+import utils
class TestI18N(object):
+ @classmethod
+ def setup_class(cls):
+ utils.env.setup()
+
def test_get_supported_locales(self):
locales = ['en_US', 'fr_FR']
assert locales == i18n._get_supported_locales(locales, None, None)
@@ -218,6 +224,29 @@ def test_verify_default_locale_en_us_if_not_defined_in_config(self):
finally:
config.DEFAULT_LOCALE = DEFAULT_LOCALE
+ def test_locale_to_rfc_5646(self):
+ assert i18n.locale_to_rfc_5646('en') == 'en'
+ assert i18n.locale_to_rfc_5646('en-US') == 'en'
+ assert i18n.locale_to_rfc_5646('en_US') == 'en'
+ assert i18n.locale_to_rfc_5646('en-us') == 'en'
+ assert i18n.locale_to_rfc_5646('zh-hant') == 'zh-Hant'
+
+ def test_html_lang_correct(self):
+ app = journalist.app.test_client()
+ resp = app.get('/', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="en".*>').search(html), html
+
+ app = source.app.test_client()
+ resp = app.get('/', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="en".*>').search(html), html
+
+ # check '/generate' too because '/' uses a different template
+ resp = app.get('/generate', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="en".*>').search(html), html
+
@classmethod
def teardown_class(cls):
reload(journalist)
| Add <html lang="$some_lang"> to HTML templates
# Feature request
## Description
All pages should have the `<html lang="en-US">` or other appropriate language tag.
Reference: https://www.w3schools.com/Tags/ref_language_codes.asp
## User Stories
As a source/journalist, I want my browser to appropriately handle things like CSS text transforms.
| 2017-09-21T13:08:29Z | [] | [] |
|
freedomofpress/securedrop | 2,380 | freedomofpress__securedrop-2380 | [
"2379"
] | e3fea9822eb34ee7128bb2e82604949da7443c0b | diff --git a/securedrop/i18n.py b/securedrop/i18n.py
--- a/securedrop/i18n.py
+++ b/securedrop/i18n.py
@@ -66,14 +66,14 @@ def get_locale():
- 'en_US'
"""
locale = None
- accept_languages = set()
+ accept_languages = []
for l in request.accept_languages.values():
if '-' in l:
sep = '-'
else:
sep = '_'
try:
- accept_languages.add(str(core.Locale.parse(l, sep)))
+ accept_languages.append(str(core.Locale.parse(l, sep)))
except:
pass
if 'l' in request.args:
| diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
--- a/securedrop/tests/test_i18n.py
+++ b/securedrop/tests/test_i18n.py
@@ -80,6 +80,13 @@ def verify_i18n(self, app):
{{ gettext('code hello i18n') }}
''').strip() == translated_fr
+ # https://github.com/freedomofpress/securedrop/issues/2379
+ headers = Headers([('Accept-Language',
+ 'en-US;q=0.6,fr_FR;q=0.4,nb_NO;q=0.2')])
+ with app.test_request_context(headers=headers):
+ assert not hasattr(request, 'babel_locale')
+ assert not_translated == gettext(not_translated)
+
translated_cn = 'code chinese'
for lang in ('zh-CN', 'zh-Hans-CN'):
| i18n Doesn't respect user locale preferences
# Bug
I accept several languages, and send a header with a different priority. I prefer for example Dutch to German and a typical Accept-Language header here could look like:
`Accept-Language:en-US,en;q=0.8,nl;q=0.6,fr;q=0.4,de;q=0.2
`
This will *not* display the Dutch interface but the French one. If I send one like:
`Accept-Language: en-US,en;q=0.8,nl;q=0.6,de;q=0.4
`
I would get the German one.
## Steps to Reproduce
Browse to http://source.i18n.securedrop.club/ and send such headers:
GET / HTTP/1.1
Host: source.i18n.securedrop.club
Connection: keep-alive
Pragma: no-cache
Cache-Control: no-cache
Upgrade-Insecure-Requests: 1
User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chromium/63.0.XXXXX Safari/537.36
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Referer: http://i18n.securedrop.club/
Accept-Encoding: gzip, deflate
Accept-Language: en-US,en;q=0.8,nl;q=0.6,de;q=0.4
| 2017-09-28T15:57:39Z | [] | [] |
|
freedomofpress/securedrop | 2,388 | freedomofpress__securedrop-2388 | [
"2376"
] | e2e384c0e633a39142ce199f074be1b19fe252c6 | diff --git a/securedrop/source_app/forms.py b/securedrop/source_app/forms.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/forms.py
@@ -0,0 +1,19 @@
+from flask_babel import gettext
+from flask_wtf import FlaskForm
+from wtforms import PasswordField
+from wtforms.validators import InputRequired, Regexp, Length
+
+from db import Source
+
+
+class LoginForm(FlaskForm):
+ codename = PasswordField('codename', validators=[
+ InputRequired(message=gettext('This field is required.')),
+ Length(1, Source.MAX_CODENAME_LEN,
+ message=gettext('Field must be between 1 and '
+ '{max_codename_len} characters long. '.format(
+ max_codename_len=Source.MAX_CODENAME_LEN))),
+ # The regex here allows either whitespace (\s) or
+ # alphanumeric characters (\W) except underscore (_)
+ Regexp(r'(\s|[^\W_])+$', message=gettext('Invalid input.'))
+ ])
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -16,6 +16,7 @@
from source_app.utils import (logged_in, generate_unique_codename,
async_genkey, normalize_timestamps,
valid_codename)
+from source_app.forms import LoginForm
def make_blueprint(config):
@@ -196,7 +197,8 @@ def batch_delete():
@view.route('/login', methods=('GET', 'POST'))
def login():
- if request.method == 'POST':
+ form = LoginForm()
+ if form.validate_on_submit():
codename = request.form['codename'].strip()
if valid_codename(codename):
session.update(codename=codename, logged_in=True)
@@ -206,7 +208,7 @@ def login():
"Login failed for invalid codename".format(codename))
flash(gettext("Sorry, that is not a recognized codename."),
"error")
- return render_template('login.html')
+ return render_template('login.html', form=form)
@view.route('/logout')
def logout():
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
--- a/securedrop/source_app/utils.py
+++ b/securedrop/source_app/utils.py
@@ -15,12 +15,6 @@ def logged_in():
def valid_codename(codename):
- # Ignore codenames that are too long to avoid DoS
- if len(codename) > Source.MAX_CODENAME_LEN:
- current_app.logger.info(
- "Ignored attempted login because the codename was too long.")
- return False
-
try:
filesystem_id = crypto_util.hash_codename(codename)
except crypto_util.CryptoException as e:
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -353,7 +353,8 @@ def test_login_with_overly_long_codename(self, mock_hash_codename):
resp = c.post('/login', data=dict(codename=overly_long_codename),
follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- self.assertIn("Sorry, that is not a recognized codename.",
+ self.assertIn("Field must be between 1 and {} "
+ "characters long.".format(Source.MAX_CODENAME_LEN),
resp.data)
self.assertFalse(mock_hash_codename.called,
"Called hash_codename for codename w/ invalid "
@@ -407,5 +408,16 @@ def test_source_is_deleted_while_logged_in(self, logger):
logger.assert_called_once_with(
"Found no Sources when one was expected: "
- "No row was found for one()"
- )
+ "No row was found for one()")
+
+ def test_login_with_invalid_codename(self):
+ """Logging in with a codename with invalid characters should return
+ an informative message to the user."""
+
+ invalid_codename = '[]'
+
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=invalid_codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Invalid input.", resp.data)
| Unexpected characters in source codename should not cause 500 error
## Description
On the source interface, unexpected characters in the source codename login a cause a `CryptoException` and 500 error.
## Steps to Reproduce
1. Go to `/login` on source interface.
2. Enter '[]' in the source codename.
3. Hit "Continue"
## Expected Behavior
A reasonable flow would be to flash a message informing the user of the expected format of the codename or which characters in the form input were invalid.
## Actual Behavior
The generic 500 error page is shown due to a `CryptoException`:
<img width="1026" alt="screen shot 2017-09-27 at 12 16 36 pm" src="https://user-images.githubusercontent.com/7832803/30933009-1837b54a-a37e-11e7-8b6c-7b7b75429a70.png">
## Comments
The current logic here is overly complicated: `login` calls `source_app.utils.valid_codename` which calls `crypto_util.hash_codename` which calls `crypto_util.clean`. In `crypto_util.clean`, if there are unexpected characters in the codename _then_ a `CryptoException` will be thrown. We could just handle the `CryptoException` and flash a message. However, we should be validating these forms separately from `crypto_util.py` (see #2312) and _not_ throw `CryptoException`s for form validation errors.
| 2017-09-30T03:17:54Z | [] | [] |
|
freedomofpress/securedrop | 2,402 | freedomofpress__securedrop-2402 | [
"2400"
] | 2badbfb09b87d92411b55cf6adfbb73c854c008e | diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -79,8 +79,8 @@ def setup_g():
if 'expires' in session and datetime.utcnow() >= session['expires']:
session.clear()
- flash(gettext('You have been logged out due to inactivity'),
- 'error')
+ msg = render_template('session_timeout.html')
+ flash(Markup(msg), "important")
session['expires'] = datetime.utcnow() + \
timedelta(minutes=getattr(config,
| diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -21,9 +21,10 @@
from selenium.webdriver.support import expected_conditions
os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
import db
import journalist
-import source
+from source_app import create_app
import tests.utils.env as env
LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log'))
@@ -41,7 +42,7 @@ def __call__(self, driver):
return True
-class FunctionalTest():
+class FunctionalTest(object):
def _unused_port(self):
s = socket.socket()
@@ -80,7 +81,7 @@ def _prepare_webdriver(self):
log_file.flush()
return firefox_binary.FirefoxBinary(log_file=log_file)
- def setup(self):
+ def setup(self, session_expiration=30):
# Patch the two-factor verification to avoid intermittent errors
self.patcher = mock.patch('journalist.Journalist.verify_token')
self.mock_journalist_verify_token = self.patcher.start()
@@ -98,6 +99,9 @@ def setup(self):
self.source_location = "http://localhost:%d" % source_port
self.journalist_location = "http://localhost:%d" % journalist_port
+ # Allow custom session expiration lengths
+ self.session_expiration = session_expiration
+
def start_source_server():
# We call Random.atfork() here because we fork the source and
# journalist server from the main Python process we use to drive
@@ -106,7 +110,12 @@ def start_source_server():
# is a problem because they would produce identical output if we
# didn't re-seed them after forking.
Random.atfork()
- source.app.run(
+
+ config.SESSION_EXPIRATION_MINUTES = self.session_expiration
+
+ source_app = create_app(config)
+
+ source_app.run(
port=source_port,
debug=True,
use_reloader=False,
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -1,4 +1,5 @@
import tempfile
+import time
from selenium.webdriver.common.action_chains import ActionChains
from step_helpers import screenshots
@@ -14,8 +15,7 @@ def _source_visits_source_homepage(self):
assert ("SecureDrop | Protecting Journalists and Sources" ==
self.driver.title)
- @screenshots
- def _source_chooses_to_submit_documents(self):
+ def _source_clicks_submit_documents_on_homepage(self):
# First move the cursor to a known position in case it happens to
# be hovering over one of the buttons we are testing below.
header_image = self.driver.find_element_by_css_selector('.header')
@@ -44,6 +44,10 @@ def _source_chooses_to_submit_documents(self):
# The source clicks the submit button.
submit_button.click()
+ @screenshots
+ def _source_chooses_to_submit_documents(self):
+ self._source_clicks_submit_documents_on_homepage()
+
codename = self.driver.find_element_by_css_selector('#codename')
assert len(codename.text) > 0
@@ -170,12 +174,8 @@ def _source_submits_a_file(self):
@screenshots
def _source_submits_a_message(self):
- text_box = self.driver.find_element_by_css_selector('[name=msg]')
- # send_keys = type into text box
- text_box.send_keys(self.secret_message)
-
- submit_button = self.driver.find_element_by_id('submit-doc-button')
- submit_button.click()
+ self._source_enters_text_in_message_field()
+ self._source_clicks_submit_button_on_submission_page()
if not hasattr(self, 'accept_languages'):
notification = self.driver.find_element_by_css_selector(
@@ -186,6 +186,10 @@ def _source_enters_text_in_message_field(self):
text_box = self.driver.find_element_by_css_selector('[name=msg]')
text_box.send_keys(self.secret_message)
+ def _source_clicks_submit_button_on_submission_page(self):
+ submit_button = self.driver.find_element_by_id('submit-doc-button')
+ submit_button.click()
+
@screenshots
def _source_deletes_a_journalist_reply(self):
# Get the reply filename so we can use IDs to select the delete buttons
@@ -226,3 +230,13 @@ def _source_tor2web_warning(self):
def _source_why_journalist_key(self):
self.driver.get(self.source_location + "/why-journalist-key")
+
+ def _source_waits_for_session_to_timeout(self, session_length_minutes):
+ time.sleep(session_length_minutes * 60 + 0.1)
+
+ def _source_sees_session_timeout_message(self):
+ notification = self.driver.find_element_by_css_selector('.important')
+
+ if not hasattr(self, 'accept_languages'):
+ expected_text = 'Your session timed out due to inactivity.'
+ assert expected_text in notification.text
diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py
--- a/securedrop/tests/functional/test_source.py
+++ b/securedrop/tests/functional/test_source.py
@@ -2,7 +2,7 @@
import functional_test
-class TestSourceInterfaceBannerWarnings(
+class TestSourceInterface(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps):
diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py
--- a/securedrop/tests/functional/test_source_notfound.py
+++ b/securedrop/tests/functional/test_source_notfound.py
@@ -2,7 +2,7 @@
import functional_test
-class TestSourceInterfaceBannerWarnings(
+class TestSourceInterfaceNotFound(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps):
diff --git a/securedrop/tests/functional/test_source_session_timeout.py b/securedrop/tests/functional/test_source_session_timeout.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source_session_timeout.py
@@ -0,0 +1,26 @@
+import source_navigation_steps
+import functional_test
+
+
+class TestSourceSessions(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def setup(self):
+ # The session expiration here cannot be set to -1
+ # as it will trigger an exception in /create.
+ # Instead, we pick a 1-2s value to allow the account
+ # to be generated.
+ self.session_length_minutes = 0.03
+ super(TestSourceSessions, self).setup(
+ session_expiration=self.session_length_minutes)
+
+ def test_source_session_timeout(self):
+ self._source_visits_source_homepage()
+ self._source_clicks_submit_documents_on_homepage()
+ self._source_continues_to_submit_page()
+ self._source_waits_for_session_to_timeout(
+ self.session_length_minutes)
+ self._source_enters_text_in_message_field()
+ self._source_clicks_submit_button_on_submission_page()
+ self._source_sees_session_timeout_message()
diff --git a/securedrop/tests/pages-layout/test_source.py b/securedrop/tests/pages-layout/test_source.py
--- a/securedrop/tests/pages-layout/test_source.py
+++ b/securedrop/tests/pages-layout/test_source.py
@@ -136,3 +136,25 @@ def test_tor2web_warning(self):
def test_why_journalist_key(self):
self._source_why_journalist_key()
self._screenshot('source-why_journalist_key.png')
+
+
[email protected]
+class TestSourceSessionLayout(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps,
+ journalist_navigation_steps.JournalistNavigationSteps):
+
+ def setup(self):
+ self.session_length_minutes = 0.03
+ super(TestSourceSessionLayout, self).setup(
+ session_expiration=self.session_length_minutes)
+
+ def test_source_session_timeout(self):
+ self._source_visits_source_homepage()
+ self._source_clicks_submit_documents_on_homepage()
+ self._source_continues_to_submit_page()
+ self._source_waits_for_session_to_timeout(self.session_length_minutes)
+ self._source_enters_text_in_message_field()
+ self._source_clicks_submit_button_on_submission_page()
+ self._source_sees_session_timeout_message()
+ self._screenshot('source-session_timeout.png')
| Source session expiry notification should notify them to close or restart Tor Browser
## Description
Currently we flash:
![screen shot 2017-10-04 at 5 20 24 pm](https://user-images.githubusercontent.com/7832803/31246736-3643e16c-a9c3-11e7-8b4d-4f5df30e6e95.png)
But we should probably flash a message similar to this one:
![Logout](https://docs.securedrop.org/en/latest/_images/source-logout_flashed_message.png)
## User Stories
As a SecureDrop source, I want to be clearly notified of additional steps that I should be taking for my security.
| 2017-10-05T22:45:06Z | [] | [] |
|
freedomofpress/securedrop | 2,410 | freedomofpress__securedrop-2410 | [
"2398"
] | 1e1fafc2f62bfdeb7b170079a660bb35e6af1007 | diff --git a/securedrop/source_app/forms.py b/securedrop/source_app/forms.py
--- a/securedrop/source_app/forms.py
+++ b/securedrop/source_app/forms.py
@@ -13,7 +13,6 @@ class LoginForm(FlaskForm):
message=gettext('Field must be between 1 and '
'{max_codename_len} characters long. '.format(
max_codename_len=Source.MAX_CODENAME_LEN))),
- # The regex here allows either whitespace (\s) or
- # alphanumeric characters (\W) except underscore (_)
- Regexp(r'(\s|[^\W_])+$', message=gettext('Invalid input.'))
+ # Make sure to allow dashes since some words in the wordlist have them
+ Regexp(r'[\sA-Za-z0-9-]+$', message=gettext('Invalid input.'))
])
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -44,6 +44,25 @@ def test_index(self):
self.assertIn("Submit documents for the first time", response.data)
self.assertIn("Already submitted something?", response.data)
+ def test_all_words_in_wordlist_validate(self):
+ """Verify that all words in the wordlist are allowed by the form
+ validation. Otherwise a source will have a codename and be unable to
+ return."""
+
+ wordlist_en = crypto_util._get_wordlist('en')
+
+ for word in wordlist_en:
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=word),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ # If the word does not validate, then it will show
+ # 'Invalid input'. If it does validate, it should show that
+ # it isn't a recognized codename.
+ self.assertIn('Sorry, that is not a recognized codename.',
+ resp.data)
+ self.assertNotIn('logged_in', session)
+
def _find_codename(self, html):
"""Find a source codename (diceware passphrase) in HTML"""
# Codenames may contain HTML escape characters, and the wordlist
| Transient testing errors for source login
# Bug
## Description
```
____________________________________________________________ TestSourceApp.test_login_and_logout _____________________________________________________________
self = <tests.test_source.TestSourceApp testMethod=test_login_and_logout>
def test_login_and_logout(self):
resp = self.client.get('/login')
self.assertEqual(resp.status_code, 200)
self.assertIn("Enter Codename", resp.data)
codename = self._new_codename()
with self.client as c:
resp = c.post('/login', data=dict(codename=codename),
follow_redirects=True)
self.assertEqual(resp.status_code, 200)
> self.assertIn("Submit Materials", resp.data)
E AssertionError: 'Submit Materials' not found in '<!DOCTYPE html>\n<html lang="en" dir="ltr">\n <head>\n <meta charset="utf-8">\n <meta name="viewport" content="width=device-width, initial-scale=1">\n <title>SecureDrop | Protecting Journalists and Sources</title>\n\n <link rel="stylesheet" href="/static/css/source.css">\n <link rel="icon" type="image/png" href="/static/i/favicon.png">\n \n </head>\n <body>\n \n\n \n\n\n <div class="content">\n \n <div id="header">\n <a href="/lookup">\n <img src="/static/i/logo.png" class="logo small" alt="SecureDrop" width="250px">\n </a>\n \n \n </div>\n \n\n <div class="panel selected">\n \n <a href="/logout" class="sd-button btn pull-right" id="logout">LOG OUT</a>\n \n <hr class="no-line">\n\n \n\n<h1>Enter Codename</h1>\n\n\n\n\n\n<form method="post" action="/login" autocomplete="off">\n<input name="csrf_token" type="hidden" value="IjBkMTg0N2ZmYzU4YzExZjgzOGJiYzJjZTFkYjdkOWZlNmI2OWFhN2Ui.DLa79A.Y7lYbUepqRHo6eF63pCNX1nViJI">\n\n<p class="center">\n <input autocomplete="off" autofocus class="codename-box" id="login-with-existing-codename" name="codename" placeholder="Enter your codename" type="password" value="">\n <br>\n \n <span class="form-validation-error">Invalid input.</span>\n \n</p>\n\n<div class="pull-right">\n <a href="/" class="sd-button btn secondary" id="cancel">CANCEL</a>\n <button type="submit" class="sd-button btn primary" id="login">\n <img class="icon off-hover" src="/static/i/font-awesome/fa-arrow-circle-o-right-white.png" width="18px" height="18px">\n <img class="icon on-hover" src="/static/i/font-awesome/fa-arrow-circle-o-right-blue.png" width="18px" height="18px">\n CONTINUE\n </button>\n</div>\n\n</form>\n\n\n </div>\n\n \n <footer>\n Like all software, SecureDrop may contain security bugs. Use at your own risk. Powered by SecureDrop 0.4.3.\n </footer>\n \n </div>\n </body>\n</html>'
tests/test_source.py:151: AssertionError
```
## Steps to Reproduce
I can't reproduce this, but it pops up on occasion and has been fixed by some combination of `vagrant reload` or wiping the translations directory. Though I'm not sure if either of those actually affect it. :/
| The relevant error in the resp.data string is
<pre>
<span class="form-validation-error">Invalid input.</span>
</pre>
which comes from source_app/forms.py and is rejecting **-** which shows in four words. The regexp
<pre>
(\s|[^\W_])+$
</pre>
could be simplified as:
<pre>
[\sA-Za-z0-9-]+$
</pre>
It would be useful to add a test verifying it validates all words in the wordlist to avoid similar regressions in the future. | 2017-10-06T22:50:00Z | [] | [] |
freedomofpress/securedrop | 2,426 | freedomofpress__securedrop-2426 | [
"2327"
] | ae7a6d9007a03382112604244563634de422bba9 | diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -38,6 +38,7 @@ def generate():
codename = generate_unique_codename()
session['codename'] = codename
+ session['new_user'] = True
return render_template('generate.html', codename=codename)
@view.route('/create', methods=['POST'])
@@ -91,6 +92,7 @@ def lookup():
codename=g.codename,
replies=replies,
flagged=g.source.flagged,
+ new_user=session.get('new_user', None),
haskey=crypto_util.getkey(
g.filesystem_id))
| diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -54,6 +54,9 @@ def _source_shows_codename(self):
assert not content.is_displayed()
self.driver.find_element_by_id('codename-hint-show').click()
assert content.is_displayed()
+ content_content = self.driver.find_element_by_css_selector(
+ '#codename-hint-content p')
+ assert content_content.text == self.source_name
def _source_hides_codename(self):
content = self.driver.find_element_by_id('codename-hint-content')
@@ -61,6 +64,10 @@ def _source_hides_codename(self):
self.driver.find_element_by_id('codename-hint-hide').click()
assert not content.is_displayed()
+ def _source_sees_no_codename(self):
+ codename = self.driver.find_elements_by_css_selector('.code-reminder')
+ assert len(codename) == 0
+
@screenshots
def _source_chooses_to_login(self):
self.driver.find_element_by_id('login-button').click()
@@ -90,6 +97,10 @@ def _source_proceeds_to_login(self):
assert ("SecureDrop | Protecting Journalists and Sources" ==
self.driver.title)
+ # Check that we've logged in
+
+ replies = self.driver.find_elements_by_id("replies")
+ assert len(replies) == 1
def _source_enters_codename_in_login_form(self):
codename_input = self.driver.find_element_by_id(
diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py
--- a/securedrop/tests/functional/test_source.py
+++ b/securedrop/tests/functional/test_source.py
@@ -12,3 +12,8 @@ def test_lookup_codename_hint(self):
self._source_continues_to_submit_page()
self._source_shows_codename()
self._source_hides_codename()
+ self._source_logs_out()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_proceeds_to_login()
+ self._source_sees_no_codename()
| Only show source codename on first session
# Feature request
## Description
A source is shown their codename on the `/lookup` page to help them remember it on their first session. This hint should not be shown again after the source logs out. The reason being is in the future, we maybe remove the actual codename from the cookie to prevent it from being used to login indefinitely (either by encrypting it or using some uuid that maps to the codename per session). If we show this on subsequent logins, an attack could steal the cookie, not learn the codename, but login see the codename, and then login again indefinitely.
Plus, is a source has logged in successfully, they definitely know their codename, and there is little (nothing?) to be gained by showing it again.
| Adding hackathon, and some notes. This could be done simply by adding a single field to the session cookie that is only added on account creation and not login. The templates could check for that either as a standalone variable or in `g`.
Taking a look.
Might be nice to stop storing the codename from the session too. | 2017-10-08T14:28:41Z | [] | [] |
freedomofpress/securedrop | 2,430 | freedomofpress__securedrop-2430 | [
"2304"
] | e10f71c5b52d17347daea0bd55e4f75aa493d433 | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -26,7 +26,8 @@
import template_filters
from db import (db_session, Source, Journalist, Submission, Reply,
SourceStar, get_one_or_else, LoginThrottledException,
- PasswordError, InvalidUsernameException)
+ PasswordError, InvalidUsernameException,
+ BadTokenException, WrongPasswordException)
import worker
app = Flask(__name__, template_folder=config.JOURNALIST_TEMPLATES_DIR)
@@ -131,42 +132,60 @@ def wrapper(*args, **kwargs):
return wrapper
+def validate_user(username, password, token, error_message=None):
+ """
+ Validates the user by calling the login and handling exceptions
+ :param username: Username
+ :param password: Password
+ :param token: Two-factor authentication token
+ :param error_message: Localized error message string to use on failure
+ :return: Journalist user object if successful, None otherwise.
+ """
+ try:
+ return Journalist.login(username, password, token)
+ except (InvalidUsernameException,
+ BadTokenException,
+ WrongPasswordException,
+ LoginThrottledException) as e:
+ app.logger.error("Login for '{}' failed: {}".format(
+ username, e))
+ if not error_message:
+ error_message = gettext('Login failed.')
+ login_flashed_msg = error_message
+
+ if isinstance(e, LoginThrottledException):
+ login_flashed_msg += " "
+ period = Journalist._LOGIN_ATTEMPT_PERIOD
+ # ngettext is needed although we always have period > 1
+ # see https://github.com/freedomofpress/securedrop/issues/2422
+ login_flashed_msg += ngettext(
+ "Please wait at least {seconds} second "
+ "before logging in again.",
+ "Please wait at least {seconds} seconds "
+ "before logging in again.", period).format(seconds=period)
+ else:
+ try:
+ user = Journalist.query.filter_by(
+ username=username).one()
+ if user.is_totp:
+ login_flashed_msg += " "
+ login_flashed_msg += gettext(
+ "Please wait for a new two-factor token"
+ " before trying again.")
+ except:
+ pass
+
+ flash(login_flashed_msg, "error")
+ return None
+
+
@app.route('/login', methods=('GET', 'POST'))
def login():
if request.method == 'POST':
- try:
- user = Journalist.login(request.form['username'],
- request.form['password'],
- request.form['token'])
- except Exception as e:
- app.logger.error("Login for '{}' failed: {}".format(
- request.form['username'], e))
- login_flashed_msg = gettext('Login failed.')
-
- if isinstance(e, LoginThrottledException):
- login_flashed_msg += " "
- period = Journalist._LOGIN_ATTEMPT_PERIOD
- # ngettext is needed although we always have period > 1
- # see https://github.com/freedomofpress/securedrop/issues/2422
- login_flashed_msg += ngettext(
- "Please wait at least {seconds} second "
- "before logging in again.",
- "Please wait at least {seconds} seconds "
- "before logging in again.", period).format(seconds=period)
- else:
- try:
- user = Journalist.query.filter_by(
- username=request.form['username']).one()
- if user.is_totp:
- login_flashed_msg += " "
- login_flashed_msg += gettext(
- "Please wait for a new two-factor token"
- " before logging in again.")
- except:
- pass
-
- flash(login_flashed_msg, "error")
- else:
+ user = validate_user(request.form['username'],
+ request.form['password'],
+ request.form['token'])
+ if user:
app.logger.info("'{}' logged in with the token {}".format(
request.form['username'], request.form['token']))
@@ -408,12 +427,17 @@ def edit_account():
password=password)
[email protected]('/account/new-password', methods=['POST'])
[email protected]('/account/new-password', methods=('POST',))
@login_required
def new_password():
user = g.user
- password = request.form.get('password')
- _set_diceware_password(user, password)
+ current_password = request.form.get('current_password')
+ token = request.form.get('token')
+ error_message = gettext('Incorrect password or two-factor code.')
+ # If the user is validated, change their password
+ if validate_user(user.username, current_password, token, error_message):
+ password = request.form.get('password')
+ _set_diceware_password(user, password)
return redirect(url_for('edit_account'))
| diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -545,7 +545,9 @@ def test_user_change_password(self):
# change password
new_pw = 'another correct horse battery staply long password'
self.journalist_app.post('/account/new-password',
- data=dict(password=new_pw))
+ data=dict(password=new_pw,
+ current_password=self.user_pw,
+ token='mocked'))
# logout
self.journalist_app.get('/logout')
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -294,7 +294,9 @@ def test_user_edits_password_success_reponse(self):
self._login_user()
resp = self.client.post(
url_for('new_password'),
- data=dict(password=VALID_PASSWORD_2),
+ data=dict(current_password=self.user_pw,
+ token='mocked',
+ password=VALID_PASSWORD_2),
follow_redirects=True)
text = resp.data.decode('utf-8')
@@ -307,7 +309,9 @@ def test_user_edits_password_error_reponse(self):
with patch('db.db_session.commit', side_effect=Exception()):
resp = self.client.post(
url_for('new_password'),
- data=dict(password=VALID_PASSWORD_2),
+ data=dict(current_password=self.user_pw,
+ token='mocked',
+ password=VALID_PASSWORD_2),
follow_redirects=True)
assert ('There was an error, and the new password might not have '
@@ -361,7 +365,9 @@ def test_user_edits_password_too_long_warning(self):
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
self.client.post(url_for('new_password'),
- data=dict(password=overly_long_password),
+ data=dict(password=overly_long_password,
+ token='mocked',
+ current_password=self.user_pw),
follow_redirects=True)
self.assertMessageFlashed('You submitted a bad password! '
@@ -662,10 +668,23 @@ def test_user_authorization_for_posts(self):
res = self.client.post(url)
self.assertStatus(res, 302)
+ def test_incorrect_current_password_change(self):
+ self._login_user()
+ resp = self.client.post(url_for('new_password'),
+ data=dict(password=VALID_PASSWORD,
+ token='mocked',
+ current_password='badpw'),
+ follow_redirects=True)
+
+ text = resp.data.decode('utf-8')
+ self.assertIn('Incorrect password or two-factor code', text)
+
def test_invalid_user_password_change(self):
self._login_user()
res = self.client.post(url_for('new_password'),
- data=dict(password='badpw'))
+ data=dict(password='badpw',
+ token='mocked',
+ current_password=self.user_pw))
self.assertRedirects(res, url_for('edit_account'))
def test_too_long_user_password_change(self):
@@ -675,7 +694,9 @@ def test_too_long_user_password_change(self):
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
self.client.post(url_for('new_password'),
- data=dict(password=overly_long_password),
+ data=dict(password=overly_long_password,
+ token='mocked',
+ current_password=self.user_pw),
follow_redirects=True)
self.assertMessageFlashed('You submitted a bad password! Password not '
@@ -685,7 +706,9 @@ def test_valid_user_password_change(self):
self._login_user()
resp = self.client.post(
url_for('new_password'),
- data=dict(password=VALID_PASSWORD_2),
+ data=dict(password=VALID_PASSWORD_2,
+ token='mocked',
+ current_password=self.user_pw),
follow_redirects=True)
assert 'Password updated.' in \
| User does not need to enter their old password to change their password
## Description
A user can change their password without entering their old password first.
## Steps to Reproduce
1. Login
2. Go to change password form
## Expected Behavior
User must enter old password and then generate the new password.
## Actual Behavior
User can generate their new password without entering their old one.
## Comments
Related: #2300, #2303. This is a low priority to fix, but filing nevertheless.
| Proposing that the frontend change would look like the following (only when user edits their own account, not for admins), including the password and 2fa code:
<img width="710" alt="screen shot 2017-10-08 at 17 07 54" src="https://user-images.githubusercontent.com/1524722/31318057-40df067a-ac4c-11e7-8a2d-6ca5d9890ec1.png">
I'm hoping to get a pull request in later today. | 2017-10-08T17:37:49Z | [] | [] |
freedomofpress/securedrop | 2,431 | freedomofpress__securedrop-2431 | [
"2420"
] | 2badbfb09b87d92411b55cf6adfbb73c854c008e | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -218,7 +218,7 @@ def admin_add_user():
else:
flash(gettext("An error occurred saving this user"
" to the database."
- " Please check the application logs."),
+ " Please inform your administrator."),
"error")
app.logger.error("Adding user '{}' failed: {}".format(
username, e))
@@ -286,8 +286,7 @@ def admin_reset_two_factor_hotp():
else:
flash(gettext(
"An unexpected error occurred! "
- "Please check the application "
- "logs or inform your adminstrator."), "error")
+ "Please inform your administrator."), "error")
app.logger.error(
"set_hotp_secret '{}' (id {}) failed: {}".format(
otp_secret, uid, e))
@@ -310,8 +309,8 @@ def commit_account_changes(user):
db_session.commit()
except Exception as e:
flash(gettext(
- "An unexpected error occurred! Please check the application "
- "logs or inform your adminstrator."), "error")
+ "An unexpected error occurred! Please "
+ "inform your administrator."), "error")
app.logger.error("Account changes for '{}' failed: {}".format(user,
e))
db_session.rollback()
@@ -718,8 +717,8 @@ def reply():
db_session.commit()
except Exception as exc:
flash(gettext(
- "An unexpected error occurred! Please check the application "
- "logs or inform your adminstrator."), "error")
+ "An unexpected error occurred! Please "
+ "inform your administrator."), "error")
# We take a cautious approach to logging here because we're dealing
# with responses to sources. It's possible the exception message could
# contain information we don't want to write to disk.
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -84,8 +84,8 @@ def test_reply_error_flashed_message(self):
data={'filesystem_id': filesystem_id, 'msg': '_'})
self.assertMessageFlashed(
- 'An unexpected error occurred! Please check '
- 'the application logs or inform your adminstrator.', 'error')
+ 'An unexpected error occurred! Please '
+ 'inform your administrator.', 'error')
def test_empty_replies_are_rejected(self):
source, _ = utils.db_helper.init_source()
@@ -452,8 +452,7 @@ def test_admin_resets_user_hotp_error(self,
self.assertEqual(old_hotp, new_hotp)
self.assertMessageFlashed("An unexpected error occurred! "
- "Please check the application "
- "logs or inform your adminstrator.", "error")
+ "Please inform your administrator.", "error")
mocked_error_logger.assert_called_once_with(
"set_hotp_secret '{}' (id {}) failed: {}".format(
otp_secret, self.user.id, error_message))
@@ -612,7 +611,7 @@ def test_admin_add_user_integrity_error(self,
"None [SQL: 'STATEMENT'] [parameters: 'PARAMETERS']")
self.assertMessageFlashed(
"An error occurred saving this user to the database."
- " Please check the application logs.",
+ " Please inform your administrator.",
"error")
def test_admin_page_restriction_http_gets(self):
| Typo in string, useless error message.
# Bug
There is a typo in one of the strings and also normal users can't check the application logs, so why do we even tell them to?
Found by @xella
## Actual Behavior
> An unexpected error occurred! Please check the application logs or inform your adminstrator.
## Expected Behavior
> An unexpected error occurred! Please inform your administrator.
| 2017-10-08T20:25:30Z | [] | [] |
|
freedomofpress/securedrop | 2,438 | freedomofpress__securedrop-2438 | [
"2374"
] | 4d2672ff6a8ec6502c3765d84d7450f5295e5f24 | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -45,7 +45,8 @@
app.jinja_env.globals['header_image'] = 'logo.png'
app.jinja_env.globals['use_custom_header_image'] = False
-app.jinja_env.filters['datetimeformat'] = template_filters.datetimeformat
+app.jinja_env.filters['rel_datetime_format'] = \
+ template_filters.rel_datetime_format
app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -36,8 +36,6 @@ def create_app(config):
assets = Environment(app)
app.config['assets'] = assets
- # this set needs to happen *before* we set the jinja filters otherwise
- # we get name collisions
i18n.setup_app(app)
app.jinja_env.globals['version'] = version.__version__
@@ -48,7 +46,8 @@ def create_app(config):
app.jinja_env.globals['header_image'] = 'logo.png'
app.jinja_env.globals['use_custom_header_image'] = False
- app.jinja_env.filters['datetimeformat'] = template_filters.datetimeformat
+ app.jinja_env.filters['rel_datetime_format'] = \
+ template_filters.rel_datetime_format
app.jinja_env.filters['nl2br'] = evalcontextfilter(template_filters.nl2br)
app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
diff --git a/securedrop/template_filters.py b/securedrop/template_filters.py
--- a/securedrop/template_filters.py
+++ b/securedrop/template_filters.py
@@ -6,7 +6,7 @@
import math
-def datetimeformat(dt, fmt=None, relative=False):
+def rel_datetime_format(dt, fmt=None, relative=False):
"""Template filter for readable formatting of datetime.datetime"""
if relative:
time = dates.format_timedelta(datetime.utcnow() - dt,
| diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
--- a/securedrop/tests/test_template_filters.py
+++ b/securedrop/tests/test_template_filters.py
@@ -18,34 +18,36 @@
class TestTemplateFilters(object):
- def verify_datetimeformat(self, app):
+ def verify_rel_datetime_format(self, app):
with app.test_client() as c:
c.get('/')
assert session.get('locale') is None
- result = template_filters.datetimeformat(
+ result = template_filters.rel_datetime_format(
datetime(2016, 1, 1, 1, 1, 1))
assert "Jan 01, 2016 01:01 AM" == result
- result = template_filters.datetimeformat(
+ result = template_filters.rel_datetime_format(
datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy")
assert "2016" == result
test_time = datetime.utcnow() - timedelta(hours=2)
- result = template_filters.datetimeformat(test_time, relative=True)
+ result = template_filters.rel_datetime_format(test_time,
+ relative=True)
assert "2 hours ago" == result
c.get('/?l=fr_FR')
assert session.get('locale') == 'fr_FR'
- result = template_filters.datetimeformat(
+ result = template_filters.rel_datetime_format(
datetime(2016, 1, 1, 1, 1, 1))
assert "janv. 01, 2016 01:01 AM" == result
- result = template_filters.datetimeformat(
+ result = template_filters.rel_datetime_format(
datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy")
assert "2016" == result
test_time = datetime.utcnow() - timedelta(hours=2)
- result = template_filters.datetimeformat(test_time, relative=True)
+ result = template_filters.rel_datetime_format(test_time,
+ relative=True)
assert "2 heures ago" == result
def verify_filesizeformat(self, app):
@@ -106,7 +108,7 @@ def test_filters(self):
app.config['BABEL_TRANSLATION_DIRECTORIES'] = config.TEMP_DIR
i18n.setup_app(app)
self.verify_filesizeformat(app)
- self.verify_datetimeformat(app)
+ self.verify_rel_datetime_format(app)
@classmethod
def teardown_class(cls):
| Rename `datetimeformat` to prevent collisions with Babel functions
# Feature request
## Description
Babel injects a function called `datetimeformat` into the jinja environment. We do too, manually. They do not have the same args, and we get `TypeError`s. We should rename our functions to fix this.
## User Stories
As a dev, I don't want to have to track down weird errors if we don't do things in exactly the right order.
| I don't think this is the root of the problem because the error we sometime get is about an argument (relative) which exists in both implementation.
The current fix for this iproblem (with the global journalist/source implemenation) s to ensure the test file reloads the file that journalist/source module. See for instance test_i18n.py
<pre>
@classmethod
def teardown_class(cls):
reload(journalist)
reload(source)
</pre>
I take that back. The [format_dateime](https://github.com/python-babel/flask-babel/blob/master/flask_babel/__init__.py#L381) provided by default does **not** have a **relative** kwargs argument. So the problem is that the babel module gets reloaded (how ?) and resets the jinja filters. And if the journalist/source is not also reloaded, the implementation is not overloaded.
I suspect changing the name won't help. If the filters are reset we will just get an error because the filter does not exist at all. We need to figure out why the jinja filters are being reset after a test.
Doing a `reoload(journalist)` just resets it the same way. `journalist.app` is calling `journanlst_app.create_app` which calls `i18n.setup_app` and also does the jinja stuff. And this happens in the same order every time. The reason this was a problem was that in the refactor, I changed the order and our function was always being overwritten by the babel one.
Ah, that makes sense. π on renaming the function to avoid that kind of headache. | 2017-10-12T19:44:53Z | [] | [] |
freedomofpress/securedrop | 2,471 | freedomofpress__securedrop-2471 | [
"2386"
] | 6732c539ba1decd252fd38221ff5ea26f9882eea | diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -54,6 +54,10 @@ def create():
current_app.logger.error(
"Attempt to create a source with duplicate codename: %s" %
(e,))
+
+ # Issue 2386: don't log in on duplicates
+ del session['codename']
+ abort(500)
else:
os.mkdir(store.path(filesystem_id))
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -149,6 +149,7 @@ def test_create_duplicate_codename(self, logger):
logger.assert_called_once()
self.assertIn("Attempt to create a source with duplicate codename",
logger.call_args[0][0])
+ assert 'codename' not in session
def test_lookup(self):
"""Test various elements on the /lookup page."""
| Abort with 500 if duplicate codename is ever generated
## Description
For defense in depth, we should bail (`abort(500)`) if a duplicate codename is ever generated instead of logging an error and continuing on. [See discussion here](https://github.com/freedomofpress/securedrop/pull/2377#discussion_r141571021).
| 2017-10-23T19:47:54Z | [] | [] |
|
freedomofpress/securedrop | 2,479 | freedomofpress__securedrop-2479 | [
"2290"
] | 8c2d123a9d21cfb0df6c45a0a9ad844a76296205 | diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -5,7 +5,7 @@
from flask import (Flask, request, render_template, send_file, redirect, flash,
url_for, g, abort, session)
-from flask_wtf.csrf import CSRFProtect
+from flask_wtf.csrf import CSRFProtect, CSRFError
from flask_assets import Environment
from jinja2 import Markup
from sqlalchemy.orm.exc import NoResultFound
@@ -33,6 +33,16 @@
app.config.from_object(config.JournalistInterfaceFlaskConfig)
CSRFProtect(app)
+
[email protected](CSRFError)
+def handle_csrf_error(e):
+ # render the message first to ensure it's localized.
+ msg = gettext('You have been logged out due to inactivity')
+ session.clear()
+ flash(msg, 'error')
+ return redirect(url_for('login'))
+
+
i18n.setup_app(app)
assets = Environment(app)
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -3,7 +3,7 @@
url_for, redirect)
from flask_babel import gettext
from flask_assets import Environment
-from flask_wtf.csrf import CSRFProtect
+from flask_wtf.csrf import CSRFProtect, CSRFError
from jinja2 import evalcontextfilter
from os import path
from sqlalchemy.orm.exc import NoResultFound
@@ -31,8 +31,16 @@ def create_app(config):
# The default CSRF token expiration is 1 hour. Since large uploads can
# take longer than an hour over Tor, we increase the valid window to 24h.
app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24
+
CSRFProtect(app)
+ @app.errorhandler(CSRFError)
+ def handle_csrf_error(e):
+ msg = render_template('session_timeout.html')
+ session.clear()
+ flash(Markup(msg), "important")
+ return redirect(url_for('main.index'))
+
assets = Environment(app)
app.config['assets'] = assets
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -1002,6 +1002,21 @@ def test_journalist_session_expiration(self):
else:
del config.SESSION_EXPIRATION_MINUTES
+ def test_csrf_error_page(self):
+ old_enabled = self.app.config['WTF_CSRF_ENABLED']
+ self.app.config['WTF_CSRF_ENABLED'] = True
+
+ try:
+ with self.app.test_client() as app:
+ resp = app.post(url_for('login'))
+ self.assertRedirects(resp, url_for('login'))
+
+ resp = app.post(url_for('login'), follow_redirects=True)
+ self.assertIn('You have been logged out due to inactivity',
+ resp.data)
+ finally:
+ self.app.config['WTF_CSRF_ENABLED'] = old_enabled
+
class TestJournalistAppTwo(unittest.TestCase):
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -5,7 +5,7 @@
import re
from bs4 import BeautifulSoup
-from flask import session, escape
+from flask import session, escape, url_for
from flask_testing import TestCase
import crypto_util
@@ -493,3 +493,18 @@ def _test_source_session_expiration(self):
config.SESSION_EXPIRATION_MINUTES = old_expiration
else:
del config.SESSION_EXPIRATION_MINUTES
+
+ def test_csrf_error_page(self):
+ old_enabled = self.app.config['WTF_CSRF_ENABLED']
+ self.app.config['WTF_CSRF_ENABLED'] = True
+
+ try:
+ with self.app.test_client() as app:
+ resp = app.post(url_for('main.create'))
+ self.assertRedirects(resp, url_for('main.index'))
+
+ resp = app.post(url_for('main.create'), follow_redirects=True)
+ self.assertIn('Your session timed out due to inactivity',
+ resp.data)
+ finally:
+ self.app.config['WTF_CSRF_ENABLED'] = old_enabled
| Create a better session expiry page
# Feature request
Create a better session expiry page.
## Description
Currently, when a journalist session expires, they run into a blank "CSRF token expired" page. Ideally, should instead be given a more context-specific, rather than tech-specific error message e.g. "Your SecureDrop session has expired, please log back in to continue." and redirected or given a link to go back to a login page so that they're not stuck/worried not knowing what happened or what to do next.
## User Stories
As a journalist I'd like to be notified when my session expires and what to do when it does. Also I'd like to not worry about weird messages that sounds scary but aren't actually anything to worry about.
| Closed by #1494, users will as of 0.4.4 get a friendlier message as suggested here
Reopening because this is *not* actually closed. We just *happened* to work around it. Flask-WTF sets the CSRF protection tokens to have a time limit of 60 minutes by default (set with `WTF_CSRF_TIME_LIMIT`). We set the session timeout to be 30 minutes by default. This means we never see the error page unless an admin adjusts the timeout.
This can be verified with:
```python
app.config['WTF_CSRF_TIME_LIMIT'] = 10 # seconds
```
In `journalist.py`. Run the app, load the login page, wait over 10 seconds, then attempt to log in (either good or bad credentials). This will display the ugly 400 Bad Request page.
![screenshot_2017-10-07_15-27-01](https://user-images.githubusercontent.com/3998464/31308268-043a1b0c-ab74-11e7-96cf-62b2b6cf093d.png)
A possible solution would be to set the CSRF timeout to be equal to the session timeout. I also know the CSRF value is set to 24 hours for sources, but I suspect that setting it lower won't break the uploads because it would be very strange for Flask-WTF to set a callback that breaks the connection at the instant the token expires. This would need to be investigated.
Or, we could just use a custom error handler [as described in the Flask-WTF docs](https://flask-wtf.readthedocs.io/en/stable/csrf.html#customize-the-error-response).
It would also be good to clear the session in the aforementioned handler since the CSRF token expiring should indicate that the session is unattended as well. | 2017-10-24T20:43:26Z | [] | [] |
freedomofpress/securedrop | 2,494 | freedomofpress__securedrop-2494 | [
"2311",
"2311"
] | 4e099a2884d9889a449583bb3604cb48fb6316fa | diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -158,6 +158,13 @@ def _add_user(is_admin=False):
while True:
otp_secret = raw_input(
"Please configure this user's YubiKey and enter the secret: ")
+ if otp_secret:
+ tmp_str = otp_secret.replace(" ", "")
+ if len(tmp_str) != 40:
+ print("The length of the secret is not correct. "
+ "Expected 40 characters, but received {0}. "
+ "Try again.".format(len(tmp_str)))
+ continue
if otp_secret:
break
| diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -21,6 +21,10 @@
from db import Journalist, db_session
+YUBIKEY_HOTP = ['cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc',
+ 'cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc d7']
+
+
class TestManagePy(object):
def test_parse_args(self):
# just test that the arg parser is stable
@@ -63,6 +67,20 @@ def test_get_yubikey_usage_yes(self, mock_stdin):
def test_get_yubikey_usage_no(self, mock_stdin):
assert not manage._get_yubikey_usage()
+ @mock.patch("manage._get_username", return_value='ntoll')
+ @mock.patch("manage._get_yubikey_usage", return_value=True)
+ @mock.patch("__builtin__.raw_input", side_effect=YUBIKEY_HOTP)
+ @mock.patch("sys.stdout", new_callable=StringIO)
+ def test_handle_invalid_secret(self, mock_username, mock_yubikey,
+ mock_htop, mock_stdout):
+ """Regression test for bad secret logic in manage.py"""
+
+ # We will try to provide one invalid and one valid secret
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 0)
+ self.assertIn('Try again.', sys.stdout.getvalue())
+ self.assertIn('successfully added', sys.stdout.getvalue())
+
@mock.patch("manage._get_username", return_value='foo-bar-baz')
@mock.patch("manage._get_yubikey_usage", return_value=False)
@mock.patch("sys.stdout", new_callable=StringIO)
| Poor input validation on Yubikey HOTP setup
# Bug
## Description
When configuring a Yubikey for HOTP 2FA, the `./manage.py` script does not valid input properly.
## Steps to Reproduce
For instance, a hardware Yubikey with secret key:
> cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc d7
that entire secret should be pasted as-is into the prompt **Please configure your YubiKey and enter the secret:**. However, if you make a mistake copy/pasting, and truncate the string, e.g.:
> cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7
Then the script will still accept the bad value, and logins will be impossible with the newly created account.
## Expected Behavior
`./manage.py` script fails if HOTP secret input is bad, forcing re-entry.
## Actual Behavior
`./manage.py` shows a lackadaisical attitude toward HOTP secret structure, then staunchly refuses logins using the Yubikey.
## Comments
Logs show, predictably, a bad token error:
> ERROR in journalist: Login for 'conor4' failed: invalid token
Poor input validation on Yubikey HOTP setup
# Bug
## Description
When configuring a Yubikey for HOTP 2FA, the `./manage.py` script does not valid input properly.
## Steps to Reproduce
For instance, a hardware Yubikey with secret key:
> cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc d7
that entire secret should be pasted as-is into the prompt **Please configure your YubiKey and enter the secret:**. However, if you make a mistake copy/pasting, and truncate the string, e.g.:
> cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7
Then the script will still accept the bad value, and logins will be impossible with the newly created account.
## Expected Behavior
`./manage.py` script fails if HOTP secret input is bad, forcing re-entry.
## Actual Behavior
`./manage.py` shows a lackadaisical attitude toward HOTP secret structure, then staunchly refuses logins using the Yubikey.
## Comments
Logs show, predictably, a bad token error:
> ERROR in journalist: Login for 'conor4' failed: invalid token
| Intriguingly we do have oddly specific input validation if the string is "odd-length", meaning not-even:
```
Please configure your YubiKey and enter the secret: a0 4a 39 4d ff 7c cd 54 87 7b f2 63 d4 5c e
['Traceback (most recent call last):\n', ' File "./manage.py", line 162, in _add_user\n otp_secret=otp_secret)\n', ' File "<string>", line 4, in __init__\n', ' File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/state.py", line 414, in _initialize_instance\n manager.dispatch.init_failure(self, args, kwargs)\n', ' File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/util/langhelpers.py", line 66, in __exit__\n compat.reraise(exc_type, exc_value, exc_tb)\n', ' File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/state.py", line 411, in _initialize_instance\n return manager.original_init(*mixed[1:], **kwargs)\n', ' File "/var/www/securedrop/db.py", line 277, in __init__\n self.set_hotp_secret(otp_secret)\n', ' File "/var/www/securedrop/db.py", line 347, in set_hotp_secret\n "")))\n', 'TypeError: Odd-length string\n']
```
That exception should be caught and a readable error message displayed.
Picking this one.
Intriguingly we do have oddly specific input validation if the string is "odd-length", meaning not-even:
```
Please configure your YubiKey and enter the secret: a0 4a 39 4d ff 7c cd 54 87 7b f2 63 d4 5c e
['Traceback (most recent call last):\n', ' File "./manage.py", line 162, in _add_user\n otp_secret=otp_secret)\n', ' File "<string>", line 4, in __init__\n', ' File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/state.py", line 414, in _initialize_instance\n manager.dispatch.init_failure(self, args, kwargs)\n', ' File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/util/langhelpers.py", line 66, in __exit__\n compat.reraise(exc_type, exc_value, exc_tb)\n', ' File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/state.py", line 411, in _initialize_instance\n return manager.original_init(*mixed[1:], **kwargs)\n', ' File "/var/www/securedrop/db.py", line 277, in __init__\n self.set_hotp_secret(otp_secret)\n', ' File "/var/www/securedrop/db.py", line 347, in set_hotp_secret\n "")))\n', 'TypeError: Odd-length string\n']
```
That exception should be caught and a readable error message displayed.
Picking this one. | 2017-10-31T15:13:48Z | [] | [] |
freedomofpress/securedrop | 2,502 | freedomofpress__securedrop-2502 | [
"2500",
"2500"
] | ea389c2c97eef5a3a346a720f7a6d4109ac56b65 | diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -11,6 +11,7 @@
from journalist_app.decorators import admin_required
from journalist_app.utils import (make_password, commit_account_changes,
set_diceware_password)
+from journalist_app.forms import NewUserForm
def make_blueprint(config):
@@ -25,52 +26,52 @@ def index():
@view.route('/add', methods=('GET', 'POST'))
@admin_required
def add_user():
- if request.method == 'POST':
+ form = NewUserForm()
+ if form.validate_on_submit():
form_valid = True
username = request.form['username']
-
password = request.form['password']
is_admin = bool(request.form.get('is_admin'))
- if form_valid:
- try:
- otp_secret = None
- if request.form.get('is_hotp', False):
- otp_secret = request.form.get('otp_secret', '')
- new_user = Journalist(username=username,
- password=password,
- is_admin=is_admin,
- otp_secret=otp_secret)
- db_session.add(new_user)
- db_session.commit()
- except PasswordError:
- flash(gettext(
- 'There was an error with the autogenerated password. '
- 'User not created. Please try again.'), 'error')
- form_valid = False
- except InvalidUsernameException as e:
- form_valid = False
- flash('Invalid username: ' + str(e), "error")
- except IntegrityError as e:
- db_session.rollback()
- form_valid = False
- if ("UNIQUE constraint failed: journalists.username"
- in str(e)):
- flash(gettext("That username is already in use"),
- "error")
- else:
- flash(gettext("An error occurred saving this user"
- " to the database."
- " Please inform your administrator."),
- "error")
- current_app.logger.error("Adding user '{}' failed: {}"
- .format(username, e))
+ try:
+ otp_secret = None
+ if request.form.get('is_hotp', False):
+ otp_secret = request.form.get('otp_secret', '')
+ new_user = Journalist(username=username,
+ password=password,
+ is_admin=is_admin,
+ otp_secret=otp_secret)
+ db_session.add(new_user)
+ db_session.commit()
+ except PasswordError:
+ flash(gettext(
+ 'There was an error with the autogenerated password. '
+ 'User not created. Please try again.'), 'error')
+ form_valid = False
+ except InvalidUsernameException as e:
+ form_valid = False
+ flash('Invalid username: ' + str(e), "error")
+ except IntegrityError as e:
+ db_session.rollback()
+ form_valid = False
+ if "UNIQUE constraint failed: journalists.username" in str(e):
+ flash(gettext("That username is already in use"),
+ "error")
+ else:
+ flash(gettext("An error occurred saving this user"
+ " to the database."
+ " Please inform your administrator."),
+ "error")
+ current_app.logger.error("Adding user "
+ "'{}' failed: {}".format(
+ username, e))
if form_valid:
return redirect(url_for('admin.new_user_two_factor',
uid=new_user.id))
- return render_template("admin_add_user.html", password=make_password())
+ return render_template("admin_add_user.html", password=make_password(),
+ form=form)
@view.route('/2fa', methods=('GET', 'POST'))
@admin_required
diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py
--- a/securedrop/journalist_app/forms.py
+++ b/securedrop/journalist_app/forms.py
@@ -2,8 +2,44 @@
from flask_babel import gettext
from flask_wtf import FlaskForm
-from wtforms import TextAreaField
-from wtforms.validators import InputRequired
+from wtforms import (TextAreaField, TextField, BooleanField, HiddenField,
+ ValidationError)
+from wtforms.validators import InputRequired, Optional
+
+from db import Journalist
+
+
+def otp_secret_validation(form, field):
+ strip_whitespace = field.data.replace(' ', '')
+ if len(strip_whitespace) != 40:
+ raise ValidationError(gettext('Field must be 40 characters long but '
+ 'got {num_chars}.'.format(
+ num_chars=len(strip_whitespace)
+ )))
+
+
+def minimum_length_validation(form, field):
+ if len(field.data) < Journalist.MIN_USERNAME_LEN:
+ raise ValidationError(
+ gettext('Field must be at least {min_chars} '
+ 'characters long but only got '
+ '{num_chars}.'.format(
+ min_chars=Journalist.MIN_USERNAME_LEN,
+ num_chars=len(field.data))))
+
+
+class NewUserForm(FlaskForm):
+ username = TextField('username', validators=[
+ InputRequired(message=gettext('This field is required.')),
+ minimum_length_validation
+ ])
+ password = HiddenField('password')
+ is_admin = BooleanField('is_admin')
+ is_hotp = BooleanField('is_hotp')
+ otp_secret = TextField('otp_secret', validators=[
+ otp_secret_validation,
+ Optional()
+ ])
class ReplyForm(FlaskForm):
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -552,7 +552,7 @@ def test_admin_add_user_without_username(self):
data=dict(username='',
password=VALID_PASSWORD,
is_admin=None))
- self.assertIn('Invalid username', resp.data)
+ self.assertIn('This field is required.', resp.data)
def test_admin_add_user_too_short_username(self):
self._login_admin()
@@ -562,7 +562,52 @@ def test_admin_add_user_too_short_username(self):
password='pentagonpapers',
password_again='pentagonpapers',
is_admin=None))
- self.assertIn('Invalid username', resp.data)
+ self.assertIn('Field must be at least {} characters long'.format(
+ Journalist.MIN_USERNAME_LEN),
+ resp.data)
+
+ def test_admin_add_user_yubikey_odd_length(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username='dellsberg',
+ password=VALID_PASSWORD,
+ password_again=VALID_PASSWORD,
+ is_admin=None,
+ is_hotp=True,
+ otp_secret='123'))
+ self.assertIn('Field must be 40 characters long', resp.data)
+
+ def test_admin_add_user_yubikey_valid_length(self):
+ self._login_admin()
+
+ otp = '1234567890123456789012345678901234567890'
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username='dellsberg',
+ password=VALID_PASSWORD,
+ password_again=VALID_PASSWORD,
+ is_admin=None,
+ is_hotp=True,
+ otp_secret=otp),
+ follow_redirects=True)
+
+ # Should redirect to the token verification page
+ self.assertIn('Enable YubiKey (OATH-HOTP)', resp.data)
+
+ def test_admin_add_user_yubikey_correct_length_with_whitespace(self):
+ self._login_admin()
+
+ otp = '12 34 56 78 90 12 34 56 78 90 12 34 56 78 90 12 34 56 78 90'
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username='dellsberg',
+ password=VALID_PASSWORD,
+ password_again=VALID_PASSWORD,
+ is_admin=None,
+ is_hotp=True,
+ otp_secret=otp),
+ follow_redirects=True)
+
+ # Should redirect to the token verification page
+ self.assertIn('Enable YubiKey (OATH-HOTP)', resp.data)
def test_admin_sets_user_to_admin(self):
self._login_admin()
| Poor input validation on OTP secret field in journalist interface
## Description
500 error thrown due to lack of validation in new user form
Same issue as #2311, except that issue was for `manage.py`
## Steps to Reproduce
0. Sign in as administrator on journalist interface
1. Try to create new user using a Yubikey and provide a secret with an odd length
## Expected Behavior
Happy error message is shown indicating that the expected length of the secret is 40 characters
## Actual Behavior
![screen shot 2017-10-31 at 3 01 26 pm](https://user-images.githubusercontent.com/7832803/32251032-7280e2e0-be4c-11e7-9de5-10746fd27def.png)
## Comments
Simple fix, check length of string and flash an appropriate error message
Poor input validation on OTP secret field in journalist interface
## Description
500 error thrown due to lack of validation in new user form
Same issue as #2311, except that issue was for `manage.py`
## Steps to Reproduce
0. Sign in as administrator on journalist interface
1. Try to create new user using a Yubikey and provide a secret with an odd length
## Expected Behavior
Happy error message is shown indicating that the expected length of the secret is 40 characters
## Actual Behavior
![screen shot 2017-10-31 at 3 01 26 pm](https://user-images.githubusercontent.com/7832803/32251032-7280e2e0-be4c-11e7-9de5-10746fd27def.png)
## Comments
Simple fix, check length of string and flash an appropriate error message
| 2017-11-01T20:48:38Z | [] | [] |
|
freedomofpress/securedrop | 2,582 | freedomofpress__securedrop-2582 | [
"2460"
] | d8b8eb9c8566079552d62a11b35a9c00c71b4d8c | diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -13,7 +13,9 @@
from db import db_session, Journalist
from journalist_app import account, admin, main, col
-from journalist_app.utils import get_source
+from journalist_app.utils import get_source, logged_in
+
+_insecure_views = ['main.login', 'static']
def create_app(config):
@@ -78,6 +80,9 @@ def setup_g():
g.html_lang = i18n.locale_to_rfc_5646(g.locale)
g.locales = i18n.get_locale2name()
+ if request.endpoint not in _insecure_views and not logged_in():
+ return redirect(url_for('main.login'))
+
if request.method == 'POST':
filesystem_id = request.form.get('filesystem_id')
if filesystem_id:
diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py
--- a/securedrop/journalist_app/account.py
+++ b/securedrop/journalist_app/account.py
@@ -5,7 +5,6 @@
from flask_babel import gettext
from db import db_session
-from journalist_app.decorators import login_required
from journalist_app.utils import (make_password, set_diceware_password,
validate_user)
@@ -14,14 +13,12 @@ def make_blueprint(config):
view = Blueprint('account', __name__)
@view.route('/account', methods=('GET',))
- @login_required
def edit():
password = make_password()
return render_template('edit_account.html',
password=password)
@view.route('/new-password', methods=('POST',))
- @login_required
def new_password():
user = g.user
current_password = request.form.get('current_password')
@@ -35,7 +32,6 @@ def new_password():
return redirect(url_for('account.edit'))
@view.route('/2fa', methods=('GET', 'POST'))
- @login_required
def new_two_factor():
if request.method == 'POST':
token = request.form['token']
@@ -51,7 +47,6 @@ def new_two_factor():
return render_template('account_new_two_factor.html', user=g.user)
@view.route('/reset-2fa-totp', methods=['POST'])
- @login_required
def reset_two_factor_totp():
g.user.is_totp = True
g.user.regenerate_totp_shared_secret()
@@ -59,7 +54,6 @@ def reset_two_factor_totp():
return redirect(url_for('account.new_two_factor'))
@view.route('/reset-2fa-hotp', methods=['POST'])
- @login_required
def reset_two_factor_hotp():
otp_secret = request.form.get('otp_secret', None)
if otp_secret:
diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py
--- a/securedrop/journalist_app/col.py
+++ b/securedrop/journalist_app/col.py
@@ -9,7 +9,6 @@
import store
from db import db_session, Submission
-from journalist_app.decorators import login_required
from journalist_app.forms import ReplyForm
from journalist_app.utils import (make_star_true, make_star_false, get_source,
delete_collection, col_download_unread,
@@ -21,21 +20,18 @@ def make_blueprint(config):
view = Blueprint('col', __name__)
@view.route('/add_star/<filesystem_id>', methods=('POST',))
- @login_required
def add_star(filesystem_id):
make_star_true(filesystem_id)
db_session.commit()
return redirect(url_for('main.index'))
@view.route("/remove_star/<filesystem_id>", methods=('POST',))
- @login_required
def remove_star(filesystem_id):
make_star_false(filesystem_id)
db_session.commit()
return redirect(url_for('main.index'))
@view.route('/<filesystem_id>')
- @login_required
def col(filesystem_id):
form = ReplyForm()
source = get_source(filesystem_id)
@@ -44,7 +40,6 @@ def col(filesystem_id):
source=source, form=form)
@view.route('/delete/<filesystem_id>', methods=('POST',))
- @login_required
def delete_single(filesystem_id):
"""deleting a single collection from its /col page"""
source = get_source(filesystem_id)
@@ -55,7 +50,6 @@ def delete_single(filesystem_id):
return redirect(url_for('main.index'))
@view.route('/process', methods=('POST',))
- @login_required
def process():
actions = {'download-unread': col_download_unread,
'download-all': col_download_all, 'star': col_star,
@@ -75,7 +69,6 @@ def process():
return method(cols_selected)
@view.route('/<filesystem_id>/<fn>')
- @login_required
def download_single_submission(filesystem_id, fn):
"""Sends a client the contents of a single submission."""
if '..' in fn or fn.startswith('/'):
diff --git a/securedrop/journalist_app/decorators.py b/securedrop/journalist_app/decorators.py
--- a/securedrop/journalist_app/decorators.py
+++ b/securedrop/journalist_app/decorators.py
@@ -7,15 +7,6 @@
from journalist_app.utils import logged_in
-def login_required(func):
- @wraps(func)
- def wrapper(*args, **kwargs):
- if not logged_in():
- return redirect(url_for('main.login'))
- return func(*args, **kwargs)
- return wrapper
-
-
def admin_required(func):
@wraps(func)
def wrapper(*args, **kwargs):
diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py
--- a/securedrop/journalist_app/main.py
+++ b/securedrop/journalist_app/main.py
@@ -10,7 +10,6 @@
import store
from db import db_session, Source, SourceStar, Submission, Reply
-from journalist_app.decorators import login_required
from journalist_app.forms import ReplyForm
from journalist_app.utils import (validate_user, bulk_delete, download,
confirm_bulk_delete, get_source)
@@ -47,7 +46,6 @@ def logout():
return redirect(url_for('main.index'))
@view.route('/')
- @login_required
def index():
unstarred = []
starred = []
@@ -73,7 +71,6 @@ def index():
starred=starred)
@view.route('/reply', methods=('POST',))
- @login_required
def reply():
"""Attempt to send a Reply from a Journalist to a Source. Empty
messages are rejected, and an informative error message is flashed
@@ -124,7 +121,6 @@ def reply():
return redirect(url_for('col.col', filesystem_id=g.filesystem_id))
@view.route('/flag', methods=('POST',))
- @login_required
def flag():
g.source.flagged = True
db_session.commit()
@@ -132,7 +128,6 @@ def flag():
codename=g.source.journalist_designation)
@view.route('/bulk', methods=('POST',))
- @login_required
def bulk():
action = request.form['action']
@@ -159,7 +154,6 @@ def bulk():
abort(400)
@view.route('/regenerate-code', methods=('POST',))
- @login_required
def regenerate_code():
original_journalist_designation = g.source.journalist_designation
g.source.journalist_designation = crypto_util.display_id()
@@ -179,7 +173,6 @@ def regenerate_code():
return redirect(url_for('col.col', filesystem_id=g.filesystem_id))
@view.route('/download_unread/<filesystem_id>')
- @login_required
def download_unread_filesystem_id(filesystem_id):
id = Source.query.filter(Source.filesystem_id == filesystem_id) \
.one().id
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -147,6 +147,12 @@ def test_login_invalid_credentials(self):
self.assert200(resp)
self.assertIn("Login failed", resp.data)
+ def test_validate_redirect(self):
+ resp = self.client.post(url_for('main.index'),
+ follow_redirects=True)
+ self.assert200(resp)
+ self.assertIn("Login to access", resp.data)
+
def test_login_valid_credentials(self):
resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
| Invert `login_required` decorator on journalist interface and require logins by default
# Feature request
## Description
We use a decorator called `login_required` but may forget to add that to an endpoint in the future. A better way would be to use `app.before_request` and check that if the URL doesn't match `/static/*` or `/login` then to redirect to login if the user isn't logged in.
## User Stories
As a dev, I don't want to have to remember to add a decorator to every endpoint and then miss one and break so much of SecureDrop's security.
| I have a patch set ready which requires any view to be under login (that is logged in users), except
`main.login` and `static`.
The insecure (unauthenticated) above mentioned views are in a list in the primary blueprint code, which has to explicitly updated if one wants to add new view which works without login.
How does this sound?
@kushaldas What you described is exactly what I do in another project:
```python
@app.before_request
@ignore_static
def ensure_logged_in():
if not g.user:
if request.path == url_for('account.login'):
pass
else:
flash('You need to be logged in.', 'error')
return redirect(url_for('account.login'))
```
This assignment happens after the `before_request` function that does a lookup and adds the user to the `g` object (as it would obviously fail otherwise). | 2017-11-15T14:29:30Z | [] | [] |
freedomofpress/securedrop | 2,592 | freedomofpress__securedrop-2592 | [
"2091"
] | 1e4a6eb339b5b831b577f88110b679067162c099 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -338,6 +338,10 @@ def __init__(self, args):
u'SASL password for sending OSSEC alerts',
SiteConfig.ValidateOSSECPassword(),
None],
+ ['enable_ssh_over_tor', True, bool,
+ u'Enable SSH over Tor',
+ SiteConfig.ValidateYesNo(),
+ lambda x: x.lower() == 'yes'],
['securedrop_supported_locales', [], types.ListType,
u'Space separated list of additional locales to support '
'(' + translations + ')',
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -580,14 +580,7 @@ def verify_desc_consistency(self, site_config, desc):
with pytest.raises(ValidationError):
site_config.user_prompt_config_one(desc, '')
- verify_prompt_ssh_users = verify_desc_consistency
- verify_prompt_app_ip = verify_desc_consistency
- verify_prompt_monitor_ip = verify_desc_consistency
- verify_prompt_app_hostname = verify_desc_consistency
- verify_prompt_monitor_hostname = verify_desc_consistency
- verify_prompt_dns_server = verify_desc_consistency
-
- def verify_prompt_securedrop_app_https_on_source_interface(
+ def verify_prompt_boolean(
self, site_config, desc):
self.verify_desc_consistency(site_config, desc)
(var, default, etype, prompt, validator, transform) = desc
@@ -596,6 +589,17 @@ def verify_prompt_securedrop_app_https_on_source_interface(
assert site_config.user_prompt_config_one(desc, 'YES') is True
assert site_config.user_prompt_config_one(desc, 'NO') is False
+ verify_prompt_ssh_users = verify_desc_consistency
+ verify_prompt_app_ip = verify_desc_consistency
+ verify_prompt_monitor_ip = verify_desc_consistency
+ verify_prompt_app_hostname = verify_desc_consistency
+ verify_prompt_monitor_hostname = verify_desc_consistency
+ verify_prompt_dns_server = verify_desc_consistency
+
+ verify_prompt_securedrop_app_https_on_source_interface = \
+ verify_prompt_boolean
+ verify_prompt_enable_ssh_over_tor = verify_prompt_boolean
+
verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency
def verify_prompt_not_empty(self, site_config, desc):
diff --git a/docs/test_the_installation.rst b/docs/test_the_installation.rst
--- a/docs/test_the_installation.rst
+++ b/docs/test_the_installation.rst
@@ -7,6 +7,9 @@ Test connectivity
SSH to both servers over Tor
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Assuming you haven't disabled ssh over tor, SSH access will be
+restricted to the tor network.
+
On the *Admin Workstation*, you should be able to SSH to the *Application Server* and the *Monitor Server*. ::
ssh app
diff --git a/testinfra/common/test_tor_config.py b/testinfra/app/test_tor_config.py
similarity index 100%
rename from testinfra/common/test_tor_config.py
rename to testinfra/app/test_tor_config.py
diff --git a/testinfra/common/test_tor_hidden_services.py b/testinfra/app/test_tor_hidden_services.py
similarity index 100%
rename from testinfra/common/test_tor_hidden_services.py
rename to testinfra/app/test_tor_hidden_services.py
| Allow optional local management of SD servers without tor
# Feature request
## Description
Currently a production securedrop install configures the servers so that they are only manageable remotely over tor. There are particularly use-cases where this could be overkill -- such as when an admin only needs to "remotely" manage the servers on the local network.
We should allow the setting of a boolean flag (disabled by default to leave the behavior as-is) that will disable ssh over tor and restrict ssh by a local admin workstation IP address (we'll throw warnings if users try to set non-RFC1918 addresses).
I realize that there is value to ssh over tor -- but I also see the perspective of:
> If i don't need to remotely manage SD outside of the local building why am I opening up a remote network service for anyone on tor to access it? Yes I realize it's an authenticated service but its still a possible attack vector that isn't present if I just restrict SSH to the local network
## User Stories
As an admin, I'd like to manage my securedrop servers from my local network and do not require access remotely.
| I would also add that backup and restore over tor can be BRUTAL if the site is large (has over 1GB+) in the `store` folder. It also seems inefficient to send data out through an external network just to get data between two systems in very close physical proximity. Being able to toggle straight SSH here quickly via the admin workstation would be super helpful (or if they just had traffic restricted to local network ssh the entire time).
I don't know enough about a storage case where someone would have a folder that large and you could easily argue that in that scenario its imperative that they clean up old documents. In spite of that, I still think this is another valid use-case for having this feature available easily for admins. | 2017-11-20T17:14:31Z | [] | [] |
freedomofpress/securedrop | 2,640 | freedomofpress__securedrop-2640 | [
"2631"
] | 02611385d8ec5ff234cb0678ce17f7434d90cd47 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.4.4'
+version = '0.5-rc2'
# The full version, including alpha/beta/rc tags.
-release = '0.4.4'
+release = '0.5-rc2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.4.4'
+__version__ = '0.5-rc2'
| diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml
--- a/molecule/builder/tests/vars.yml
+++ b/molecule/builder/tests/vars.yml
@@ -1,5 +1,5 @@
---
-securedrop_version: "0.4.4"
+securedrop_version: "0.5-rc2"
ossec_version: "2.8.2"
keyring_version: "0.1.1"
| Add Brazil flag for nice display in the language picker
Since the `pt_BR` locale is ready to go (see #2630), we should add a Brazil flag to the language picker.
| 2017-11-30T12:38:53Z | [] | [] |
|
freedomofpress/securedrop | 2,648 | freedomofpress__securedrop-2648 | [
"2647",
"2647"
] | b77003d2ad0f58f9bbc7e1742dbbdb24883ef4f8 | diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -221,8 +221,11 @@ def logout():
if logged_in():
msg = render_template('logout_flashed_message.html')
- # clear the session after we render the message so it's localized
+ # Clear the session after we render the message so it's localized
+ # If a user specified a locale, save it and restore it
+ user_locale = g.locale
session.clear()
+ session['locale'] = user_locale
flash(Markup(msg), "important hide-if-not-tor-browser")
return redirect(url_for('.index'))
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -194,7 +194,9 @@ def test_login_and_logout(self):
# sessions always have 'expires', so pop it for the next check
session.pop('expires', None)
- self.assertTrue(not session)
+
+ self.assertNotIn('logged_in', session)
+ self.assertNotIn('codename', session)
self.assertIn('Thank you for exiting your session!', resp.data)
| Logout message in localized language and landing page in default language
# Bug
## Description
A source logging out of session in non-default language may yield a home page that has different languages.
## Steps to Reproduce
0. SecureDrop instance must support more than 1 language
1. Navigate to source interface
2. Change to another language
2. Click on submit document (or equivalent)Change language to non-default language
4. Logout
5. Observe multilingual SecureDrop login page (Session Ending box in chosen language, Landing page in default language
## Expected Behavior
Logout should preserve user's language settings ?
## Actual Behavior
The session settings are cleared and the user's language selection is not preserved
Logout message in localized language and landing page in default language
# Bug
## Description
A source logging out of session in non-default language may yield a home page that has different languages.
## Steps to Reproduce
0. SecureDrop instance must support more than 1 language
1. Navigate to source interface
2. Change to another language
2. Click on submit document (or equivalent)Change language to non-default language
4. Logout
5. Observe multilingual SecureDrop login page (Session Ending box in chosen language, Landing page in default language
## Expected Behavior
Logout should preserve user's language settings ?
## Actual Behavior
The session settings are cleared and the user's language selection is not preserved
| ![screenshot from 2017-11-30 16-29-26](https://user-images.githubusercontent.com/15223328/33455946-a9d4a0b8-d5eb-11e7-9a00-2ecc553dd41e.png)
This behavior does not happen on the journalist interface.
We could make it so the logout redirects to the index page with **l=de_DE** to preserve the language. @redshiftzero if you agree that's a desirable solution I'll propose a pull request.
This is happening because when we log out we use `session.clear()` on the source interface and `session.pop('uid', None)` and `session.pop('expires', None)` on the journalist interface
![screenshot from 2017-11-30 16-29-26](https://user-images.githubusercontent.com/15223328/33455946-a9d4a0b8-d5eb-11e7-9a00-2ecc553dd41e.png)
This behavior does not happen on the journalist interface.
We could make it so the logout redirects to the index page with **l=de_DE** to preserve the language. @redshiftzero if you agree that's a desirable solution I'll propose a pull request.
This is happening because when we log out we use `session.clear()` on the source interface and `session.pop('uid', None)` and `session.pop('expires', None)` on the journalist interface | 2017-11-30T22:17:42Z | [] | [] |
freedomofpress/securedrop | 2,661 | freedomofpress__securedrop-2661 | [
"2656"
] | 6489897d9498dfe6c120a9126ba82df6df7c3992 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.5-rc2'
+version = '0.5-rc4'
# The full version, including alpha/beta/rc tags.
-release = '0.5-rc2'
+release = '0.5-rc4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -227,8 +227,11 @@ def logout():
if logged_in():
msg = render_template('logout_flashed_message.html')
- # clear the session after we render the message so it's localized
+ # Clear the session after we render the message so it's localized
+ # If a user specified a locale, save it and restore it
+ user_locale = g.locale
session.clear()
+ session['locale'] = user_locale
flash(Markup(msg), "important hide-if-not-tor-browser")
return redirect(url_for('.index'))
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.5-rc2'
+__version__ = '0.5-rc4'
| diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml
--- a/molecule/builder/tests/vars.yml
+++ b/molecule/builder/tests/vars.yml
@@ -1,5 +1,5 @@
---
-securedrop_version: "0.5-rc2"
+securedrop_version: "0.5-rc4"
ossec_version: "2.8.2"
keyring_version: "0.1.1"
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -194,7 +194,9 @@ def test_login_and_logout(self):
# sessions always have 'expires', so pop it for the next check
session.pop('expires', None)
- self.assertTrue(not session)
+
+ self.assertNotIn('logged_in', session)
+ self.assertNotIn('codename', session)
self.assertIn('Thank you for exiting your session!', resp.data)
| CSS breakage for source in de_DE
# Bug
## Description
The source interface when someone tries to submit a document using `de_DE` locale, the css breaks for the buttons below.
## Steps to Reproduce
Using `de_DE` locale, try to submit a document as source.
## Expected Behavior
The buttons should come up horizontally.
## Actual Behavior
![de_css_break](https://user-images.githubusercontent.com/272303/33506010-d2037fc2-d713-11e7-849e-da9c5702a755.png)
| 2017-12-02T08:43:51Z | [] | [] |
|
freedomofpress/securedrop | 2,684 | freedomofpress__securedrop-2684 | [
"2681"
] | fedcc950195226269294f6149757cfc3fff8a0b9 | diff --git a/securedrop/i18n.py b/securedrop/i18n.py
--- a/securedrop/i18n.py
+++ b/securedrop/i18n.py
@@ -20,14 +20,13 @@
from babel import core
import collections
-import config
import os
import re
from os import path
LOCALE_SPLIT = re.compile('(-|_)')
-LOCALES = set(['en_US'])
+LOCALES = ['en_US']
babel = None
@@ -36,9 +35,12 @@ class LocaleNotFound(Exception):
"""Raised when the desired locale is not in the translations directory"""
-def setup_app(app, translation_dirs=None):
+def setup_app(config, app):
+ global LOCALES
global babel
+ translation_dirs = getattr(config, 'TRANSLATION_DIRS', None)
+
if translation_dirs is None:
translation_dirs = \
path.join(path.dirname(path.realpath(__file__)),
@@ -54,14 +56,21 @@ def setup_app(app, translation_dirs=None):
'Expected exactly one translation directory but got {}.'
.format(babel.translation_directories))
- for dirname in os.listdir(next(babel.translation_directories)):
+ translation_directories = next(babel.translation_directories)
+ for dirname in os.listdir(translation_directories):
if dirname != 'messages.pot':
- LOCALES.add(dirname)
+ LOCALES.append(dirname)
+
+ LOCALES = _get_supported_locales(
+ LOCALES,
+ getattr(config, 'SUPPORTED_LOCALES', None),
+ getattr(config, 'DEFAULT_LOCALE', None),
+ translation_directories)
- babel.localeselector(get_locale)
+ babel.localeselector(lambda: get_locale(config))
-def get_locale():
+def get_locale(config):
"""
Get the locale as follows, by order of precedence:
- l request argument or session['locale']
@@ -104,10 +113,10 @@ def get_text_direction(locale):
return core.Locale.parse(locale).text_direction
-def _get_supported_locales(locales, supported, default_locale):
- """Return SUPPORTED_LOCALES from config.py. If it is missing return
- the default locale.
-
+def _get_supported_locales(locales, supported, default_locale,
+ translation_directories):
+ """Sanity checks on locales and supported locales from config.py.
+ Return the list of supported locales.
"""
if not supported:
@@ -118,7 +127,7 @@ def _get_supported_locales(locales, supported, default_locale):
"config.py SUPPORTED_LOCALES contains {} which is not among the "
"locales found in the {} directory: {}".format(
list(unsupported),
- babel.translation_directories,
+ translation_directories,
locales))
if default_locale and default_locale not in supported:
raise LocaleNotFound("config.py SUPPORTED_LOCALES contains {} "
@@ -126,7 +135,7 @@ def _get_supported_locales(locales, supported, default_locale):
"the value of DEFAULT_LOCALE '{}'".format(
supported, default_locale))
- return supported
+ return list(supported)
NAME_OVERRIDES = {
@@ -135,12 +144,8 @@ def _get_supported_locales(locales, supported, default_locale):
def get_locale2name():
- locales = _get_supported_locales(
- LOCALES,
- getattr(config, 'SUPPORTED_LOCALES', None),
- getattr(config, 'DEFAULT_LOCALE', None))
locale2name = collections.OrderedDict()
- for l in locales:
+ for l in LOCALES:
if l in NAME_OVERRIDES:
locale2name[l] = NAME_OVERRIDES[l]
else:
@@ -159,5 +164,5 @@ def locale_to_rfc_5646(locale):
return LOCALE_SPLIT.split(locale)[0]
-def get_language():
- return get_locale().split('_')[0]
+def get_language(config):
+ return get_locale(config).split('_')[0]
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -36,7 +36,7 @@ def handle_csrf_error(e):
flash(msg, 'error')
return redirect(url_for('main.login'))
- i18n.setup_app(app)
+ i18n.setup_app(config, app)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
@@ -75,7 +75,7 @@ def setup_g():
if uid:
g.user = Journalist.query.get(uid)
- g.locale = i18n.get_locale()
+ g.locale = i18n.get_locale(config)
g.text_direction = i18n.get_text_direction(g.locale)
g.html_lang = i18n.locale_to_rfc_5646(g.locale)
g.locales = i18n.get_locale2name()
diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py
--- a/securedrop/journalist_app/account.py
+++ b/securedrop/journalist_app/account.py
@@ -14,7 +14,7 @@ def make_blueprint(config):
@view.route('/account', methods=('GET',))
def edit():
- password = make_password()
+ password = make_password(config)
return render_template('edit_account.html',
password=password)
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -70,7 +70,8 @@ def add_user():
return redirect(url_for('admin.new_user_two_factor',
uid=new_user.id))
- return render_template("admin_add_user.html", password=make_password(),
+ return render_template("admin_add_user.html",
+ password=make_password(config),
form=form)
@view.route('/2fa', methods=('GET', 'POST'))
@@ -171,7 +172,7 @@ def edit_user(user_id):
commit_account_changes(user)
- password = make_password()
+ password = make_password(config)
return render_template("edit_account.html", user=user,
password=password)
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -198,9 +198,9 @@ def col_delete(cols_selected):
return redirect(url_for('main.index'))
-def make_password():
+def make_password(config):
while True:
- password = crypto_util.genrandomid(7, i18n.get_language())
+ password = crypto_util.genrandomid(7, i18n.get_language(config))
try:
Journalist.check_password_acceptable(password)
return password
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -44,7 +44,7 @@ def handle_csrf_error(e):
assets = Environment(app)
app.config['assets'] = assets
- i18n.setup_app(app)
+ i18n.setup_app(config, app)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
@@ -81,7 +81,7 @@ def check_tor2web():
@ignore_static
def setup_g():
"""Store commonly used values in Flask's special g object"""
- g.locale = i18n.get_locale()
+ g.locale = i18n.get_locale(config)
g.text_direction = i18n.get_text_direction(g.locale)
g.html_lang = i18n.locale_to_rfc_5646(g.locale)
g.locales = i18n.get_locale2name()
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -36,7 +36,7 @@ def generate():
"notification")
return redirect(url_for('.lookup'))
- codename = generate_unique_codename()
+ codename = generate_unique_codename(config)
session['codename'] = codename
session['new_user'] = True
return render_template('generate.html', codename=codename)
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
--- a/securedrop/source_app/utils.py
+++ b/securedrop/source_app/utils.py
@@ -28,11 +28,11 @@ def valid_codename(codename):
return source is not None
-def generate_unique_codename():
+def generate_unique_codename(config):
"""Generate random codenames until we get an unused one"""
while True:
codename = crypto_util.genrandomid(Source.NUM_WORDS,
- i18n.get_language())
+ i18n.get_language(config))
# The maximum length of a word in the wordlist is 9 letters and the
# codename length is 7 words, so it is currently impossible to
| diff --git a/securedrop/tests/test_2fa.py b/securedrop/tests/test_2fa.py
--- a/securedrop/tests/test_2fa.py
+++ b/securedrop/tests/test_2fa.py
@@ -119,9 +119,3 @@ def test_bad_token_fails_to_verify_on_new_user_two_factor_page(self):
# A flashed message should appear
self.assertMessageFlashed(
'Could not verify token in two-factor authentication.', 'error')
-
- @classmethod
- def tearDownClass(cls):
- # Reset the module variables that were changed to mocks so we don't
- # break other tests
- reload(journalist)
diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
--- a/securedrop/tests/test_i18n.py
+++ b/securedrop/tests/test_i18n.py
@@ -28,10 +28,10 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import i18n
-import journalist
+import journalist_app
import manage
import pytest
-import source
+import source_app
import version
import utils
@@ -42,18 +42,25 @@ class TestI18N(object):
def setup_class(cls):
utils.env.setup()
+ def get_fake_config(self):
+ class Config:
+ def __getattr__(self, name):
+ return getattr(config, name)
+ return Config()
+
def test_get_supported_locales(self):
locales = ['en_US', 'fr_FR']
- assert ['en_US'] == i18n._get_supported_locales(locales, None, None)
+ assert ['en_US'] == i18n._get_supported_locales(
+ locales, None, None, None)
locales = ['en_US', 'fr_FR']
supported = ['en_US', 'not_found']
with pytest.raises(i18n.LocaleNotFound) as excinfo:
- i18n._get_supported_locales(locales, supported, None)
+ i18n._get_supported_locales(locales, supported, None, None)
assert "contains ['not_found']" in str(excinfo.value)
supported = ['fr_FR']
locale = 'not_found'
with pytest.raises(i18n.LocaleNotFound) as excinfo:
- i18n._get_supported_locales(locales, supported, locale)
+ i18n._get_supported_locales(locales, supported, locale, None)
assert "DEFAULT_LOCALE 'not_found'" in str(excinfo.value)
def verify_i18n(self, app):
@@ -209,33 +216,33 @@ def test_i18n(self):
pybabel init -i {d}/messages.pot -d {d} -l nb_NO
sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code norwegian"/' \
{d}/nb_NO/LC_MESSAGES/messages.po
+
+ pybabel init -i {d}/messages.pot -d {d} -l es_ES
+ sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code spanish"/' \
+ {d}/es_ES/LC_MESSAGES/messages.po
""".format(d=config.TEMP_DIR))
manage.translate_messages(args)
- supported = getattr(config, 'SUPPORTED_LOCALES', None)
- try:
- if supported:
- del config.SUPPORTED_LOCALES
- for app in (journalist.app, source.app):
- config.SUPPORTED_LOCALES = [
- 'en_US', 'fr_FR', 'zh_Hans_CN', 'ar', 'nb_NO']
- i18n.setup_app(app, translation_dirs=config.TEMP_DIR)
- self.verify_i18n(app)
- finally:
- if supported:
- config.SUPPORTED_LOCALES = supported
+ fake_config = self.get_fake_config()
+ fake_config.SUPPORTED_LOCALES = [
+ 'en_US', 'fr_FR', 'zh_Hans_CN', 'ar', 'nb_NO']
+ fake_config.TRANSLATION_DIRS = config.TEMP_DIR
+ for app in (journalist_app.create_app(fake_config),
+ source_app.create_app(fake_config)):
+ assert i18n.LOCALES == fake_config.SUPPORTED_LOCALES
+ self.verify_i18n(app)
def test_verify_default_locale_en_us_if_not_defined_in_config(self):
- DEFAULT_LOCALE = config.DEFAULT_LOCALE
- try:
- del config.DEFAULT_LOCALE
- not_translated = 'code hello i18n'
- with source.app.test_client() as c:
- c.get('/')
- assert not_translated == gettext(not_translated)
- finally:
- config.DEFAULT_LOCALE = DEFAULT_LOCALE
+ class Config:
+ def __getattr__(self, name):
+ if name == 'DEFAULT_LOCALE':
+ raise AttributeError()
+ return getattr(config, name)
+ not_translated = 'code hello i18n'
+ with source_app.create_app(Config()).test_client() as c:
+ c.get('/')
+ assert not_translated == gettext(not_translated)
def test_locale_to_rfc_5646(self):
assert i18n.locale_to_rfc_5646('en') == 'en'
@@ -245,12 +252,13 @@ def test_locale_to_rfc_5646(self):
assert i18n.locale_to_rfc_5646('zh-hant') == 'zh-Hant'
def test_html_en_lang_correct(self):
- app = journalist.app.test_client()
+ fake_config = self.get_fake_config()
+ app = journalist_app.create_app(fake_config).test_client()
resp = app.get('/', follow_redirects=True)
html = resp.data.decode('utf-8')
assert re.compile('<html .*lang="en".*>').search(html), html
- app = source.app.test_client()
+ app = source_app.create_app(fake_config).test_client()
resp = app.get('/', follow_redirects=True)
html = resp.data.decode('utf-8')
assert re.compile('<html .*lang="en".*>').search(html), html
@@ -262,12 +270,14 @@ def test_html_en_lang_correct(self):
def test_html_fr_lang_correct(self):
"""Check that when the locale is fr_FR the lang property is correct"""
- app = journalist.app.test_client()
+ fake_config = self.get_fake_config()
+ fake_config.SUPPORTED_LOCALES = ['fr_FR', 'en_US']
+ app = journalist_app.create_app(fake_config).test_client()
resp = app.get('/?l=fr_FR', follow_redirects=True)
html = resp.data.decode('utf-8')
assert re.compile('<html .*lang="fr".*>').search(html), html
- app = source.app.test_client()
+ app = source_app.create_app(fake_config).test_client()
resp = app.get('/?l=fr_FR', follow_redirects=True)
html = resp.data.decode('utf-8')
assert re.compile('<html .*lang="fr".*>').search(html), html
@@ -276,8 +286,3 @@ def test_html_fr_lang_correct(self):
resp = app.get('/generate?l=fr_FR', follow_redirects=True)
html = resp.data.decode('utf-8')
assert re.compile('<html .*lang="fr".*>').search(html), html
-
- @classmethod
- def teardown_class(cls):
- reload(journalist)
- reload(source)
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -18,6 +18,7 @@
Submission)
import db
import journalist
+import journalist_app
import journalist_app.utils
import utils
@@ -50,7 +51,10 @@ def tearDown(self):
@patch('crypto_util.genrandomid', side_effect=['bad', VALID_PASSWORD])
def test_make_password(self, mocked_pw_gen):
- assert journalist_app.utils.make_password() == VALID_PASSWORD
+ class fake_config:
+ pass
+ assert (journalist_app.utils.make_password(fake_config) ==
+ VALID_PASSWORD)
@patch('journalist.app.logger.error')
def test_reply_error_logging(self, mocked_error_logger):
@@ -1188,29 +1192,46 @@ def test_col_process_successfully_unstars_sources(self):
# Verify the source is not starred
self.assertFalse(source_1.star.starred)
+
+class TestJournalistLocale(TestCase):
+
+ def setUp(self):
+ utils.env.setup()
+
+ # Patch the two-factor verification to avoid intermittent errors
+ utils.db_helper.mock_verify_token(self)
+
+ # Setup test user
+ self.user, self.user_pw = utils.db_helper.init_journalist()
+
+ def tearDown(self):
+ utils.env.teardown()
+
+ def get_fake_config(self):
+ class Config:
+ def __getattr__(self, name):
+ return getattr(config, name)
+ return Config()
+
+ # A method required by flask_testing.TestCase
+ def create_app(self):
+ fake_config = self.get_fake_config()
+ fake_config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
+ return journalist_app.create_app(fake_config)
+
def test_render_locales(self):
"""the locales.html template must collect both request.args (l=XX) and
request.view_args (/<filesystem_id>) to build the URL to
change the locale
"""
- supported = getattr(config, 'SUPPORTED_LOCALES', None)
- try:
- if supported:
- del config.SUPPORTED_LOCALES
- config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
-
- source, _ = utils.db_helper.init_source()
- self._login_user()
-
- url = url_for('col.col', filesystem_id=source.filesystem_id)
- resp = self.client.get(url + '?l=fr_FR')
- self.assertNotIn('?l=fr_FR', resp.data)
- self.assertIn(url + '?l=en_US', resp.data)
+ source, _ = utils.db_helper.init_source()
+ self._ctx.g.user = self.user
- finally:
- if supported:
- config.SUPPORTED_LOCALES = supported
+ url = url_for('col.col', filesystem_id=source.filesystem_id)
+ resp = self.client.get(url + '?l=fr_FR')
+ self.assertNotIn('?l=fr_FR', resp.data)
+ self.assertIn(url + '?l=en_US', resp.data)
class TestJournalistLogin(unittest.TestCase):
@@ -1250,9 +1271,3 @@ def test_login_with_invalid_password_doesnt_call_scrypt(self,
self.assertFalse(
mock_scrypt_hash.called,
"Called _scrypt_hash for password w/ invalid length")
-
- @classmethod
- def tearDownClass(cls):
- # Reset the module variables that were changed to mocks so we don't
- # break other tests
- reload(journalist)
diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
--- a/securedrop/tests/test_template_filters.py
+++ b/securedrop/tests/test_template_filters.py
@@ -9,15 +9,21 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import i18n
-import journalist
+import journalist_app
import manage
-import source
+import source_app
import template_filters
import version
class TestTemplateFilters(object):
+ def get_fake_config(self):
+ class Config:
+ def __getattr__(self, name):
+ return getattr(config, name)
+ return Config()
+
def verify_rel_datetime_format(self, app):
with app.test_client() as c:
c.get('/')
@@ -104,21 +110,11 @@ def test_filters(self):
pybabel init -i {d}/messages.pot -d {d} -l fr_FR
""".format(d=config.TEMP_DIR))
- supported = getattr(config, 'SUPPORTED_LOCALES', None)
- try:
- if supported:
- del config.SUPPORTED_LOCALES
- for app in (journalist.app, source.app):
- config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
- app.config['BABEL_TRANSLATION_DIRECTORIES'] = config.TEMP_DIR
- i18n.setup_app(app)
- self.verify_filesizeformat(app)
- self.verify_rel_datetime_format(app)
- finally:
- if supported:
- config.SUPPORTED_LOCALES = supported
-
- @classmethod
- def teardown_class(cls):
- reload(journalist)
- reload(source)
+ fake_config = self.get_fake_config()
+ fake_config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
+ fake_config.TRANSLATION_DIRS = config.TEMP_DIR
+ for app in (journalist_app.create_app(fake_config),
+ source_app.create_app(fake_config)):
+ assert i18n.LOCALES == fake_config.SUPPORTED_LOCALES
+ self.verify_filesizeformat(app)
+ self.verify_rel_datetime_format(app)
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -62,6 +62,7 @@ def teardown():
if t.is_alive() and not isinstance(t, threading._MainThread):
t.join()
db_session.remove()
+ shutil.rmtree(config.TEMP_DIR)
try:
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
assert not os.path.exists(config.SECUREDROP_DATA_ROOT) # safeguard for #844
| Disable languages that are not supported
# Bug
## Description
The source and journalist interface can be switched to a non-supported language by:
* l=fr_FR : this is ok as the person doing that knows details that are unlikely to confuse anyone
* Accept-Languages is set to fr_FR : this is not good because it will lead the person into thinking French is supported
## Steps to Reproduce
* Set the SUPPORTED_LOCALES to ['en_US'] only
* Change the preferred language of your browser to French and visit the source interface
## Expected Behavior
* The page displays in English
## Actual Behavior
* The page displays in French
| Expected behavior is as described in the release notes (https://securedrop.org/news/securedrop-05-released), so I agree about it being a bug. | 2017-12-06T18:41:46Z | [] | [] |
freedomofpress/securedrop | 2,689 | freedomofpress__securedrop-2689 | [
"2656",
"2500",
"2460"
] | 365dc82bf3a8d44150e5885d95239c1faff98668 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.4.4'
+version = '0.5'
# The full version, including alpha/beta/rc tags.
-release = '0.4.4'
+release = '0.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/install_files/ansible-base/action_plugins/synchronize.py b/install_files/ansible-base/action_plugins/synchronize.py
deleted file mode 100644
--- a/install_files/ansible-base/action_plugins/synchronize.py
+++ /dev/null
@@ -1,415 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# (c) 2012-2013, Timothy Appnel <[email protected]>
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os.path
-from collections import MutableSequence
-
-from ansible import constants as C
-from ansible.module_utils.six import string_types
-from ansible.module_utils._text import to_text
-from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
-from ansible.plugins.action import ActionBase
-from ansible.plugins import connection_loader
-
-boolean = C.mk_boolean
-
-
-class ActionModule(ActionBase):
-
- def _get_absolute_path(self, path):
- original_path = path
-
- if path.startswith('rsync://'):
- return path
-
- if self._task._role is not None:
- path = self._loader.path_dwim_relative(self._task._role._role_path, 'files', path)
- else:
- path = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', path)
-
- if original_path and original_path[-1] == '/' and path[-1] != '/':
- # make sure the dwim'd path ends in a trailing "/"
- # if the original path did
- path += '/'
-
- return path
-
- def _host_is_ipv6_address(self, host):
- return ':' in host
-
- def _format_rsync_rsh_target(self, host, path, user):
- ''' formats rsync rsh target, escaping ipv6 addresses if needed '''
-
- user_prefix = ''
-
- if path.startswith('rsync://'):
- return path
-
- # If using docker, do not add user information
- if self._remote_transport not in [ 'docker' ] and user:
- user_prefix = '%s@' % (user, )
-
- if self._host_is_ipv6_address(host):
- return '[%s%s]:%s' % (user_prefix, host, path)
- else:
- return '%s%s:%s' % (user_prefix, host, path)
-
- def _process_origin(self, host, path, user):
-
- if host not in C.LOCALHOST:
- return self._format_rsync_rsh_target(host, path, user)
-
- if ':' not in path and not path.startswith('/'):
- path = self._get_absolute_path(path=path)
- return path
-
- def _process_remote(self, task_args, host, path, user, port_matches_localhost_port):
- """
- :arg host: hostname for the path
- :arg path: file path
- :arg user: username for the transfer
- :arg port_matches_localhost_port: boolean whether the remote port
- matches the port used by localhost's sshd. This is used in
- conjunction with seeing whether the host is localhost to know
- if we need to have the module substitute the pathname or if it
- is a different host (for instance, an ssh tunnelled port or an
- alternative ssh port to a vagrant host.)
- """
- transport = self._connection.transport
- # If we're connecting to a remote host or we're delegating to another
- # host or we're connecting to a different ssh instance on the
- # localhost then we have to format the path as a remote rsync path
- if host not in C.LOCALHOST or transport != "local" or \
- (host in C.LOCALHOST and not port_matches_localhost_port):
- # If we're delegating to non-localhost and but the
- # inventory_hostname host is localhost then we need the module to
- # fix up the rsync path to use the controller's public DNS/IP
- # instead of "localhost"
- if port_matches_localhost_port and host in C.LOCALHOST:
- task_args['_substitute_controller'] = True
- return self._format_rsync_rsh_target(host, path, user)
-
- if ':' not in path and not path.startswith('/'):
- path = self._get_absolute_path(path=path)
- return path
-
- def _override_module_replaced_vars(self, task_vars):
- """ Some vars are substituted into the modules. Have to make sure
- that those are correct for localhost when synchronize creates its own
- connection to localhost."""
-
- # Clear the current definition of these variables as they came from the
- # connection to the remote host
- if 'ansible_syslog_facility' in task_vars:
- del task_vars['ansible_syslog_facility']
- for key in list(task_vars.keys()):
- if key.startswith("ansible_") and key.endswith("_interpreter"):
- del task_vars[key]
-
- # Add the definitions from localhost
- for host in C.LOCALHOST:
- if host in task_vars['hostvars']:
- localhost = task_vars['hostvars'][host]
- break
- if 'ansible_syslog_facility' in localhost:
- task_vars['ansible_syslog_facility'] = localhost['ansible_syslog_facility']
- for key in localhost:
- if key.startswith("ansible_") and key.endswith("_interpreter"):
- task_vars[key] = localhost[key]
-
- def run(self, tmp=None, task_vars=None):
- ''' generates params and passes them on to the rsync module '''
- # When modifying this function be aware of the tricky convolutions
- # your thoughts have to go through:
- #
- # In normal ansible, we connect from controller to inventory_hostname
- # (playbook's hosts: field) or controller to delegate_to host and run
- # a module on one of those hosts.
- #
- # So things that are directly related to the core of ansible are in
- # terms of that sort of connection that always originate on the
- # controller.
- #
- # In synchronize we use ansible to connect to either the controller or
- # to the delegate_to host and then run rsync which makes its own
- # connection from controller to inventory_hostname or delegate_to to
- # inventory_hostname.
- #
- # That means synchronize needs to have some knowledge of the
- # controller to inventory_host/delegate host that ansible typically
- # establishes and use those to construct a command line for rsync to
- # connect from the inventory_host to the controller/delegate. The
- # challenge for coders is remembering which leg of the trip is
- # associated with the conditions that you're checking at any one time.
- if task_vars is None:
- task_vars = dict()
-
- # We make a copy of the args here because we may fail and be asked to
- # retry. If that happens we don't want to pass the munged args through
- # to our next invocation. Munged args are single use only.
- _tmp_args = self._task.args.copy()
-
- result = super(ActionModule, self).run(tmp, task_vars)
-
- # Store remote connection type
- self._remote_transport = self._connection.transport
-
- # Handle docker connection options
- if self._remote_transport == 'docker':
- self._docker_cmd = self._connection.docker_cmd
- if self._play_context.docker_extra_args:
- self._docker_cmd = "%s %s" % (self._docker_cmd, self._play_context.docker_extra_args)
-
- # self._connection accounts for delegate_to so
- # remote_transport is the transport ansible thought it would need
- # between the controller and the delegate_to host or the controller
- # and the remote_host if delegate_to isn't set.
-
- remote_transport = False
- if self._connection.transport != 'local':
- remote_transport = True
-
- try:
- delegate_to = self._task.delegate_to
- except (AttributeError, KeyError):
- delegate_to = None
-
- # ssh paramiko docker and local are fully supported transports. Anything
- # else only works with delegate_to
- if delegate_to is None and self._connection.transport not in ('ssh', 'paramiko', 'local', 'docker'):
- result['failed'] = True
- result['msg'] = ("synchronize uses rsync to function. rsync needs to connect to the remote host via ssh, docker client or a direct filesystem "
- "copy. This remote host is being accessed via %s instead so it cannot work." % self._connection.transport)
- return result
-
- use_ssh_args = _tmp_args.pop('use_ssh_args', None)
-
- # Parameter name needed by the ansible module
- _tmp_args['_local_rsync_path'] = task_vars.get('ansible_rsync_path') or 'rsync'
-
- # rsync thinks that one end of the connection is localhost and the
- # other is the host we're running the task for (Note: We use
- # ansible's delegate_to mechanism to determine which host rsync is
- # running on so localhost could be a non-controller machine if
- # delegate_to is used)
- src_host = '127.0.0.1'
- inventory_hostname = task_vars.get('inventory_hostname')
- dest_host_inventory_vars = task_vars['hostvars'].get(inventory_hostname)
- try:
- dest_host = dest_host_inventory_vars['ansible_host']
- except KeyError:
- dest_host = dest_host_inventory_vars.get('ansible_ssh_host', inventory_hostname)
-
- dest_host_ids = [hostid for hostid in (dest_host_inventory_vars.get('inventory_hostname'),
- dest_host_inventory_vars.get('ansible_host'),
- dest_host_inventory_vars.get('ansible_ssh_host'))
- if hostid is not None]
-
- localhost_ports = set()
- for host in C.LOCALHOST:
- localhost_vars = task_vars['hostvars'].get(host, {})
- for port_var in MAGIC_VARIABLE_MAPPING['port']:
- port = localhost_vars.get(port_var, None)
- if port:
- break
- else:
- port = C.DEFAULT_REMOTE_PORT
- localhost_ports.add(port)
-
- # dest_is_local tells us if the host rsync runs on is the same as the
- # host rsync puts the files on. This is about *rsync's connection*,
- # not about the ansible connection to run the module.
- dest_is_local = False
- if delegate_to is None and remote_transport is False:
- dest_is_local = True
- elif delegate_to is not None and delegate_to in dest_host_ids:
- dest_is_local = True
-
- # CHECK FOR NON-DEFAULT SSH PORT
- inv_port = task_vars.get('ansible_ssh_port', None) or C.DEFAULT_REMOTE_PORT
- if _tmp_args.get('dest_port', None) is None:
- if inv_port is not None:
- _tmp_args['dest_port'] = inv_port
-
- # Set use_delegate if we are going to run rsync on a delegated host
- # instead of localhost
- use_delegate = False
- if delegate_to is not None and delegate_to in dest_host_ids:
- # edge case: explicit delegate and dest_host are the same
- # so we run rsync on the remote machine targeting its localhost
- # (itself)
- dest_host = '127.0.0.1'
- use_delegate = True
- elif delegate_to is not None and remote_transport:
- # If we're delegating to a remote host then we need to use the
- # delegate_to settings
- use_delegate = True
-
- # Delegate to localhost as the source of the rsync unless we've been
- # told (via delegate_to) that a different host is the source of the
- # rsync
- if not use_delegate and remote_transport:
- # Create a connection to localhost to run rsync on
- new_stdin = self._connection._new_stdin
-
- # Unike port, there can be only one shell
- localhost_shell = None
- for host in C.LOCALHOST:
- localhost_vars = task_vars['hostvars'].get(host, {})
- for shell_var in MAGIC_VARIABLE_MAPPING['shell']:
- localhost_shell = localhost_vars.get(shell_var, None)
- if localhost_shell:
- break
- if localhost_shell:
- break
- else:
- localhost_shell = os.path.basename(C.DEFAULT_EXECUTABLE)
- self._play_context.shell = localhost_shell
-
- # Unike port, there can be only one executable
- localhost_executable = None
- for host in C.LOCALHOST:
- localhost_vars = task_vars['hostvars'].get(host, {})
- for executable_var in MAGIC_VARIABLE_MAPPING['executable']:
- localhost_executable = localhost_vars.get(executable_var, None)
- if localhost_executable:
- break
- if localhost_executable:
- break
- else:
- localhost_executable = C.DEFAULT_EXECUTABLE
- self._play_context.executable = localhost_executable
-
- new_connection = connection_loader.get('local', self._play_context, new_stdin)
- self._connection = new_connection
- self._override_module_replaced_vars(task_vars)
-
- # SWITCH SRC AND DEST HOST PER MODE
- if _tmp_args.get('mode', 'push') == 'pull':
- (dest_host, src_host) = (src_host, dest_host)
-
- # MUNGE SRC AND DEST PER REMOTE_HOST INFO
- src = _tmp_args.get('src', None)
- dest = _tmp_args.get('dest', None)
- if src is None or dest is None:
- return dict(failed=True,
- msg="synchronize requires both src and dest parameters are set")
-
- if not dest_is_local:
- # Private key handling
- private_key = self._play_context.private_key_file
-
- if private_key is not None:
- private_key = os.path.expanduser(private_key)
- _tmp_args['private_key'] = private_key
-
- # Src and dest rsync "path" handling
- # Determine if we need a user@
- user = None
- if boolean(_tmp_args.get('set_remote_user', 'yes')):
- if use_delegate:
- user = task_vars.get('ansible_delegated_vars', dict()).get('ansible_ssh_user', None)
- if not user:
- user = C.DEFAULT_REMOTE_USER
-
- else:
- user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
-
- # use the mode to define src and dest's url
- if _tmp_args.get('mode', 'push') == 'pull':
- # src is a remote path: <user>@<host>, dest is a local path
- src = self._process_remote(_tmp_args, src_host, src, user, inv_port in localhost_ports)
- dest = self._process_origin(dest_host, dest, user)
- else:
- # src is a local path, dest is a remote path: <user>@<host>
- src = self._process_origin(src_host, src, user)
- dest = self._process_remote(_tmp_args, dest_host, dest, user, inv_port in localhost_ports)
- else:
- # Still need to munge paths (to account for roles) even if we aren't
- # copying files between hosts
- if not src.startswith('/'):
- src = self._get_absolute_path(path=src)
- if not dest.startswith('/'):
- dest = self._get_absolute_path(path=dest)
-
- _tmp_args['src'] = src
- _tmp_args['dest'] = dest
-
- # Allow custom rsync path argument
- rsync_path = _tmp_args.get('rsync_path', None)
-
- # backup original become as we are probably about to unset it
- become = self._play_context.become
-
- if not dest_is_local:
- # don't escalate for docker. doing --rsync-path with docker exec fails
- # and we can switch directly to the user via docker arguments
- if self._play_context.become and not rsync_path and self._remote_transport != 'docker':
- # If no rsync_path is set, become was originally set, and dest is
- # remote then add privilege escalation here.
- if self._play_context.become_method == 'sudo':
- rsync_path = "'sudo rsync'"
- # TODO: have to add in the rest of the become methods here
-
- # We cannot use privilege escalation on the machine running the
- # module. Instead we run it on the machine rsync is connecting
- # to.
- self._play_context.become = False
-
- _tmp_args['rsync_path'] = rsync_path
-
- if use_ssh_args:
- ssh_args = [
- getattr(self._play_context, 'ssh_args', ''),
- getattr(self._play_context, 'ssh_common_args', ''),
- getattr(self._play_context, 'ssh_extra_args', ''),
- ]
- _tmp_args['ssh_args'] = ' '.join([a for a in ssh_args if a])
-
- # If launching synchronize against docker container
- # use rsync_opts to support container to override rsh options
- if self._remote_transport in [ 'docker' ]:
- # Replicate what we do in the module argumentspec handling for lists
- if not isinstance(_tmp_args.get('rsync_opts'), MutableSequence):
- tmp_rsync_opts = _tmp_args.get('rsync_opts', [])
- if isinstance(tmp_rsync_opts, string_types):
- tmp_rsync_opts = tmp_rsync_opts.split(',')
- elif isinstance(tmp_rsync_opts, (int, float)):
- tmp_rsync_opts = [to_text(tmp_rsync_opts)]
- _tmp_args['rsync_opts'] = tmp_rsync_opts
-
- if '--blocking-io' not in _tmp_args['rsync_opts']:
- _tmp_args['rsync_opts'].append('--blocking-io')
- if become and self._play_context.become_user:
- _tmp_args['rsync_opts'].append("--rsh='%s exec -u %s -i'" % (self._docker_cmd, self._play_context.become_user))
- elif user is not None:
- _tmp_args['rsync_opts'].append("--rsh='%s exec -u %s -i'" % (self._docker_cmd, user))
- else:
- _tmp_args['rsync_opts'].append("--rsh='%s exec -i'" % self._docker_cmd)
-
- # run the module and store the result
- result.update(self._execute_module('synchronize', module_args=_tmp_args, task_vars=task_vars))
-
- if 'SyntaxError' in result.get('exception', result.get('msg', '')):
- # Emit a warning about using python3 because synchronize is
- # somewhat unique in running on localhost
- result['exception'] = result['msg']
- result['msg'] = ('SyntaxError parsing module. Perhaps invoking "python" on your local (or delegate_to) machine invokes python3. '
- 'You can set ansible_python_interpreter for localhost (or the delegate_to machine) to the location of python2 to fix this')
- return result
diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
--- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
@@ -1,6 +1,8 @@
#!/usr/bin/python
+import grp
import os
+import pwd
import sys
import subprocess
@@ -13,6 +15,8 @@
path_torrc_additions = '/home/amnesia/Persistent/.securedrop/torrc_additions'
path_torrc_backup = '/etc/tor/torrc.bak'
path_torrc = '/etc/tor/torrc'
+path_desktop = '/home/amnesia/Desktop/'
+path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/'
# load torrc_additions
if os.path.isfile(path_torrc_additions):
@@ -48,7 +52,29 @@
'/org/gnome/nautilus/preferences/automatic-decompression',
'false'])
-# notify the user
+# Set journalist.desktop and source.desktop links as trusted with Nautilus (see
+# https://github.com/freedomofpress/securedrop/issues/2586)
+# set euid and env variables to amnesia user
+amnesia_gid = grp.getgrnam('amnesia').gr_gid
+amnesia_uid = pwd.getpwnam('amnesia').pw_uid
+os.setresgid(amnesia_gid, amnesia_gid, -1)
+os.setresuid(amnesia_uid, amnesia_uid, -1)
+env = os.environ.copy()
+env['XDG_RUNTIME_DIR'] = '/run/user/{}'.format(amnesia_uid)
+env['XDG_DATA_DIR'] = '/usr/share/gnome:/usr/local/share/:/usr/share/'
+env['HOME'] = '/home/amnesia'
+env['LOGNAME'] = 'amnesia'
+env['DBUS_SESSION_BUS_ADDRESS'] = 'unix:path=/run/user/{}/bus'.format(amnesia_uid)
+
+# remove existing shortcut, recreate symlink and change metadata attribute to trust .desktop
+for shortcut in ['source.desktop', 'journalist.desktop']:
+ subprocess.call(['rm', path_desktop + shortcut], env=env)
+ subprocess.call(['ln', '-s', path_persistent_desktop + shortcut, path_desktop + shortcut], env=env)
+ subprocess.call(['gio', 'set', path_desktop + shortcut, 'metadata::trusted', 'yes'], env=env)
+
+# reacquire uid0 and notify the user
+os.setresuid(0,0,-1)
+os.setresgid(0,0,-1)
subprocess.call(['tails-notify-user',
'SecureDrop successfully auto-configured!',
'You can now access the Journalist Interface.\nIf you are an admin, you can now SSH to the servers.'])
diff --git a/migration_scripts/0.2.1/0.2.1_collect.py b/migration_scripts/0.2.1/0.2.1_collect.py
deleted file mode 100755
--- a/migration_scripts/0.2.1/0.2.1_collect.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/python2.7
-"""
-This script should be copied to a running SecureDrop 0.2.1 instance. When run
-(as root), it collects all of the necessary information to migrate the system
-to 0.3 and stores it in the .tar.gz file specified in the first argument.
-"""
-
-import sys
-import os
-import re
-import tarfile
-
-
-# Arbitrarily pick the source chroot jail (doesn't matter)
-securedrop_root = "/var/chroot/source/var/www/securedrop"
-
-
-def collect_config_file(backup):
- config_file = os.path.join(securedrop_root, "config.py")
- backup.add(config_file)
- return config_file
-
-
-def collect_securedrop_root(backup):
- # The store and key dirs are shared between the chroot jails in
- # 0.2.1, and are both linked from /var/securedrop
- securedrop_root = "/var/securedrop"
- backup.add(securedrop_root)
- return securedrop_root
-
-
-def collect_database(backup):
- # Copy the db file, which is only present in the journalist interface's
- # chroot jail in 0.2.1
- db_file = "/var/chroot/document/var/www/securedrop/db.sqlite"
- backup.add(db_file)
- return db_file
-
-
-def collect_custom_header_image(backup):
- # 0.2.1's deployment didn't actually use
- # config.CUSTOM_HEADER_IMAGE - it just overwrote the default
- # header image, `static/i/securedrop.png`.
- header_image = os.path.join(securedrop_root, "static/i/securedrop.png")
- backup.add(header_image)
- return header_image
-
-
-def collect_tor_files(backup):
- tor_files = [
- "/etc/tor/torrc",
- "/var/lib/tor/hidden_service/client_keys",
- "/var/chroot/source/var/lib/tor/hidden_service/private_key",
- "/var/chroot/document/var/lib/tor/hidden_service/client_keys",
- ]
- collected = []
- for tor_file in tor_files:
- # Since the 0.2.1 install process was occasionally somewaht ad
- # hoc, the SSH ATHS was not always set up. We treat that as a
- # non-fatal error and continue.
- if (not os.path.isfile(tor_file)
- and tor_file == "/var/lib/tor/hidden_service/client_keys"):
- print ("[!] Warning: expected file '{}' not found. "
- "Continuing anyway.".format(tor_file))
- continue
- backup.add(tor_file)
- collected.append(tor_file)
-
- return ', '.join(collected)
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.2.1_collect.py <backup filename>"
- sys.exit(1)
-
- backup_filename = sys.argv[1]
- if not backup_filename.endswith(".tar.gz"):
- backup_filename += ".tar.gz"
-
- backup_fns = [
- collect_config_file,
- collect_securedrop_root,
- collect_database,
- collect_custom_header_image,
- collect_tor_files
- ]
-
- print "Backing up..."
- with tarfile.open(backup_filename, 'w:gz') as backup:
- for fn in backup_fns:
- print "[+] Collected {}".format(fn(backup))
-
- print "Done!"
-
-if __name__ == "__main__":
- main()
diff --git a/migration_scripts/0.2.1/0.3_migrate.py b/migration_scripts/0.2.1/0.3_migrate.py
deleted file mode 100755
--- a/migration_scripts/0.2.1/0.3_migrate.py
+++ /dev/null
@@ -1,296 +0,0 @@
-#!/usr/bin/python2.7
-"""
-This script should be copied to a running SecureDrop 0.3 instance, along with
-the output of `0.2.1_collect.py`. When run (as root), it migrates all of the
-information from the 0.2.1 instance to create a matching 0.3 instance.
-"""
-
-import sys
-import os
-import re
-import tarfile
-import subprocess
-import sqlite3
-import shutil
-from datetime import datetime
-from operator import itemgetter
-import calendar
-import traceback
-
-
-def migrate_config_file(backup):
- print "* Migrating values from old config file..."
-
- # Back up new config just in case something goes horribly wrong
- config_fn = "/var/www/securedrop/config.py"
- shutil.copy(config_fn, config_fn + '.backup')
-
- # Substitute values in new config with values from old config
- old_config = backup.extractfile('var/chroot/source/var/www/securedrop/config.py').read()
- new_config = open(config_fn, 'r').read()
- subs = [
- (r"JOURNALIST_KEY=('.*')", r"^(JOURNALIST_KEY = )('.*')"),
- (r"SCRYPT_ID_PEPPER=('.*')", r"^(SCRYPT_ID_PEPPER = )('.*')"),
- (r"SCRYPT_GPG_PEPPER=('.*')", r"^(SCRYPT_GPG_PEPPER = )('.*')")
- ]
- for sub in subs:
- old_value_repl = r"\1{}".format(re.search(sub[0], old_config).groups()[0])
- new_config = re.sub(sub[1], old_value_repl, new_config, flags=re.MULTILINE)
-
- # Write out migrated config
- with open(config_fn, 'w') as config:
- config.write(new_config)
-
- # Restart Apache so the web application picks up the changes to config.py
- subprocess.call(["service", "apache2", "restart"])
-
-
-def extract_tree_to(tar, selector, dest):
- # http://stackoverflow.com/a/15171308/1093000
- if type(selector) is str:
- prefix = selector
- selector = lambda m: m.name.startswith(prefix)
- members = [m for m in tar.getmembers() if selector(m)]
- for m in members:
- m.name = m.name[len(prefix):]
- tar.extractall(path=dest, members=members)
-
-
-def extract_file_to(tar, src, dst):
- src_member = tar.getmember(src)
- # Hack member name to change where it gets extracted to
- src_member.name = dst
- tar.extract(src_member)
-
-
-def migrate_securedrop_root(backup):
- print "* Migrating directories from SECUREDROP_ROOT..."
- extract_tree_to(backup, "var/securedrop/", "/var/lib/securedrop")
- subprocess.call(['chown', '-R', 'www-data:www-data', "/var/lib/securedrop"])
-
-
-def migrate_database(backup):
- print "* Migrating database..."
-
- # Get the sources table from the 0.2.1 instance's db
- old_db = backup.getmember("var/chroot/document/var/www/securedrop/db.sqlite")
- old_db.name = "db.old.sqlite"
- backup.extract(old_db)
- conn = sqlite3.connect("db.old.sqlite")
- c = conn.cursor()
- sources = c.execute("SELECT * FROM sources").fetchall()
- os.remove("db.old.sqlite")
-
- # Fill in the rest of the sources. Since sources were only added to the
- # database if their codename was changed by the journalist, we need to fill
- # in the rest by examining all of the filesystem designations in the source
- # directory and re-generating the codenames.
- #
- # Note: Must be called after /var/lib/securedrop/store is populated
- from old_crypto_util import displayid
- # Generate a list of the filesystem ids that have journalist designations
- # stored in the database, since they are already known and should not be
- # generated from the filesystem id
- already_processed = set([source[0] for source in sources])
- for fs_id in os.listdir("/var/lib/securedrop/store"):
- if fs_id in already_processed:
- continue
- sources.append((fs_id, displayid(fs_id)))
-
- # Import current application's config so we can easily populate the db
- sys.path.append("/var/www/securedrop")
- import config
- from db import Source, Journalist, Submission, Reply, db_session, init_db
-
- # We need to be able to link replies to the Journalist that sent
- # them. Since this information was not recorded in 0.2.1, we
- # arbitrarily say all replies were sent by an arbitrary journalist
- # that is present on this system. Since this information is not
- # currently exposed in the UI, this does not create a problem (for
- # now).
- if len(Journalist.query.all()) == 0:
- print "!!! FATAL: You must create a journalist account before running this migration."
- print " Run ./manage.py add_admin and try again."
- sys.exit(1)
- else:
- arbitrary_journalist = Journalist.query.all()[0]
-
- # Back up current database just in case
- shutil.copy("/var/lib/securedrop/db.sqlite",
- "/var/lib/securedrop/db.sqlite.bak")
-
- # Copied from db.py to compute filesystem-safe journalist filenames
- def journalist_filename(s):
- valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in s.lower().replace(' ', '_') if c in valid_chars])
-
- # Migrate rows to new database with SQLAlchemy ORM
- for source in sources:
- migrated_source = Source(source[0], source[1])
- source_dir = os.path.join("/var/lib/securedrop/store", source[0])
-
- # It appears that there was a bug in 0.2.1 where sources with changed
- # names were not always successfully removed from the database. Skip
- # any sources that didn't have files copied for them, they were deleted
- # and are in the database erroneously.
- if not os.path.isdir(source_dir):
- continue
-
- # Can infer "flagged" state by looking for _FLAG files in store
- if "_FLAG" in os.listdir(source_dir):
- # Mark the migrated source as flagged
- migrated_source.flagged = True
- # Delete the _FLAG file
- os.remove(os.path.join(source_dir, "_FLAG"))
-
- # Sort the submissions by the date of submission so we can infer the
- # correct interaction_count for the new filenames later, and so we can
- # set source.last_updated to the time of the most recently uploaded
- # submission in the store now.
- submissions = []
- replies = []
- for fn in os.listdir(source_dir):
- append_to = submissions
- if fn.startswith('reply-'):
- append_to = replies
- append_to.append((fn, os.path.getmtime(os.path.join(source_dir, fn))))
-
- # Sort by submission time
- submissions.sort(key=itemgetter(1))
- replies.sort(key=itemgetter(1))
-
- if len(submissions) > 0:
- migrated_source.last_updated = datetime.utcfromtimestamp(submissions[-1][1])
- else:
- # The source will have the default .last_updated of utcnow(), which
- # might be a little confusing, but it's the best we can do.
- pass
-
- # Since the concept of "pending" is introduced in 0.3, it's tricky to
- # figure out how to set this value. We can't distinguish between sources
- # who created an account but never submitted anything and sources who
- # had been active, but didn't have any stored submissions or replies at
- # the time of migration.
- #
- # After having explored the options, I think the best thing to do here
- # is set pending to True if there are no submissions or replies. Sources
- # who created an account but never submitted anything won't create noise
- # in the list, and sources who are active can probably be expected to
- # log back in relatively soon and so will automatially reappear once
- # they submit something new.
- if len(submissions + replies) == 0:
- migrated_source.pending = True
- else:
- migrated_source.pending = False
-
- # Set source.interaction_count to the number of current submissions for
- # each source. This is not technicially correct, but since we can't
- # know how many submissions have been deleted it will give us a
- # reasonable, monotonically increasing basis for future increments to
- # the interaction_count.
- migrated_source.interaction_count = len(submissions) + len(replies)
-
- # Add and commit the source to the db so they will have a primary key
- # assigned to use with the ForeignKey relationship with their
- # submissions
- db_session.add(migrated_source)
- db_session.commit()
-
- # Combine everything into one list, sorted by date, so we can
- # correctly set the interaction counts for each file.
- everything = submissions + replies
- everything.sort(key=itemgetter(1))
- for count, item in enumerate(everything):
- # Rename the file to fit the new file naming scheme used by 0.3
- fn = item[0]
-
- if fn.startswith('reply-'):
- new_fn = "{0}-{1}-reply.gpg".format(count+1, journalist_filename(source[1]))
- else:
- new_fn = "{0}-{1}-{2}".format(count+1, journalist_filename(source[1]), "msg.gpg" if fn.endswith("msg.gpg") else "doc.zip.gpg")
-
- # Move to the new filename
- os.rename(os.path.join(source_dir, fn),
- os.path.join(source_dir, new_fn))
-
- # Add a database entry for this item
- db_entry = None
-
- if fn.startswith('reply-'):
- migrated_reply = Reply(arbitrary_journalist, migrated_source, new_fn)
- db_entry = migrated_reply
- else:
- migrated_submission = Submission(migrated_source, new_fn)
- # Assume that all submissions that are being migrated
- # have already been downloaded
- migrated_submission.downloaded = True
- db_entry = migrated_submission
-
- db_session.add(db_entry)
- db_session.commit()
-
- # chown the database file to the securedrop user
- subprocess.call(['chown', 'www-data:www-data', "/var/lib/securedrop/db.sqlite"])
-
-
-def migrate_custom_header_image(backup):
- print "* Migrating custom header image..."
- extract_file_to(backup,
- "var/chroot/source/var/www/securedrop/static/i/securedrop.png",
- "/var/www/securedrop/static/i/logo.png")
- subprocess.call(['chown', '-R', 'www-data:www-data', "/var/www/securedrop/static/i/logo.png"])
-
-
-def migrate_tor_files(backup):
- print "* Migrating source interface .onion..."
-
- tor_root_dir = "/var/lib/tor"
- ths_root_dir = os.path.join(tor_root_dir, "services")
-
- # For now, we're going to re-provision the monitor and SSH
- # hidden services. The only hidden service whose address
- # we want to maintain is the source interface. Modify the
- # code below to migrate other hidden services as well.
-
- # Restore source interface hidden sevice key to maintain the original
- # .onion address
- source_ths_dir = os.path.join(ths_root_dir, "source")
-
- # Delete the files created by ansible
- for fn in os.listdir(source_ths_dir):
- os.remove(os.path.join(source_ths_dir, fn))
-
- # Extract the original source interface THS key
- extract_file_to(backup,
- "var/chroot/source/var/lib/tor/hidden_service/private_key",
- os.path.join(source_ths_dir, "private_key"))
-
- # chmod the files so they're owned by debian-tor:debian-tor
- subprocess.call(['chown', '-R', 'debian-tor:debian-tor', source_ths_dir])
- # Reload Tor to trigger registering the migrated Tor Hidden Service address
- subprocess.call(['service', 'tor', 'reload'])
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.3_migrate.py <backup filename>"
- sys.exit(1)
-
- try:
- backup_fn = sys.argv[1]
- with tarfile.open(backup_fn, 'r:*') as backup:
- migrate_config_file(backup)
- migrate_securedrop_root(backup)
- migrate_database(backup)
- migrate_custom_header_image(backup)
- migrate_tor_files(backup)
- except SystemExit as e:
- pass
- except:
- print "\n!!! Something went wrong, please file an issue.\n"
- print traceback.format_exc()
- else:
- print "Done!"
-
-if __name__ == "__main__":
- main()
diff --git a/migration_scripts/0.2.1/old_crypto_util.py b/migration_scripts/0.2.1/old_crypto_util.py
deleted file mode 100644
--- a/migration_scripts/0.2.1/old_crypto_util.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-# Minimal set of functions and variables from 0.2.1's crypto_util.py needed to
-# regenerate journalist designations from soure's filesystem id's.
-import os
-import random as badrandom
-
-# Find the absolute path relative to this file so this script can be run
-# anywhere
-SRC_DIR = os.path.dirname(os.path.realpath(__file__))
-
-nouns = file(os.path.join(SRC_DIR, "nouns.txt")).read().split('\n')
-adjectives = file(os.path.join(SRC_DIR, "adjectives.txt")).read().split('\n')
-
-
-def displayid(n):
- badrandom_value = badrandom.WichmannHill()
- badrandom_value.seed(n)
- return badrandom_value.choice(
- adjectives) + " " + badrandom_value.choice(nouns)
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -35,7 +35,8 @@
# Make sure these pass before the app can run
# TODO: Add more tests
def do_runtime_tests():
- assert(config.SCRYPT_ID_PEPPER != config.SCRYPT_GPG_PEPPER)
+ if config.SCRYPT_ID_PEPPER == config.SCRYPT_GPG_PEPPER:
+ raise AssertionError('SCRYPT_ID_PEPPER == SCRYPT_GPG_PEPPER')
# crash if we don't have srm:
try:
subprocess.check_call(['srm'], stdout=subprocess.PIPE)
@@ -47,7 +48,8 @@ def do_runtime_tests():
gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
-words = open(config.WORD_LIST).read().rstrip('\n').split('\n')
+# map code for a given language to a localized wordlist
+language2words = {}
nouns = open(config.NOUNS).read().rstrip('\n').split('\n')
adjectives = open(config.ADJECTIVES).read().rstrip('\n').split('\n')
@@ -58,10 +60,10 @@ class CryptoException(Exception):
def clean(s, also=''):
"""
- >>> clean("Hello, world!")
+ >>> clean("[]")
Traceback (most recent call last):
...
- CryptoException: invalid input: Hello, world!
+ CryptoException: invalid input: []
>>> clean("Helloworld")
'Helloworld'
"""
@@ -77,8 +79,38 @@ def clean(s, also=''):
return str(s)
-def genrandomid(words_in_random_id=DEFAULT_WORDS_IN_RANDOM_ID):
- return ' '.join(random.choice(words) for x in range(words_in_random_id))
+def _get_wordlist(locale):
+ """" Ensure the wordlist for the desired locale is read and available
+ in the words global variable. If there is no wordlist for the
+    desired locale, fall back to the default English wordlist.
+
+ The localized wordlist are read from wordlists/{locale}.txt but
+ for backward compatibility purposes the english wordlist is read
+ from the config.WORD_LIST file.
+
+ """
+
+ if locale not in language2words:
+ if locale != 'en':
+ path = os.path.join(config.SECUREDROP_ROOT,
+ 'wordlists',
+ locale + '.txt')
+ if os.path.exists(path):
+ wordlist_path = path
+ else:
+ wordlist_path = config.WORD_LIST
+ else:
+ wordlist_path = config.WORD_LIST
+
+ language2words[locale] = open(
+ wordlist_path).read().rstrip('\n').split('\n')
+
+ return language2words[locale]
+
+
+def genrandomid(words_in_random_id=DEFAULT_WORDS_IN_RANDOM_ID, locale='en'):
+ return ' '.join(random.choice(_get_wordlist(locale))
+ for x in range(words_in_random_id))
def display_id():
diff --git a/securedrop/i18n.py b/securedrop/i18n.py
new file mode 100644
--- /dev/null
+++ b/securedrop/i18n.py
@@ -0,0 +1,163 @@
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+from flask import request, session
+from flask_babel import Babel
+from babel import core
+
+import collections
+import config
+import os
+import re
+
+from os import path
+
+LOCALE_SPLIT = re.compile('(-|_)')
+LOCALES = set(['en_US'])
+babel = None
+
+
+class LocaleNotFound(Exception):
+
+ """Raised when the desired locale is not in the translations directory"""
+
+
+def setup_app(app, translation_dirs=None):
+ global babel
+
+ if translation_dirs is None:
+ translation_dirs = \
+ path.join(path.dirname(path.realpath(__file__)),
+ 'translations')
+
+ # `babel.translation_directories` is a nightmare
+ # We need to set this manually via an absolute path
+ app.config['BABEL_TRANSLATION_DIRECTORIES'] = translation_dirs
+
+ babel = Babel(app)
+ if len(list(babel.translation_directories)) != 1:
+ raise AssertionError(
+ 'Expected exactly one translation directory but got {}.'
+ .format(babel.translation_directories))
+
+ for dirname in os.listdir(next(babel.translation_directories)):
+ if dirname != 'messages.pot':
+ LOCALES.add(dirname)
+
+ babel.localeselector(get_locale)
+
+
+def get_locale():
+ """
+ Get the locale as follows, by order of precedence:
+ - l request argument or session['locale']
+ - browser suggested locale, from the Accept-Languages header
+ - config.DEFAULT_LOCALE
+ - 'en_US'
+ """
+ locale = None
+ accept_languages = []
+ for l in request.accept_languages.values():
+ if '-' in l:
+ sep = '-'
+ else:
+ sep = '_'
+ try:
+ accept_languages.append(str(core.Locale.parse(l, sep)))
+ except:
+ pass
+ if 'l' in request.args:
+ if len(request.args['l']) == 0:
+ if 'locale' in session:
+ del session['locale']
+ locale = core.negotiate_locale(accept_languages, LOCALES)
+ else:
+ locale = core.negotiate_locale([request.args['l']], LOCALES)
+ session['locale'] = locale
+ else:
+ if 'locale' in session:
+ locale = session['locale']
+ else:
+ locale = core.negotiate_locale(accept_languages, LOCALES)
+
+ if locale:
+ return locale
+ else:
+ return getattr(config, 'DEFAULT_LOCALE', 'en_US')
+
+
+def get_text_direction(locale):
+ return core.Locale.parse(locale).text_direction
+
+
+def _get_supported_locales(locales, supported, default_locale):
+ """Return SUPPORTED_LOCALES from config.py. If it is missing return
+ the default locale.
+
+ """
+
+ if not supported:
+ return [default_locale or 'en_US']
+ unsupported = set(supported) - set(locales)
+ if unsupported:
+ raise LocaleNotFound(
+ "config.py SUPPORTED_LOCALES contains {} which is not among the "
+ "locales found in the {} directory: {}".format(
+ list(unsupported),
+ babel.translation_directories,
+ locales))
+ if default_locale and default_locale not in supported:
+ raise LocaleNotFound("config.py SUPPORTED_LOCALES contains {} "
+ "which does not include "
+ "the value of DEFAULT_LOCALE '{}'".format(
+ supported, default_locale))
+
+ return supported
+
+
+NAME_OVERRIDES = {
+ 'nb_NO': 'norsk',
+}
+
+
+def get_locale2name():
+ locales = _get_supported_locales(
+ LOCALES,
+ getattr(config, 'SUPPORTED_LOCALES', None),
+ getattr(config, 'DEFAULT_LOCALE', None))
+ locale2name = collections.OrderedDict()
+ for l in locales:
+ if l in NAME_OVERRIDES:
+ locale2name[l] = NAME_OVERRIDES[l]
+ else:
+ locale = core.Locale.parse(l)
+ locale2name[l] = locale.languages[locale.language]
+ return locale2name
+
+
+def locale_to_rfc_5646(locale):
+ lower = locale.lower()
+ if 'hant' in lower:
+ return 'zh-Hant'
+ elif 'hans' in lower:
+ return 'zh-Hans'
+ else:
+ return LOCALE_SPLIT.split(locale)[0]
+
+
+def get_language():
+ return get_locale().split('_')[0]
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -1,812 +1,10 @@
# -*- coding: utf-8 -*-
-from datetime import datetime
-import functools
-
-from flask import (Flask, request, render_template, send_file, redirect, flash,
- url_for, g, abort, session)
-from flask_wtf.csrf import CSRFProtect
-from flask_assets import Environment
-from jinja2 import Markup
-from sqlalchemy.orm.exc import NoResultFound
-from sqlalchemy.exc import IntegrityError
-from sqlalchemy.sql.expression import false
-
import config
-import version
-import crypto_util
-import store
-import template_filters
-from db import (db_session, Source, Journalist, Submission, Reply,
- SourceStar, get_one_or_else, LoginThrottledException,
- PasswordError, InvalidUsernameException)
-import worker
-
-app = Flask(__name__, template_folder=config.JOURNALIST_TEMPLATES_DIR)
-app.config.from_object(config.JournalistInterfaceFlaskConfig)
-CSRFProtect(app)
-
-assets = Environment(app)
-
-app.jinja_env.globals['version'] = version.__version__
-if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
- app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
- app.jinja_env.globals['use_custom_header_image'] = True
-else:
- app.jinja_env.globals['header_image'] = 'logo.png'
- app.jinja_env.globals['use_custom_header_image'] = False
-
-app.jinja_env.filters['datetimeformat'] = template_filters.datetimeformat
-
-
[email protected]_appcontext
-def shutdown_session(exception=None):
- """Automatically remove database sessions at the end of the request, or
- when the application shuts down"""
- db_session.remove()
-
-
-def get_source(filesystem_id):
- """Return a Source object, representing the database row, for the source
- with the `filesystem_id`"""
- source = None
- query = Source.query.filter(Source.filesystem_id == filesystem_id)
- source = get_one_or_else(query, app.logger, abort)
-
- return source
-
-
[email protected]_request
-def setup_g():
- """Store commonly used values in Flask's special g object"""
- uid = session.get('uid', None)
- if uid:
- g.user = Journalist.query.get(uid)
-
- if request.method == 'POST':
- filesystem_id = request.form.get('filesystem_id')
- if filesystem_id:
- g.filesystem_id = filesystem_id
- g.source = get_source(filesystem_id)
-
-
-def logged_in():
- # When a user is logged in, we push their user id (database primary key)
- # into the session. setup_g checks for this value, and if it finds it,
- # stores a reference to the user's Journalist object in g.
- #
- # This check is good for the edge case where a user is deleted but still
- # has an active session - we will not authenticate a user if they are not
- # in the database.
- return bool(g.get('user', None))
-
-
-def login_required(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- if not logged_in():
- return redirect(url_for('login'))
- return func(*args, **kwargs)
- return wrapper
-
-
-def admin_required(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- if logged_in() and g.user.is_admin:
- return func(*args, **kwargs)
- # TODO: sometimes this gets flashed 2x (Chrome only?)
- flash("You must be an administrator to access that page",
- "notification")
- return redirect(url_for('index'))
- return wrapper
-
-
[email protected]('/login', methods=('GET', 'POST'))
-def login():
- if request.method == 'POST':
- try:
- user = Journalist.login(request.form['username'],
- request.form['password'],
- request.form['token'])
- except Exception as e:
- app.logger.error("Login for '{}' failed: {}".format(
- request.form['username'], e))
- login_flashed_msg = "Login failed."
-
- if isinstance(e, LoginThrottledException):
- login_flashed_msg += (
- " Please wait at least {} seconds "
- "before logging in again.".format(
- Journalist._LOGIN_ATTEMPT_PERIOD))
- else:
- try:
- user = Journalist.query.filter_by(
- username=request.form['username']).one()
- if user.is_totp:
- login_flashed_msg += (
- " Please wait for a new two-factor token"
- " before logging in again.")
- except:
- pass
-
- flash(login_flashed_msg, "error")
- else:
- app.logger.info("Successful login for '{}' with token {}".format(
- request.form['username'], request.form['token']))
-
- # Update access metadata
- user.last_access = datetime.utcnow()
- db_session.add(user)
- db_session.commit()
-
- session['uid'] = user.id
- return redirect(url_for('index'))
-
- return render_template("login.html")
-
-
[email protected]('/logout')
-def logout():
- session.pop('uid', None)
- return redirect(url_for('index'))
-
-
[email protected]('/admin', methods=('GET', 'POST'))
-@admin_required
-def admin_index():
- users = Journalist.query.all()
- return render_template("admin.html", users=users)
-
-
[email protected]('/admin/add', methods=('GET', 'POST'))
-@admin_required
-def admin_add_user():
- if request.method == 'POST':
- form_valid = True
- username = request.form['username']
-
- password = request.form['password']
- is_admin = bool(request.form.get('is_admin'))
-
- if form_valid:
- try:
- otp_secret = None
- if request.form.get('is_hotp', False):
- otp_secret = request.form.get('otp_secret', '')
- new_user = Journalist(username=username,
- password=password,
- is_admin=is_admin,
- otp_secret=otp_secret)
- db_session.add(new_user)
- db_session.commit()
- except PasswordError:
- flash('There was an error with the autogenerated password. '
- 'User not created. Please try again.', 'error')
- form_valid = False
- except InvalidUsernameException as e:
- form_valid = False
- flash('Invalid username: ' + str(e), "error")
- except IntegrityError as e:
- db_session.rollback()
- form_valid = False
- if "UNIQUE constraint failed: journalists.username" in str(e):
- flash("That username is already in use",
- "error")
- else:
- flash("An error occurred saving this user to the database."
- " Please check the application logs.",
- "error")
- app.logger.error("Adding user '{}' failed: {}".format(
- username, e))
-
- if form_valid:
- return redirect(url_for('admin_new_user_two_factor',
- uid=new_user.id))
-
- return render_template("admin_add_user.html", password=_make_password())
-
-
[email protected]('/admin/2fa', methods=('GET', 'POST'))
-@admin_required
-def admin_new_user_two_factor():
- user = Journalist.query.get(request.args['uid'])
-
- if request.method == 'POST':
- token = request.form['token']
- if user.verify_token(token):
- flash(
- "Two-factor token successfully verified for user {}!".format(
- user.username),
- "notification")
- return redirect(url_for("admin_index"))
- else:
- flash("Two-factor token failed to verify", "error")
-
- return render_template("admin_new_user_two_factor.html", user=user)
-
-
[email protected]('/admin/reset-2fa-totp', methods=['POST'])
-@admin_required
-def admin_reset_two_factor_totp():
- uid = request.form['uid']
- user = Journalist.query.get(uid)
- user.is_totp = True
- user.regenerate_totp_shared_secret()
- db_session.commit()
- return redirect(url_for('admin_new_user_two_factor', uid=uid))
-
-
[email protected]('/admin/reset-2fa-hotp', methods=['POST'])
-@admin_required
-def admin_reset_two_factor_hotp():
- uid = request.form['uid']
- otp_secret = request.form.get('otp_secret', None)
- if otp_secret:
- user = Journalist.query.get(uid)
- try:
- user.set_hotp_secret(otp_secret)
- except TypeError as e:
- if "Non-hexadecimal digit found" in str(e):
- flash("Invalid secret format: "
- "please only submit letters A-F and numbers 0-9.",
- "error")
- elif "Odd-length string" in str(e):
- flash("Invalid secret format: "
- "odd-length secret. Did you mistype the secret?",
- "error")
- else:
- flash("An unexpected error occurred! "
- "Please check the application "
- "logs or inform your adminstrator.", "error")
- app.logger.error(
- "set_hotp_secret '{}' (id {}) failed: {}".format(
- otp_secret, uid, e))
- return render_template('admin_edit_hotp_secret.html', uid=uid)
- else:
- db_session.commit()
- return redirect(url_for('admin_new_user_two_factor', uid=uid))
- else:
- return render_template('admin_edit_hotp_secret.html', uid=uid)
-
-
-class PasswordMismatchError(Exception):
- pass
-
-
-def commit_account_changes(user):
- if db_session.is_modified(user):
- try:
- db_session.add(user)
- db_session.commit()
- except Exception as e:
- flash("An unexpected error occurred! Please check the application "
- "logs or inform your adminstrator.", "error")
- app.logger.error("Account changes for '{}' failed: {}".format(user,
- e))
- db_session.rollback()
- else:
- flash("Account successfully updated!", "success")
-
-
[email protected]('/admin/edit/<int:user_id>', methods=('GET', 'POST'))
-@admin_required
-def admin_edit_user(user_id):
- user = Journalist.query.get(user_id)
-
- if request.method == 'POST':
- if request.form['username']:
- new_username = request.form['username']
-
- try:
- Journalist.check_username_acceptable(new_username)
- except InvalidUsernameException as e:
- flash('Invalid username: ' + str(e), 'error')
- return redirect(url_for("admin_edit_user", user_id=user_id))
-
- if new_username == user.username:
- pass
- elif Journalist.query.filter_by(
- username=new_username).one_or_none():
- flash('Username "{}" is already taken!'.format(new_username),
- "error")
- return redirect(url_for("admin_edit_user", user_id=user_id))
- else:
- user.username = new_username
-
- user.is_admin = bool(request.form.get('is_admin'))
-
- commit_account_changes(user)
-
- password = _make_password()
- return render_template("edit_account.html", user=user,
- password=password)
-
-
[email protected]('/admin/edit/<int:user_id>/new-password', methods=('POST',))
-@admin_required
-def admin_set_diceware_password(user_id):
- try:
- user = Journalist.query.get(user_id)
- except NoResultFound:
- abort(404)
-
- password = request.form.get('password')
- _set_diceware_password(user, password)
- return redirect(url_for('admin_edit_user', user_id=user_id))
-
-
[email protected]('/admin/delete/<int:user_id>', methods=('POST',))
-@admin_required
-def admin_delete_user(user_id):
- user = Journalist.query.get(user_id)
- if user:
- db_session.delete(user)
- db_session.commit()
- flash("Deleted user '{}'".format(user.username), "notification")
- else:
- app.logger.error(
- "Admin {} tried to delete nonexistent user with pk={}".format(
- g.user.username, user_id))
- abort(404)
-
- return redirect(url_for('admin_index'))
-
-
[email protected]('/account', methods=('GET',))
-@login_required
-def edit_account():
- password = _make_password()
- return render_template('edit_account.html',
- password=password)
-
-
[email protected]('/account/new-password', methods=['POST'])
-@login_required
-def new_password():
- user = g.user
- password = request.form.get('password')
- _set_diceware_password(user, password)
- return redirect(url_for('edit_account'))
-
-
[email protected]('/admin/edit/<int:user_id>/new-password', methods=('POST',))
-@admin_required
-def admin_new_password(user_id):
- try:
- user = Journalist.query.get(user_id)
- except NoResultFound:
- abort(404)
-
- password = request.form.get('password')
- _set_diceware_password(user, password)
- return redirect(url_for('admin_edit_user', user_id=user_id))
-
-
-def _make_password():
- while True:
- password = crypto_util.genrandomid(7)
- try:
- Journalist.check_password_acceptable(password)
- return password
- except PasswordError:
- continue
-
-
-def _set_diceware_password(user, password):
- try:
- user.set_password(password)
- except PasswordError:
- flash('You submitted a bad password! Password not changed.', 'error')
- return
-
- try:
- db_session.commit()
- except Exception:
- flash('There was an error, and the new password might not have been '
- 'saved correctly. To prevent you from getting locked '
- 'out of your account, you should reset your password again.',
- 'error')
- app.logger.error('Failed to update a valid password.')
- return
-
- # using Markup so the HTML isn't escaped
- flash(Markup("<p>The password was successfully updated! Don't forget to "
- 'save it in your KeePassX database. The new password is: '
- '<span><code>{}</code></span></p>'.format(password)),
- 'success')
-
-
[email protected]('/account/2fa', methods=('GET', 'POST'))
-@login_required
-def account_new_two_factor():
- if request.method == 'POST':
- token = request.form['token']
- if g.user.verify_token(token):
- flash("Two-factor token successfully verified!", "notification")
- return redirect(url_for('edit_account'))
- else:
- flash("Two-factor token failed to verify", "error")
-
- return render_template('account_new_two_factor.html', user=g.user)
-
-
[email protected]('/account/reset-2fa-totp', methods=['POST'])
-@login_required
-def account_reset_two_factor_totp():
- g.user.is_totp = True
- g.user.regenerate_totp_shared_secret()
- db_session.commit()
- return redirect(url_for('account_new_two_factor'))
-
-
[email protected]('/account/reset-2fa-hotp', methods=['POST'])
-@login_required
-def account_reset_two_factor_hotp():
- otp_secret = request.form.get('otp_secret', None)
- if otp_secret:
- g.user.set_hotp_secret(otp_secret)
- db_session.commit()
- return redirect(url_for('account_new_two_factor'))
- else:
- return render_template('account_edit_hotp_secret.html')
-
-
-def make_star_true(filesystem_id):
- source = get_source(filesystem_id)
- if source.star:
- source.star.starred = True
- else:
- source_star = SourceStar(source)
- db_session.add(source_star)
-
-
-def make_star_false(filesystem_id):
- source = get_source(filesystem_id)
- if not source.star:
- source_star = SourceStar(source)
- db_session.add(source_star)
- db_session.commit()
- source.star.starred = False
-
-
[email protected]('/col/add_star/<filesystem_id>', methods=('POST',))
-@login_required
-def add_star(filesystem_id):
- make_star_true(filesystem_id)
- db_session.commit()
- return redirect(url_for('index'))
-
-
[email protected]("/col/remove_star/<filesystem_id>", methods=('POST',))
-@login_required
-def remove_star(filesystem_id):
- make_star_false(filesystem_id)
- db_session.commit()
- return redirect(url_for('index'))
-
-
[email protected]('/')
-@login_required
-def index():
- unstarred = []
- starred = []
-
- # Long SQLAlchemy statements look best when formatted according to
- # the Pocoo style guide, IMHO:
- # http://www.pocoo.org/internal/styleguide/
- sources = Source.query.filter_by(pending=False) \
- .order_by(Source.last_updated.desc()) \
- .all()
- for source in sources:
- star = SourceStar.query.filter_by(source_id=source.id).first()
- if star and star.starred:
- starred.append(source)
- else:
- unstarred.append(source)
- source.num_unread = len(
- Submission.query.filter_by(source_id=source.id,
- downloaded=False).all())
-
- return render_template('index.html', unstarred=unstarred, starred=starred)
-
-
[email protected]('/col/<filesystem_id>')
-@login_required
-def col(filesystem_id):
- source = get_source(filesystem_id)
- source.has_key = crypto_util.getkey(filesystem_id)
- return render_template("col.html", filesystem_id=filesystem_id,
- source=source)
-
-
-def delete_collection(filesystem_id):
- # Delete the source's collection of submissions
- job = worker.enqueue(store.delete_source_directory, filesystem_id)
-
- # Delete the source's reply keypair
- crypto_util.delete_reply_keypair(filesystem_id)
-
- # Delete their entry in the db
- source = get_source(filesystem_id)
- db_session.delete(source)
- db_session.commit()
- return job
-
-
[email protected]('/col/process', methods=('POST',))
-@login_required
-def col_process():
- actions = {'download-unread': col_download_unread,
- 'download-all': col_download_all, 'star': col_star,
- 'un-star': col_un_star, 'delete': col_delete}
- if 'cols_selected' not in request.form:
- flash('No collections selected!', 'error')
- return redirect(url_for('index'))
-
- # getlist is cgi.FieldStorage.getlist
- cols_selected = request.form.getlist('cols_selected')
- action = request.form['action']
-
- if action not in actions:
- return abort(500)
-
- method = actions[action]
- return method(cols_selected)
-
-
-def col_download_unread(cols_selected):
- """Download all unread submissions from all selected sources."""
- submissions = []
- for filesystem_id in cols_selected:
- id = Source.query.filter(Source.filesystem_id == filesystem_id) \
- .one().id
- submissions += Submission.query.filter(
- Submission.downloaded == false(),
- Submission.source_id == id).all()
- if submissions == []:
- flash("No unread submissions in collections selected!", "error")
- return redirect(url_for('index'))
- return download("unread", submissions)
-
-
-def col_download_all(cols_selected):
- """Download all submissions from all selected sources."""
- submissions = []
- for filesystem_id in cols_selected:
- id = Source.query.filter(Source.filesystem_id == filesystem_id) \
- .one().id
- submissions += Submission.query.filter(
- Submission.source_id == id).all()
- return download("all", submissions)
-
-
-def col_star(cols_selected):
- for filesystem_id in cols_selected:
- make_star_true(filesystem_id)
-
- db_session.commit()
- return redirect(url_for('index'))
-
-
-def col_un_star(cols_selected):
- for filesystem_id in cols_selected:
- make_star_false(filesystem_id)
-
- db_session.commit()
- return redirect(url_for('index'))
-
-
[email protected]('/col/delete/<filesystem_id>', methods=('POST',))
-@login_required
-def col_delete_single(filesystem_id):
- """deleting a single collection from its /col page"""
- source = get_source(filesystem_id)
- delete_collection(filesystem_id)
- flash(
- "%s's collection deleted" %
- (source.journalist_designation,), "notification")
- return redirect(url_for('index'))
-
-
-def col_delete(cols_selected):
- """deleting multiple collections from the index"""
- if len(cols_selected) < 1:
- flash("No collections selected to delete!", "error")
- else:
- for filesystem_id in cols_selected:
- delete_collection(filesystem_id)
- flash("%s %s deleted" % (
- len(cols_selected),
- "collection" if len(cols_selected) == 1 else "collections"
- ), "notification")
-
- return redirect(url_for('index'))
-
-
[email protected]('/col/<filesystem_id>/<fn>')
-@login_required
-def download_single_submission(filesystem_id, fn):
- """Sends a client the contents of a single submission."""
- if '..' in fn or fn.startswith('/'):
- abort(404)
-
- try:
- Submission.query.filter(
- Submission.filename == fn).one().downloaded = True
- db_session.commit()
- except NoResultFound as e:
- app.logger.error("Could not mark " + fn + " as downloaded: %s" % (e,))
-
- return send_file(store.path(filesystem_id, fn),
- mimetype="application/pgp-encrypted")
-
-
[email protected]('/reply', methods=('POST',))
-@login_required
-def reply():
- """Attempt to send a Reply from a Journalist to a Source. Empty
- messages are rejected, and an informative error message is flashed
- on the client. In the case of unexpected errors involving database
- transactions (potentially caused by racing request threads that
- modify the same the database object) logging is done in such a way
- so as not to write potentially sensitive information to disk, and a
- generic error message is flashed on the client.
-
- Returns:
- flask.Response: The user is redirected to the same Source
- collection view, regardless if the Reply is created
- successfully.
- """
- msg = request.form['msg']
- # Reject empty replies
- if not msg:
- flash("You cannot send an empty reply!", "error")
- return redirect(url_for('col', filesystem_id=g.filesystem_id))
-
- g.source.interaction_count += 1
- filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count,
- g.source.journalist_filename)
- crypto_util.encrypt(msg,
- [crypto_util.getkey(g.filesystem_id),
- config.JOURNALIST_KEY],
- output=store.path(g.filesystem_id, filename))
- reply = Reply(g.user, g.source, filename)
-
- try:
- db_session.add(reply)
- db_session.commit()
- except Exception as exc:
- flash("An unexpected error occurred! Please check the application "
- "logs or inform your adminstrator.", "error")
- # We take a cautious approach to logging here because we're dealing
- # with responses to sources. It's possible the exception message could
- # contain information we don't want to write to disk.
- app.logger.error(
- "Reply from '{}' (id {}) failed: {}!".format(g.user.username,
- g.user.id,
- exc.__class__))
- else:
- flash("Thanks! Your reply has been stored.", "notification")
- finally:
- return redirect(url_for('col', filesystem_id=g.filesystem_id))
-
-
[email protected]('/regenerate-code', methods=('POST',))
-@login_required
-def generate_code():
- original_journalist_designation = g.source.journalist_designation
- g.source.journalist_designation = crypto_util.display_id()
-
- for item in g.source.collection:
- item.filename = store.rename_submission(
- g.filesystem_id,
- item.filename,
- g.source.journalist_filename)
- db_session.commit()
-
- flash(
- "The source '%s' has been renamed to '%s'" %
- (original_journalist_designation,
- g.source.journalist_designation),
- "notification")
- return redirect('/col/' + g.filesystem_id)
-
-
[email protected]('/download_unread/<filesystem_id>')
-@login_required
-def download_unread_filesystem_id(filesystem_id):
- id = Source.query.filter(Source.filesystem_id == filesystem_id).one().id
- submissions = Submission.query.filter(
- Submission.source_id == id,
- Submission.downloaded == false()).all()
- if submissions == []:
- flash("No unread submissions for this source!")
- return redirect(url_for('col', filesystem_id=filesystem_id))
- source = get_source(filesystem_id)
- return download(source.journalist_filename, submissions)
-
-
[email protected]('/bulk', methods=('POST',))
-@login_required
-def bulk():
- action = request.form['action']
-
- doc_names_selected = request.form.getlist('doc_names_selected')
- selected_docs = [doc for doc in g.source.collection
- if doc.filename in doc_names_selected]
- if selected_docs == []:
- if action == 'download':
- flash("No collections selected to download!", "error")
- elif action in ('delete', 'confirm_delete'):
- flash("No collections selected to delete!", "error")
- return redirect(url_for('col', filesystem_id=g.filesystem_id))
-
- if action == 'download':
- source = get_source(g.filesystem_id)
- return download(source.journalist_filename, selected_docs)
- elif action == 'delete':
- return bulk_delete(g.filesystem_id, selected_docs)
- elif action == 'confirm_delete':
- return confirm_bulk_delete(g.filesystem_id, selected_docs)
- else:
- abort(400)
-
-
-def confirm_bulk_delete(filesystem_id, items_selected):
- return render_template('delete.html',
- filesystem_id=filesystem_id,
- source=g.source,
- items_selected=items_selected)
-
-
-def bulk_delete(filesystem_id, items_selected):
- for item in items_selected:
- item_path = store.path(filesystem_id, item.filename)
- worker.enqueue(store.secure_unlink, item_path)
- db_session.delete(item)
- db_session.commit()
-
- flash(
- "Submission{} deleted.".format(
- "s" if len(items_selected) > 1 else ""),
- "notification")
- return redirect(url_for('col', filesystem_id=filesystem_id))
-
-
-def download(zip_basename, submissions):
- """Send client contents of zipfile *zip_basename*-<timestamp>.zip
- containing *submissions*. The zipfile, being a
- :class:`tempfile.NamedTemporaryFile`, is stored on disk only
- temporarily.
-
- :param str zip_basename: The basename of the zipfile download.
-
- :param list submissions: A list of :class:`db.Submission`s to
- include in the zipfile.
- """
- zf = store.get_bulk_archive(submissions,
- zip_directory=zip_basename)
- attachment_filename = "{}--{}.zip".format(
- zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
-
- # Mark the submissions that have been downloaded as such
- for submission in submissions:
- submission.downloaded = True
- db_session.commit()
-
- return send_file(zf.name, mimetype="application/zip",
- attachment_filename=attachment_filename,
- as_attachment=True)
+from journalist_app import create_app
[email protected]('/flag', methods=('POST',))
-@login_required
-def flag():
- g.source.flagged = True
- db_session.commit()
- return render_template('flag.html', filesystem_id=g.filesystem_id,
- codename=g.source.journalist_designation)
+app = create_app(config)
if __name__ == "__main__": # pragma: no cover
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/__init__.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+
+from datetime import datetime, timedelta
+from flask import Flask, session, redirect, url_for, flash, g, request
+from flask_assets import Environment
+from flask_babel import gettext
+from flask_wtf.csrf import CSRFProtect, CSRFError
+from os import path
+
+import i18n
+import template_filters
+import version
+
+from db import db_session, Journalist
+from journalist_app import account, admin, main, col
+from journalist_app.utils import get_source, logged_in
+
+_insecure_views = ['main.login', 'static']
+
+
+def create_app(config):
+ app = Flask(__name__,
+ template_folder=config.JOURNALIST_TEMPLATES_DIR,
+ static_folder=path.join(config.SECUREDROP_ROOT, 'static'))
+
+ app.config.from_object(config.JournalistInterfaceFlaskConfig)
+
+ CSRFProtect(app)
+ Environment(app)
+
+ @app.errorhandler(CSRFError)
+ def handle_csrf_error(e):
+ # render the message first to ensure it's localized.
+ msg = gettext('You have been logged out due to inactivity')
+ session.clear()
+ flash(msg, 'error')
+ return redirect(url_for('main.login'))
+
+ i18n.setup_app(app)
+
+ app.jinja_env.trim_blocks = True
+ app.jinja_env.lstrip_blocks = True
+ app.jinja_env.globals['version'] = version.__version__
+ if hasattr(config, 'CUSTOM_HEADER_IMAGE'):
+ app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
+ app.jinja_env.globals['use_custom_header_image'] = True
+ else:
+ app.jinja_env.globals['header_image'] = 'logo.png'
+ app.jinja_env.globals['use_custom_header_image'] = False
+
+ app.jinja_env.filters['rel_datetime_format'] = \
+ template_filters.rel_datetime_format
+ app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
+
+ @app.teardown_appcontext
+ def shutdown_session(exception=None):
+ """Automatically remove database sessions at the end of the request, or
+ when the application shuts down"""
+ db_session.remove()
+
+ @app.before_request
+ def setup_g():
+ """Store commonly used values in Flask's special g object"""
+ if 'expires' in session and datetime.utcnow() >= session['expires']:
+ session.clear()
+ flash(gettext('You have been logged out due to inactivity'),
+ 'error')
+
+ session['expires'] = datetime.utcnow() + \
+ timedelta(minutes=getattr(config,
+ 'SESSION_EXPIRATION_MINUTES',
+ 120))
+
+ uid = session.get('uid', None)
+ if uid:
+ g.user = Journalist.query.get(uid)
+
+ g.locale = i18n.get_locale()
+ g.text_direction = i18n.get_text_direction(g.locale)
+ g.html_lang = i18n.locale_to_rfc_5646(g.locale)
+ g.locales = i18n.get_locale2name()
+
+ if request.endpoint not in _insecure_views and not logged_in():
+ return redirect(url_for('main.login'))
+
+ if request.method == 'POST':
+ filesystem_id = request.form.get('filesystem_id')
+ if filesystem_id:
+ g.filesystem_id = filesystem_id
+ g.source = get_source(filesystem_id)
+
+ app.register_blueprint(main.make_blueprint(config))
+ app.register_blueprint(account.make_blueprint(config),
+ url_prefix='/account')
+ app.register_blueprint(admin.make_blueprint(config), url_prefix='/admin')
+ app.register_blueprint(col.make_blueprint(config), url_prefix='/col')
+
+ return app
diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/account.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+
+from flask import (Blueprint, render_template, request, g, redirect, url_for,
+ flash)
+from flask_babel import gettext
+
+from db import db_session
+from journalist_app.utils import (make_password, set_diceware_password,
+ validate_user)
+
+
+def make_blueprint(config):
+ view = Blueprint('account', __name__)
+
+ @view.route('/account', methods=('GET',))
+ def edit():
+ password = make_password()
+ return render_template('edit_account.html',
+ password=password)
+
+ @view.route('/new-password', methods=('POST',))
+ def new_password():
+ user = g.user
+ current_password = request.form.get('current_password')
+ token = request.form.get('token')
+ error_message = gettext('Incorrect password or two-factor code.')
+ # If the user is validated, change their password
+ if validate_user(user.username, current_password, token,
+ error_message):
+ password = request.form.get('password')
+ set_diceware_password(user, password)
+ return redirect(url_for('account.edit'))
+
+ @view.route('/2fa', methods=('GET', 'POST'))
+ def new_two_factor():
+ if request.method == 'POST':
+ token = request.form['token']
+ if g.user.verify_token(token):
+ flash(gettext("Token in two-factor authentication verified."),
+ "notification")
+ return redirect(url_for('account.edit'))
+ else:
+ flash(gettext(
+ "Could not verify token in two-factor authentication."),
+ "error")
+
+ return render_template('account_new_two_factor.html', user=g.user)
+
+ @view.route('/reset-2fa-totp', methods=['POST'])
+ def reset_two_factor_totp():
+ g.user.is_totp = True
+ g.user.regenerate_totp_shared_secret()
+ db_session.commit()
+ return redirect(url_for('account.new_two_factor'))
+
+ @view.route('/reset-2fa-hotp', methods=['POST'])
+ def reset_two_factor_hotp():
+ otp_secret = request.form.get('otp_secret', None)
+ if otp_secret:
+ g.user.set_hotp_secret(otp_secret)
+ db_session.commit()
+ return redirect(url_for('account.new_two_factor'))
+ else:
+ return render_template('account_edit_hotp_secret.html')
+
+ return view
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/admin.py
@@ -0,0 +1,219 @@
+# -*- coding: utf-8 -*-
+
+from flask import (Blueprint, render_template, request, url_for, redirect, g,
+ current_app, flash, abort)
+from flask_babel import gettext
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.orm.exc import NoResultFound
+
+from db import (db_session, Journalist, InvalidUsernameException,
+ PasswordError)
+from journalist_app.decorators import admin_required
+from journalist_app.utils import (make_password, commit_account_changes,
+ set_diceware_password)
+from journalist_app.forms import NewUserForm
+
+
def make_blueprint(config):
    """Build the blueprint for the admin-only user management views.

    Every view is guarded by @admin_required. Fixes relative to the
    original: the rule '/edit/<int:user_id>/new-password' was registered
    twice (endpoints 'set_password' and 'new_password') with identical
    bodies -- only the first registration ever matched, so the duplicate
    is removed; and Journalist.query.get() returns None for a missing id
    (it never raises NoResultFound), so None is now checked explicitly
    instead of relying on a dead try/except.
    """
    view = Blueprint('admin', __name__)

    @view.route('/', methods=('GET', 'POST'))
    @admin_required
    def index():
        """List every journalist/admin account."""
        users = Journalist.query.all()
        return render_template("admin.html", users=users)

    @view.route('/add', methods=('GET', 'POST'))
    @admin_required
    def add_user():
        """Create a new account; on success continue to 2FA enrollment."""
        form = NewUserForm()
        if form.validate_on_submit():
            form_valid = True
            username = request.form['username']
            password = request.form['password']
            is_admin = bool(request.form.get('is_admin'))

            try:
                otp_secret = None
                if request.form.get('is_hotp', False):
                    otp_secret = request.form.get('otp_secret', '')
                new_user = Journalist(username=username,
                                      password=password,
                                      is_admin=is_admin,
                                      otp_secret=otp_secret)
                db_session.add(new_user)
                db_session.commit()
            except PasswordError:
                flash(gettext(
                    'There was an error with the autogenerated password. '
                    'User not created. Please try again.'), 'error')
                form_valid = False
            except InvalidUsernameException as e:
                form_valid = False
                flash('Invalid username: ' + str(e), "error")
            except IntegrityError as e:
                db_session.rollback()
                form_valid = False
                # Distinguish the common duplicate-username case from
                # other, unexpected integrity errors.
                if "UNIQUE constraint failed: journalists.username" in str(e):
                    flash(gettext("That username is already in use"),
                          "error")
                else:
                    flash(gettext("An error occurred saving this user"
                                  " to the database."
                                  " Please inform your administrator."),
                          "error")
                    current_app.logger.error("Adding user "
                                             "'{}' failed: {}".format(
                                                 username, e))

            if form_valid:
                return redirect(url_for('admin.new_user_two_factor',
                                        uid=new_user.id))

        return render_template("admin_add_user.html", password=make_password(),
                               form=form)

    @view.route('/2fa', methods=('GET', 'POST'))
    @admin_required
    def new_user_two_factor():
        """Verify a 2FA token for the user identified by ?uid=..."""
        user = Journalist.query.get(request.args['uid'])

        if request.method == 'POST':
            token = request.form['token']
            if user.verify_token(token):
                flash(gettext(
                    "Token in two-factor authentication "
                    "accepted for user {user}.").format(
                        user=user.username),
                    "notification")
                return redirect(url_for("admin.index"))
            else:
                flash(gettext(
                    "Could not verify token in two-factor authentication."),
                    "error")

        return render_template("admin_new_user_two_factor.html", user=user)

    @view.route('/reset-2fa-totp', methods=['POST'])
    @admin_required
    def reset_two_factor_totp():
        """Switch a user to TOTP and regenerate their shared secret."""
        uid = request.form['uid']
        user = Journalist.query.get(uid)
        user.is_totp = True
        user.regenerate_totp_shared_secret()
        db_session.commit()
        return redirect(url_for('admin.new_user_two_factor', uid=uid))

    @view.route('/reset-2fa-hotp', methods=['POST'])
    @admin_required
    def reset_two_factor_hotp():
        """Set a user's HOTP (YubiKey) secret, or re-show the form."""
        uid = request.form['uid']
        otp_secret = request.form.get('otp_secret', None)
        if otp_secret:
            user = Journalist.query.get(uid)
            try:
                user.set_hotp_secret(otp_secret)
            except TypeError as e:
                # set_hotp_secret hex-decodes the secret; map the two
                # known decode failures to friendly messages.
                if "Non-hexadecimal digit found" in str(e):
                    flash(gettext(
                        "Invalid secret format: "
                        "please only submit letters A-F and numbers 0-9."),
                        "error")
                elif "Odd-length string" in str(e):
                    flash(gettext(
                        "Invalid secret format: "
                        "odd-length secret. Did you mistype the secret?"),
                        "error")
                else:
                    flash(gettext(
                        "An unexpected error occurred! "
                        "Please inform your administrator."), "error")
                    current_app.logger.error(
                        "set_hotp_secret '{}' (id {}) failed: {}".format(
                            otp_secret, uid, e))
                return render_template('admin_edit_hotp_secret.html', uid=uid)
            else:
                db_session.commit()
                return redirect(url_for('admin.new_user_two_factor', uid=uid))
        else:
            return render_template('admin_edit_hotp_secret.html', uid=uid)

    @view.route('/edit/<int:user_id>', methods=('GET', 'POST'))
    @admin_required
    def edit_user(user_id):
        """Edit a user's username and admin flag; render the edit page."""
        user = Journalist.query.get(user_id)
        # Query.get returns None for an unknown id (it does not raise), so
        # guard explicitly instead of crashing on attribute access below.
        if user is None:
            abort(404)

        if request.method == 'POST':
            if request.form.get('username', None):
                new_username = request.form['username']

                try:
                    Journalist.check_username_acceptable(new_username)
                except InvalidUsernameException as e:
                    flash('Invalid username: ' + str(e), 'error')
                    return redirect(url_for("admin.edit_user",
                                            user_id=user_id))

                if new_username == user.username:
                    pass
                elif Journalist.query.filter_by(
                        username=new_username).one_or_none():
                    flash(gettext(
                        'Username "{user}" already taken.').format(
                            user=new_username),
                        "error")
                    return redirect(url_for("admin.edit_user",
                                            user_id=user_id))
                else:
                    user.username = new_username

            user.is_admin = bool(request.form.get('is_admin'))

            commit_account_changes(user)

        password = make_password()
        return render_template("edit_account.html", user=user,
                               password=password)

    @view.route('/delete/<int:user_id>', methods=('POST',))
    @admin_required
    def delete_user(user_id):
        """Delete a user; 404 when the id does not exist."""
        user = Journalist.query.get(user_id)
        if user:
            db_session.delete(user)
            db_session.commit()
            flash(gettext("Deleted user '{user}'").format(
                user=user.username), "notification")
        else:
            current_app.logger.error(
                "Admin {} tried to delete nonexistent user with pk={}".format(
                    g.user.username, user_id))
            abort(404)

        return redirect(url_for('admin.index'))

    @view.route('/edit/<int:user_id>/new-password', methods=('POST',))
    @admin_required
    def new_password(user_id):
        """Set a new (diceware) password for the given user."""
        # Query.get returns None rather than raising NoResultFound, so the
        # original try/except here was dead code; check for None instead.
        user = Journalist.query.get(user_id)
        if user is None:
            abort(404)

        password = request.form.get('password')
        set_diceware_password(user, password)
        return redirect(url_for('admin.edit_user', user_id=user_id))

    return view
diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/col.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+
+from flask import (Blueprint, redirect, url_for, render_template, flash,
+ request, abort, send_file, current_app)
+from flask_babel import gettext
+from sqlalchemy.orm.exc import NoResultFound
+
+import crypto_util
+import store
+
+from db import db_session, Submission
+from journalist_app.forms import ReplyForm
+from journalist_app.utils import (make_star_true, make_star_false, get_source,
+ delete_collection, col_download_unread,
+ col_download_all, col_star, col_un_star,
+ col_delete)
+
+
def make_blueprint(config):
    """Build the blueprint for per-collection ('col') views."""
    view = Blueprint('col', __name__)

    @view.route('/add_star/<filesystem_id>', methods=('POST',))
    def add_star(filesystem_id):
        """Star a single source and return to the index."""
        make_star_true(filesystem_id)
        db_session.commit()
        return redirect(url_for('main.index'))

    @view.route("/remove_star/<filesystem_id>", methods=('POST',))
    def remove_star(filesystem_id):
        """Un-star a single source and return to the index."""
        make_star_false(filesystem_id)
        db_session.commit()
        return redirect(url_for('main.index'))

    @view.route('/<filesystem_id>')
    def col(filesystem_id):
        """Show one source's collection of submissions."""
        form = ReplyForm()
        source = get_source(filesystem_id)
        # Attach the source's reply key (if any) so the template can tell
        # whether replying is possible yet.
        source.has_key = crypto_util.getkey(filesystem_id)
        return render_template("col.html", filesystem_id=filesystem_id,
                               source=source, form=form)

    @view.route('/delete/<filesystem_id>', methods=('POST',))
    def delete_single(filesystem_id):
        """Delete a single collection from its /col page."""
        source = get_source(filesystem_id)
        delete_collection(filesystem_id)
        flash(gettext("{source_name}'s collection deleted")
              .format(source_name=source.journalist_designation),
              "notification")
        return redirect(url_for('main.index'))

    @view.route('/process', methods=('POST',))
    def process():
        """Dispatch one of the bulk actions over the selected collections."""
        actions = {'download-unread': col_download_unread,
                   'download-all': col_download_all, 'star': col_star,
                   'un-star': col_un_star, 'delete': col_delete}
        if 'cols_selected' not in request.form:
            flash(gettext('No collections selected.'), 'error')
            return redirect(url_for('main.index'))

        # getlist is cgi.FieldStorage.getlist
        cols_selected = request.form.getlist('cols_selected')
        action = request.form['action']

        # An unknown action is a programming error, not user input error.
        if action not in actions:
            return abort(500)

        method = actions[action]
        return method(cols_selected)

    @view.route('/<filesystem_id>/<fn>')
    def download_single_submission(filesystem_id, fn):
        """Sends a client the contents of a single submission."""
        # Reject path traversal attempts in the requested filename.
        if '..' in fn or fn.startswith('/'):
            abort(404)

        # Mark the submission as downloaded; a missing row is logged but
        # does not prevent serving the file from the store.
        try:
            Submission.query.filter(
                Submission.filename == fn).one().downloaded = True
            db_session.commit()
        except NoResultFound as e:
            current_app.logger.error(
                "Could not mark " + fn + " as downloaded: %s" % (e,))

        return send_file(store.path(filesystem_id, fn),
                         mimetype="application/pgp-encrypted")

    return view
diff --git a/securedrop/journalist_app/decorators.py b/securedrop/journalist_app/decorators.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/decorators.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+
+from flask import redirect, url_for, flash, g
+from flask_babel import gettext
+from functools import wraps
+
+from journalist_app.utils import logged_in
+
+
def admin_required(func):
    """Decorator: allow the view only for logged-in administrators.

    Non-admins (and anonymous visitors) get a flashed notice and are
    redirected to the main index instead of seeing the wrapped view.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if not (logged_in() and g.user.is_admin):
            # TODO: sometimes this gets flashed 2x (Chrome only?)
            flash(gettext("Only administrators can access this page."),
                  "notification")
            return redirect(url_for('main.index'))
        return func(*args, **kwargs)
    return wrapper
diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/forms.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+from flask_babel import lazy_gettext
+from flask_wtf import FlaskForm
+from wtforms import (TextAreaField, TextField, BooleanField, HiddenField,
+ ValidationError)
+from wtforms.validators import InputRequired, Optional
+
+from db import Journalist
+
+
def otp_secret_validation(form, field):
    """Reject HOTP secrets that are not exactly 40 hex characters.

    Spaces are stripped first so the secret may be pasted in groups.
    """
    strip_whitespace = field.data.replace(' ', '')
    if len(strip_whitespace) != 40:
        # Translate the template first, *then* interpolate. The original
        # called .format() on the msgid before lazy_gettext(), so the
        # already-formatted string never matched a catalog entry and the
        # message was always shown untranslated.
        raise ValidationError(lazy_gettext(
            'Field must be 40 characters long but '
            'got {num_chars}.').format(
                num_chars=len(strip_whitespace)))
+
+
def minimum_length_validation(form, field):
    """Reject values shorter than Journalist.MIN_USERNAME_LEN."""
    if len(field.data) < Journalist.MIN_USERNAME_LEN:
        # Interpolate *after* lazy_gettext so the translation catalog is
        # consulted with the unformatted msgid (the original formatted
        # first, defeating translation lookup).
        raise ValidationError(
            lazy_gettext('Field must be at least {min_chars} '
                         'characters long but only got '
                         '{num_chars}.').format(
                             min_chars=Journalist.MIN_USERNAME_LEN,
                             num_chars=len(field.data)))
+
+
class NewUserForm(FlaskForm):
    """Admin form for creating a new journalist/admin account."""
    # Username must be present and at least MIN_USERNAME_LEN characters.
    username = TextField('username', validators=[
        InputRequired(message=lazy_gettext('This field is required.')),
        minimum_length_validation
    ])
    # The password is generated server-side and round-tripped through a
    # hidden field, so no validators are attached here.
    password = HiddenField('password')
    is_admin = BooleanField('is_admin')
    is_hotp = BooleanField('is_hotp')
    # Optional() stops validation when the field is empty (TOTP users);
    # when supplied it must pass the 40-character secret check.
    otp_secret = TextField('otp_secret', validators=[
        otp_secret_validation,
        Optional()
    ])
+
+
class ReplyForm(FlaskForm):
    """Form for composing a reply to a source; the message is required."""
    message = TextAreaField(
        u'Message',
        id="content-area",
        validators=[
            InputRequired(message=lazy_gettext(
                'You cannot send an empty reply.')),
        ],
    )
diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/main.py
@@ -0,0 +1,188 @@
+# -*- coding: utf-8 -*-
+
+from datetime import datetime
+from flask import (Blueprint, request, current_app, session, url_for, redirect,
+ render_template, g, flash, abort)
+from flask_babel import gettext
+from sqlalchemy.sql.expression import false
+
+import crypto_util
+import store
+
+from db import db_session, Source, SourceStar, Submission, Reply
+from journalist_app.forms import ReplyForm
+from journalist_app.utils import (validate_user, bulk_delete, download,
+ confirm_bulk_delete, get_source)
+
+
def make_blueprint(config):
    """Build the blueprint holding the journalist interface's core views."""
    view = Blueprint('main', __name__)

    @view.route('/login', methods=('GET', 'POST'))
    def login():
        """Log a journalist in with username, password and 2FA token."""
        if request.method == 'POST':
            user = validate_user(request.form['username'],
                                 request.form['password'],
                                 request.form['token'])
            if user:
                # Note: this writes the submitted two-factor token to the
                # application log.
                current_app.logger.info("'{}' logged in with the token {}"
                                        .format(request.form['username'],
                                                request.form['token']))

                # Update access metadata
                user.last_access = datetime.utcnow()
                db_session.add(user)
                db_session.commit()

                session['uid'] = user.id
                return redirect(url_for('main.index'))

        return render_template("login.html")

    @view.route('/logout')
    def logout():
        """Clear the session and return to the index."""
        session.pop('uid', None)
        session.pop('expires', None)
        return redirect(url_for('main.index'))

    @view.route('/')
    def index():
        """Show all non-pending sources, split into starred/unstarred."""
        unstarred = []
        starred = []

        # Long SQLAlchemy statements look best when formatted according to
        # the Pocoo style guide, IMHO:
        # http://www.pocoo.org/internal/styleguide/
        sources = Source.query.filter_by(pending=False) \
                              .order_by(Source.last_updated.desc()) \
                              .all()
        for source in sources:
            star = SourceStar.query.filter_by(source_id=source.id).first()
            if star and star.starred:
                starred.append(source)
            else:
                unstarred.append(source)
            # Unread count shown next to each source in the template.
            source.num_unread = len(
                Submission.query.filter_by(source_id=source.id,
                                           downloaded=False).all())

        return render_template('index.html',
                               unstarred=unstarred,
                               starred=starred)

    @view.route('/reply', methods=('POST',))
    def reply():
        """Attempt to send a Reply from a Journalist to a Source. Empty
        messages are rejected, and an informative error message is flashed
        on the client. In the case of unexpected errors involving database
        transactions (potentially caused by racing request threads that
        modify the same the database object) logging is done in such a way
        so as not to write potentially sensitive information to disk, and a
        generic error message is flashed on the client.

        Returns:
            flask.Response: The user is redirected to the same Source
            collection view, regardless if the Reply is created
            successfully.
        """
        # g.source / g.filesystem_id are presumably populated by an
        # app-level before_request hook -- confirm against the app factory.
        form = ReplyForm()
        if not form.validate_on_submit():
            for error in form.message.errors:
                flash(error, "error")
            return redirect(url_for('col.col', filesystem_id=g.filesystem_id))

        g.source.interaction_count += 1
        filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count,
                                              g.source.journalist_filename)
        # Encrypt to both the source's reply key and the instance-wide key.
        crypto_util.encrypt(form.message.data,
                            [crypto_util.getkey(g.filesystem_id),
                             config.JOURNALIST_KEY],
                            output=store.path(g.filesystem_id, filename))
        reply = Reply(g.user, g.source, filename)

        try:
            db_session.add(reply)
            db_session.commit()
        except Exception as exc:
            flash(gettext(
                "An unexpected error occurred! Please "
                "inform your administrator."), "error")
            # We take a cautious approach to logging here because we're dealing
            # with responses to sources. It's possible the exception message
            # could contain information we don't want to write to disk.
            current_app.logger.error(
                "Reply from '{}' (ID {}) failed: {}!".format(g.user.username,
                                                             g.user.id,
                                                             exc.__class__))
        else:
            flash(gettext("Thanks. Your reply has been stored."),
                  "notification")
        finally:
            # The redirect in `finally` runs on success and failure alike.
            return redirect(url_for('col.col', filesystem_id=g.filesystem_id))

    @view.route('/flag', methods=('POST',))
    def flag():
        """Flag the current source and show the confirmation page."""
        g.source.flagged = True
        db_session.commit()
        return render_template('flag.html', filesystem_id=g.filesystem_id,
                               codename=g.source.journalist_designation)

    @view.route('/bulk', methods=('POST',))
    def bulk():
        """Dispatch a download/delete action over the selected documents."""
        action = request.form['action']

        doc_names_selected = request.form.getlist('doc_names_selected')
        selected_docs = [doc for doc in g.source.collection
                         if doc.filename in doc_names_selected]
        if selected_docs == []:
            if action == 'download':
                flash(gettext("No collections selected for download."),
                      "error")
            elif action in ('delete', 'confirm_delete'):
                flash(gettext("No collections selected for deletion."),
                      "error")
            return redirect(url_for('col.col', filesystem_id=g.filesystem_id))

        if action == 'download':
            source = get_source(g.filesystem_id)
            return download(source.journalist_filename, selected_docs)
        elif action == 'delete':
            return bulk_delete(g.filesystem_id, selected_docs)
        elif action == 'confirm_delete':
            return confirm_bulk_delete(g.filesystem_id, selected_docs)
        else:
            abort(400)

    @view.route('/regenerate-code', methods=('POST',))
    def regenerate_code():
        """Give the current source a new designation, renaming its files."""
        original_journalist_designation = g.source.journalist_designation
        g.source.journalist_designation = crypto_util.display_id()

        for item in g.source.collection:
            item.filename = store.rename_submission(
                g.filesystem_id,
                item.filename,
                g.source.journalist_filename)
        db_session.commit()

        flash(gettext(
            "The source '{original_name}' has been renamed to '{new_name}'")
            .format(original_name=original_journalist_designation,
                    new_name=g.source.journalist_designation),
            "notification")
        return redirect(url_for('col.col', filesystem_id=g.filesystem_id))

    @view.route('/download_unread/<filesystem_id>')
    def download_unread_filesystem_id(filesystem_id):
        """Download every unread submission for one source."""
        id = Source.query.filter(Source.filesystem_id == filesystem_id) \
            .one().id
        submissions = Submission.query.filter(
            Submission.source_id == id,
            Submission.downloaded == false()).all()
        if submissions == []:
            flash(gettext("No unread submissions for this source."))
            return redirect(url_for('col.col', filesystem_id=filesystem_id))
        source = get_source(filesystem_id)
        return download(source.journalist_filename, submissions)

    return view
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
new file mode 100644
--- /dev/null
+++ b/securedrop/journalist_app/utils.py
@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+
+from datetime import datetime
+from flask import (g, flash, current_app, abort, send_file, redirect, url_for,
+ render_template, Markup)
+from flask_babel import gettext, ngettext
+from sqlalchemy.sql.expression import false
+
+import crypto_util
+import i18n
+import store
+import worker
+
+from db import (db_session, get_one_or_else, Source, Journalist,
+ InvalidUsernameException, WrongPasswordException,
+ LoginThrottledException, BadTokenException, SourceStar,
+ PasswordError, Submission)
+from rm import srm
+
+
def logged_in():
    """Return True when the current request belongs to a known user.

    setup_g resolves session['uid'] (the database primary key) and, when
    it maps to an existing Journalist, stores that object on ``g.user``.
    Checking ``g.user`` rather than the session directly means a deleted
    account with a stale session is not treated as authenticated.
    """
    current_user = g.get('user', None)
    return bool(current_user)
+
+
def commit_account_changes(user):
    """Persist pending changes to `user`, flashing the outcome.

    Nothing is written (and nothing flashed) when the session does not
    consider the object modified. On a database error the session is
    rolled back and only a generic message is shown; details go to the
    application log instead of the UI.
    """
    if db_session.is_modified(user):
        try:
            db_session.add(user)
            db_session.commit()
        except Exception as e:
            flash(gettext(
                "An unexpected error occurred! Please "
                "inform your administrator."), "error")
            current_app.logger.error("Account changes for '{}' failed: {}"
                                     .format(user, e))
            db_session.rollback()
        else:
            flash(gettext("Account updated."), "success")
+
+
def get_source(filesystem_id):
    """Return the Source row for `filesystem_id`.

    Delegates to get_one_or_else, which aborts the request when the query
    does not yield exactly one row.
    """
    query = Source.query.filter(Source.filesystem_id == filesystem_id)
    return get_one_or_else(query, current_app.logger, abort)
+
+
def validate_user(username, password, token, error_message=None):
    """
    Validates the user by calling the login and handling exceptions
    :param username: Username
    :param password: Password
    :param token: Two-factor authentication token
    :param error_message: Localized error message string to use on failure
    :return: Journalist user object if successful, None otherwise.
    """
    try:
        return Journalist.login(username, password, token)
    except (InvalidUsernameException,
            BadTokenException,
            WrongPasswordException,
            LoginThrottledException) as e:
        current_app.logger.error("Login for '{}' failed: {}".format(
            username, e))
        if not error_message:
            error_message = gettext('Login failed.')
        login_flashed_msg = error_message

        if isinstance(e, LoginThrottledException):
            login_flashed_msg += " "
            period = Journalist._LOGIN_ATTEMPT_PERIOD
            # ngettext is needed although we always have period > 1
            # see https://github.com/freedomofpress/securedrop/issues/2422
            login_flashed_msg += ngettext(
                "Please wait at least {seconds} second "
                "before logging in again.",
                "Please wait at least {seconds} seconds "
                "before logging in again.", period).format(seconds=period)
        else:
            try:
                user = Journalist.query.filter_by(
                    username=username).one()
                if user.is_totp:
                    login_flashed_msg += " "
                    login_flashed_msg += gettext(
                        "Please wait for a new two-factor token"
                        " before trying again.")
            except Exception:
                # Best-effort hint only: the username may not exist at
                # all. The original bare `except:` also swallowed
                # KeyboardInterrupt/SystemExit; Exception is narrower.
                pass

        flash(login_flashed_msg, "error")
        return None
+
+
def download(zip_basename, submissions):
    """Send client contents of ZIP-file *zip_basename*-<timestamp>.zip
    containing *submissions*. The ZIP-file, being a
    :class:`tempfile.NamedTemporaryFile`, is stored on disk only
    temporarily.

    :param str zip_basename: The basename of the ZIP-file download.

    :param list submissions: A list of :class:`db.Submission`s to
        include in the ZIP-file.
    """
    zf = store.get_bulk_archive(submissions,
                                zip_directory=zip_basename)
    attachment_filename = "{}--{}.zip".format(
        zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))

    # Mark the submissions that have been downloaded as such. The commit
    # happens before the file is sent, so items stay marked even if the
    # transfer itself fails.
    for submission in submissions:
        submission.downloaded = True
    db_session.commit()

    return send_file(zf.name, mimetype="application/zip",
                     attachment_filename=attachment_filename,
                     as_attachment=True)
+
+
def bulk_delete(filesystem_id, items_selected):
    """Delete the selected submissions/replies for one source.

    Secure file removal (srm) is queued on the async worker while the
    database rows are deleted in this request; the user is then returned
    to the source's collection page with a pluralized notice.
    """
    for item in items_selected:
        item_path = store.path(filesystem_id, item.filename)
        worker.enqueue(srm, item_path)
        db_session.delete(item)
    db_session.commit()

    flash(ngettext("Submission deleted.",
                   "Submissions deleted.",
                   len(items_selected)), "notification")
    return redirect(url_for('col.col', filesystem_id=filesystem_id))
+
+
def confirm_bulk_delete(filesystem_id, items_selected):
    """Render the confirmation page before a bulk delete is executed."""
    return render_template('delete.html',
                           filesystem_id=filesystem_id,
                           source=g.source,
                           items_selected=items_selected)
+
+
def make_star_true(filesystem_id):
    """Star the given source, creating its SourceStar row if needed.

    The caller is responsible for committing the session.
    """
    source = get_source(filesystem_id)
    if source.star:
        source.star.starred = True
    else:
        source_star = SourceStar(source)
        db_session.add(source_star)
+
+
def make_star_false(filesystem_id):
    """Un-star the given source.

    When the source has never been starred, a SourceStar row is created
    and committed first — presumably so that ``source.star`` resolves
    before it is flipped; the final ``starred = False`` is left for the
    caller to commit (NOTE(review): confirm this two-step commit is
    intentional).
    """
    source = get_source(filesystem_id)
    if not source.star:
        source_star = SourceStar(source)
        db_session.add(source_star)
        db_session.commit()
    source.star.starred = False
+
+
def col_star(cols_selected):
    """Star every selected collection, then return to the index."""
    for fid in cols_selected:
        make_star_true(fid)
    db_session.commit()
    return redirect(url_for('main.index'))
+
+
def col_un_star(cols_selected):
    """Un-star every selected collection, then return to the index."""
    for fid in cols_selected:
        make_star_false(fid)
    db_session.commit()
    return redirect(url_for('main.index'))
+
+
def col_delete(cols_selected):
    """Delete multiple collections selected on the index page.

    An empty selection is reported as an error; otherwise every chosen
    collection is deleted and a pluralized summary is flashed.
    """
    if len(cols_selected) < 1:
        flash(gettext("No collections selected for deletion."), "error")
    else:
        for filesystem_id in cols_selected:
            delete_collection(filesystem_id)
        num = len(cols_selected)
        flash(ngettext('{num} collection deleted', '{num} collections deleted',
                       num).format(num=num),
              "notification")

    return redirect(url_for('main.index'))
+
+
def make_password():
    """Generate random candidate passwords until one satisfies the
    Journalist password policy, and return it."""
    while True:
        candidate = crypto_util.genrandomid(7, i18n.get_language())
        try:
            Journalist.check_password_acceptable(candidate)
        except PasswordError:
            continue
        return candidate
+
+
def delete_collection(filesystem_id):
    """Fully remove a source: stored files, reply keypair, database row.

    Returns the async worker job performing the (slow) secure file
    deletion; key and database deletion happen synchronously.
    """
    # Delete the source's collection of submissions
    job = worker.enqueue(srm, store.path(filesystem_id))

    # Delete the source's reply keypair
    crypto_util.delete_reply_keypair(filesystem_id)

    # Delete their entry in the db
    source = get_source(filesystem_id)
    db_session.delete(source)
    db_session.commit()
    return job
+
+
def set_diceware_password(user, password):
    """Set `user`'s password and flash the outcome to the journalist.

    Failures (policy rejection or commit error) are flashed rather than
    raised; on success the new password is echoed back — via Markup, so
    the surrounding HTML is not escaped — for saving in a password
    manager.
    """
    try:
        user.set_password(password)
    except PasswordError:
        flash(gettext(
            'You submitted a bad password! Password not changed.'), 'error')
        return

    try:
        db_session.commit()
    except Exception:
        flash(gettext(
            'There was an error, and the new password might not have been '
            'saved correctly. To prevent you from getting locked '
            'out of your account, you should reset your password again.'),
            'error')
        current_app.logger.error('Failed to update a valid password.')
        return

    # using Markup so the HTML isn't escaped
    flash(Markup("<p>" + gettext(
        "Password updated. Don't forget to "
        "save it in your KeePassX database. New password:") +
        ' <span><code>{}</code></span></p>'.format(password)),
        'success')
+
+
def col_download_unread(cols_selected):
    """Download all unread submissions from all selected sources."""
    submissions = []
    for fid in cols_selected:
        source_id = Source.query.filter(
            Source.filesystem_id == fid).one().id
        submissions.extend(Submission.query.filter(
            Submission.downloaded == false(),
            Submission.source_id == source_id).all())
    if not submissions:
        flash(gettext("No unread submissions in selected collections."),
              "error")
        return redirect(url_for('main.index'))
    return download("unread", submissions)
+
+
def col_download_all(cols_selected):
    """Download all submissions from all selected sources."""
    submissions = []
    for fid in cols_selected:
        source_id = Source.query.filter(
            Source.filesystem_id == fid).one().id
        submissions.extend(Submission.query.filter(
            Submission.source_id == source_id).all())
    return download("all", submissions)
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -2,16 +2,18 @@
# -*- coding: utf-8 -*-
import argparse
+import codecs
import logging
import os
+from os.path import dirname, join, realpath
import shutil
import signal
import subprocess
import sys
+import time
import traceback
import version
-import psutil
import qrcode
from sqlalchemy.orm.exc import NoResultFound
@@ -49,12 +51,18 @@ def sh(command, input=None):
proc.stdin.write(input)
proc.stdin.close()
lines_of_command_output = []
+ loggable_line_list = []
with proc.stdout:
for line in iter(proc.stdout.readline, b''):
line = line.decode('utf-8')
lines_of_command_output.append(line)
- log.debug(line.strip().encode('ascii', 'ignore'))
+ loggable_line = line.strip().encode('ascii', 'ignore')
+ log.debug(loggable_line)
+ loggable_line_list.append(loggable_line)
if proc.wait() != 0:
+ if log.getEffectiveLevel() > logging.DEBUG:
+ for loggable_line in loggable_line_list:
+ log.error(loggable_line)
raise subprocess.CalledProcessError(
returncode=proc.returncode,
cmd=command
@@ -62,7 +70,7 @@ def sh(command, input=None):
return "".join(lines_of_command_output)
-def reset(args): # pragma: no cover
+def reset(args):
"""Clears the SecureDrop development applications' state, restoring them to
the way they were immediately after running `setup_dev.sh`. This command:
1. Erases the development sqlite database file.
@@ -70,9 +78,9 @@ def reset(args): # pragma: no cover
3. Erases stored submissions and replies from the store dir.
"""
# Erase the development db file
- assert hasattr(config, 'DATABASE_FILE'), ("TODO: ./manage.py doesn't know "
- 'how to clear the db if the '
- 'backend is not sqlite')
+ if not hasattr(config, 'DATABASE_FILE'):
+ raise Exception("TODO: ./manage.py doesn't know how to clear the db "
+ 'if the backend is not sqlite')
try:
os.remove(config.DATABASE_FILE)
except OSError:
@@ -142,16 +150,23 @@ def _make_password():
def _add_user(is_admin=False):
username = _get_username()
- print("Note: Journalist passwords are now autogenerated.")
+ print("Note: Passwords are now autogenerated.")
password = _make_password()
- print("This journalist's password is: {}".format(password))
+ print("This user's password is: {}".format(password))
is_hotp = _get_yubikey_usage()
otp_secret = None
if is_hotp:
while True:
otp_secret = raw_input(
- 'Please configure your YubiKey and enter the secret: ')
+ "Please configure this user's YubiKey and enter the secret: ")
+ if otp_secret:
+ tmp_str = otp_secret.replace(" ", "")
+ if len(tmp_str) != 40:
+ print("The length of the secret is not correct. "
+ "Expected 40 characters, but received {0}. "
+ "Try again.".format(len(tmp_str)))
+ continue
if otp_secret:
break
@@ -181,6 +196,7 @@ def _add_user(is_admin=False):
issuer_name='SecureDrop')
qr = qrcode.QRCode()
qr.add_data(uri)
+ sys.stdout = codecs.getwriter("utf-8")(sys.stdout)
qr.print_ascii(tty=sys.stdout.isatty())
print('\nIf the barcode does not render correctly, try changing '
"your terminal's font (Monospace for Linux, Menlo for OS "
@@ -192,10 +208,23 @@ def _add_user(is_admin=False):
return 0
-def delete_user(args): # pragma: no cover
def _get_username_to_delete():
    """Prompt interactively for the username to delete (Python 2)."""
    return raw_input('Username to delete: ')
+
+
def _get_delete_confirmation(user):
    """Ask for confirmation; return True only on an explicit 'y'/'Y'.

    On any other answer a notice is printed and False is returned.
    """
    answer = raw_input('Are you sure you want to delete user '
                       '"{}" (y/n)?'.format(user))
    if answer.lower() == 'y':
        return True
    print('Confirmation not received: user "{}" was NOT '
          'deleted'.format(user))
    return False
+
+
+def delete_user(args):
"""Deletes a journalist or administrator from the application."""
- # Select user to delete
- username = raw_input('Username to delete: ')
+ username = _get_username_to_delete()
try:
selected_user = Journalist.query.filter_by(username=username).one()
except NoResultFound:
@@ -203,72 +232,51 @@ def delete_user(args): # pragma: no cover
return 0
# Confirm deletion if user is found
- confirmation = raw_input('Are you sure you want to delete user '
- '{} (y/n)?'.format(selected_user))
- if confirmation.lower() != 'y':
- print('Confirmation not received: user "{}" was NOT '
- 'deleted'.format(username))
+ if not _get_delete_confirmation(selected_user.username):
return 0
# Try to delete user from the database
try:
db_session.delete(selected_user)
db_session.commit()
- except:
+ except Exception as e:
# If the user was deleted between the user selection and confirmation,
# (e.g., through the web app), we don't report any errors. If the user
# is still there, but there was a error deleting them from the
# database, we do report it.
try:
- selected_user = Journalist.query.filter_by(username=username).one()
+ Journalist.query.filter_by(username=username).one()
except NoResultFound:
pass
else:
- raise
+ raise e
print('User "{}" successfully deleted'.format(username))
return 0
def clean_tmp(args): # pragma: no cover
- """Cleanup the SecureDrop temp directory. This is intended to be run
- as an automated cron job. We skip files that are currently in use to
- avoid deleting files that are currently being downloaded."""
- # Inspired by http://stackoverflow.com/a/11115521/1093000
- def file_in_use(fname):
- for proc in psutil.process_iter():
- try:
- open_files = proc.open_files()
- in_use = False or any([open_file.path == fname
- for open_file in open_files])
- # Early return for perf
- if in_use:
- break
- except psutil.NoSuchProcess:
- # This catches a race condition where a process ends before we
- # can examine its files. Ignore this - if the process ended, it
- # can't be using fname, so this won't cause an error.
- pass
-
- return in_use
+ """Cleanup the SecureDrop temp directory. """
+ if not os.path.exists(args.directory):
+ log.debug('{} does not exist, do nothing'.format(args.directory))
+ return 0
def listdir_fullpath(d):
- # Thanks to http://stackoverflow.com/a/120948/1093000
return [os.path.join(d, f) for f in os.listdir(d)]
- try:
- os.stat(config.TEMP_DIR)
- except OSError:
- pass
- else:
- for path in listdir_fullpath(config.TEMP_DIR):
- if not file_in_use(path):
- os.remove(path)
+ too_old = args.days * 24 * 60 * 60
+ for path in listdir_fullpath(args.directory):
+ if time.time() - os.stat(path).st_mtime > too_old:
+ os.remove(path)
+ log.debug('{} removed'.format(path))
+ else:
+ log.debug('{} modified less than {} days ago'.format(
+ path, args.days))
return 0
-def translate(args):
+def translate_messages(args):
messages_file = os.path.join(args.translations_dir, 'messages.pot')
if args.extract_update:
@@ -287,25 +295,29 @@ def translate(args):
--copyright-holder='Freedom of the Press Foundation' \
{sources}
- # we do not handle fuzzy translations yet
- sed -i '/^#, fuzzy$/d' {messages_file}
+ # remove this line so the file does not change if no
+ # strings are modified
+ sed -i '/^"POT-Creation-Date/d' {messages_file}
""".format(translations_dir=args.translations_dir,
mapping=args.mapping,
messages_file=messages_file,
version=args.version,
sources=" ".join(args.source)))
- if len(os.listdir(args.translations_dir)) > 1:
+ changed = subprocess.call("git diff --quiet {}".format(messages_file),
+ shell=True)
+
+ if changed and len(os.listdir(args.translations_dir)) > 1:
sh("""
set -xe
- pybabel update \
- --input-file {messages_file} \
- --output-dir {translations_dir} \
- --no-fuzzy-matching --ignore-obsolete
+ for translation in {translations_dir}/*/LC_MESSAGES/*.po ; do
+ msgmerge --previous --update $translation {messages_file}
+ done
""".format(translations_dir=args.translations_dir,
messages_file=messages_file))
+ log.warning("messages translations updated in " + messages_file)
else:
- log.warning("no translations found (ok for tests, not otherwise)")
+ log.warning("messages translations are already up to date")
if args.compile and len(os.listdir(args.translations_dir)) > 1:
sh("""
@@ -314,6 +326,60 @@ def translate(args):
""".format(translations_dir=args.translations_dir))
+def translate_desktop(args):
+ messages_file = os.path.join(args.translations_dir, 'desktop.pot')
+
+ if args.extract_update:
+ sh("""
+ set -xe
+ cd {translations_dir}
+ xgettext \
+ --output=desktop.pot \
+ --language=Desktop \
+ --keyword \
+ --keyword=Name \
+ --package-version={version} \
+ --msgid-bugs-address='[email protected]' \
+ --copyright-holder='Freedom of the Press Foundation' \
+ {sources}
+
+ # remove this line so the file does not change if no
+ # strings are modified
+ sed -i '/^"POT-Creation-Date/d' {messages_file}
+ """.format(translations_dir=args.translations_dir,
+ messages_file=messages_file,
+ version=args.version,
+ sources=" ".join(args.source)))
+
+ changed = subprocess.call("git diff --quiet {}".format(messages_file),
+ shell=True)
+
+ if changed:
+ for f in os.listdir(args.translations_dir):
+ if not f.endswith('.po'):
+ continue
+ po_file = os.path.join(args.translations_dir, f)
+ sh("""
+ msgmerge --update {po_file} {messages_file}
+ """.format(po_file=po_file,
+ messages_file=messages_file))
+ log.warning("messages translations updated in " + messages_file)
+ else:
+ log.warning("desktop translations are already up to date")
+
+ if args.compile:
+ sh("""
+ set -ex
+ cd {translations_dir}
+ find *.po | sed -e 's/\.po$//' > LINGUAS
+ for source in {sources} ; do
+ target=$(basename $source .in)
+ msgfmt --desktop --template $source -o $target -d .
+ done
+ """.format(translations_dir=args.translations_dir,
+ sources=" ".join(args.source)))
+
+
def get_args():
parser = argparse.ArgumentParser(prog=__file__, description='Management '
'and testing utility for SecureDrop.')
@@ -347,35 +413,27 @@ def get_args():
"SecureDrop application's state.")
reset_subp.set_defaults(func=reset)
# Cleanup the SD temp dir
- clean_tmp_subp = subps.add_parser('clean-tmp', help='Cleanup the '
- 'SecureDrop temp directory.')
- clean_tmp_subp.set_defaults(func=clean_tmp)
- clean_tmp_subp_a = subps.add_parser('clean_tmp', help='^')
- clean_tmp_subp_a.set_defaults(func=clean_tmp)
+ set_clean_tmp_parser(subps, 'clean-tmp')
+ set_clean_tmp_parser(subps, 'clean_tmp')
- set_translate_parser(subps)
+ set_translate_messages_parser(subps)
+ set_translate_desktop_parser(subps)
return parser
-def set_translate_parser(subps):
- parser = subps.add_parser('translate',
- help='Update and compile translations')
- translations_dir = 'translations'
+def set_translate_parser(subps,
+ parser,
+ translations_dir,
+ sources):
parser.add_argument(
'--extract-update',
action='store_true',
- help='run pybabel extract and pybabel update')
+ help='extract strings to translate and update existing translations')
parser.add_argument(
'--compile',
action='store_true',
- help='run pybabel compile')
- mapping = 'babel.cfg'
- parser.add_argument(
- '--mapping',
- default=mapping,
- help='Mapping of files to consider (default {})'.format(
- mapping))
+ help='compile translations')
parser.add_argument(
'--translations-dir',
default=translations_dir,
@@ -386,14 +444,58 @@ def set_translate_parser(subps):
default=version.__version__,
help='SecureDrop version to store in pot files (default {})'.format(
version.__version__))
- sources = ['.', 'source_templates', 'journalist_templates']
parser.add_argument(
'--source',
default=sources,
action='append',
- help='Source file or directory to extract (default {})'.format(
+ help='Source files and directories to extract (default {})'.format(
sources))
- parser.set_defaults(func=translate)
+
+
+def set_translate_messages_parser(subps):
+ parser = subps.add_parser('translate-messages',
+ help=('Update and compile '
+ 'source and template translations'))
+ translations_dir = join(dirname(realpath(__file__)), 'translations')
+ sources = ['.', 'source_templates', 'journalist_templates']
+ set_translate_parser(subps, parser, translations_dir, sources)
+ mapping = 'babel.cfg'
+ parser.add_argument(
+ '--mapping',
+ default=mapping,
+ help='Mapping of files to consider (default {})'.format(
+ mapping))
+ parser.set_defaults(func=translate_messages)
+
+
+def set_clean_tmp_parser(subps, name):
+ parser = subps.add_parser(name, help='Cleanup the '
+ 'SecureDrop temp directory.')
+ default_days = 7
+ parser.add_argument(
+ '--days',
+ default=default_days,
+ type=int,
+ help=('remove files not modified in a given number of DAYS '
+ '(default {} days)'.format(default_days)))
+ parser.add_argument(
+ '--directory',
+ default=config.TEMP_DIR,
+ help=('remove old files from DIRECTORY '
+ '(default {})'.format(config.TEMP_DIR)))
+ parser.set_defaults(func=clean_tmp)
+
+
+def set_translate_desktop_parser(subps):
+ parser = subps.add_parser('translate-desktop',
+ help=('Update and compile '
+ 'desktop icons translations'))
+ translations_dir = join(
+ dirname(realpath(__file__)),
+ '../install_files/ansible-base/roles/tails-config/templates')
+ sources = ['desktop-journalist-icon.j2.in', 'desktop-source-icon.j2.in']
+ set_translate_parser(subps, parser, translations_dir, sources)
+ parser.set_defaults(func=translate_desktop)
def setup_verbosity(args):
diff --git a/securedrop/rm.py b/securedrop/rm.py
new file mode 100644
--- /dev/null
+++ b/securedrop/rm.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+import subprocess
+
+
+def srm(fn):
+ subprocess.check_call(['srm', '-r', fn])
+ return "success"
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -77,7 +77,8 @@ def write(self, data):
but after calling :meth:`read`, you cannot write to the file
again.
"""
- assert self.last_action != 'read', 'You cannot write after reading!'
+ if self.last_action == 'read':
+ raise AssertionError('You cannot write after reading!')
self.last_action = 'write'
if isinstance(data, unicode): # noqa
@@ -103,7 +104,8 @@ def read(self, count=None):
count (int): the number of bytes to try to read from the
file from the current position.
"""
- assert self.last_action != 'init', 'You must write before reading!'
+ if self.last_action == 'init':
+ raise AssertionError('You must write before reading!')
if self.last_action == 'write':
self.seek(0, 0)
self.last_action = 'read'
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -1,453 +1,10 @@
# -*- coding: utf-8 -*-
-import os
-from datetime import datetime
-from functools import wraps
-from cStringIO import StringIO
-import subprocess
-from threading import Thread
-import operator
-from flask import (Flask, request, render_template, session, redirect, url_for,
- flash, abort, g, send_file, Markup, make_response)
-from flask_wtf.csrf import CSRFProtect
-from flask_assets import Environment
-
-from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
-from sqlalchemy.exc import IntegrityError
import config
-import json
-import version
-import crypto_util
-import store
-import template_filters
-from db import db_session, Source, Submission, Reply, get_one_or_else
-from request_that_secures_file_uploads import RequestThatSecuresFileUploads
-from jinja2 import evalcontextfilter
-
-import logging
-# This module's logger is explicitly labeled so the correct logger is used,
-# even when this is run from the command line (e.g. during development)
-log = logging.getLogger('source')
-
-app = Flask(__name__, template_folder=config.SOURCE_TEMPLATES_DIR)
-app.request_class = RequestThatSecuresFileUploads
-app.config.from_object(config.SourceInterfaceFlaskConfig)
-
-assets = Environment(app)
-
-# The default CSRF token expiration is 1 hour. Since large uploads can
-# take longer than an hour over Tor, we increase the valid window to 24h.
-app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24
-CSRFProtect(app)
-
-app.jinja_env.globals['version'] = version.__version__
-if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
- app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
- app.jinja_env.globals['use_custom_header_image'] = True
-else:
- app.jinja_env.globals['header_image'] = 'logo.png'
- app.jinja_env.globals['use_custom_header_image'] = False
-
-app.jinja_env.filters['datetimeformat'] = template_filters.datetimeformat
-app.jinja_env.filters['nl2br'] = evalcontextfilter(template_filters.nl2br)
-
-
[email protected]_appcontext
-def shutdown_session(exception=None):
- """Automatically remove database sessions at the end of the request, or
- when the application shuts down"""
- db_session.remove()
-
-
-def logged_in():
- return 'logged_in' in session
-
-
-def login_required(f):
- @wraps(f)
- def decorated_function(*args, **kwargs):
- if not logged_in():
- return redirect(url_for('login'))
- return f(*args, **kwargs)
- return decorated_function
-
-
-def ignore_static(f):
- """Only executes the wrapped function if we're not loading
- a static resource."""
- @wraps(f)
- def decorated_function(*args, **kwargs):
- if request.path.startswith('/static'):
- return # don't execute the decorated function
- return f(*args, **kwargs)
- return decorated_function
-
-
[email protected]_request
-@ignore_static
-def setup_g():
- """Store commonly used values in Flask's special g object"""
- # ignore_static here because `crypto_util.hash_codename` is scrypt (very
- # time consuming), and we don't need to waste time running if we're just
- # serving a static resource that won't need to access these common values.
- if logged_in():
- g.codename = session['codename']
- g.filesystem_id = crypto_util.hash_codename(g.codename)
- try:
- g.source = Source.query \
- .filter(Source.filesystem_id == g.filesystem_id) \
- .one()
- except MultipleResultsFound as e:
- app.logger.error(
- "Found multiple Sources when one was expected: %s" %
- (e,))
- abort(500)
- except NoResultFound as e:
- app.logger.error(
- "Found no Sources when one was expected: %s" %
- (e,))
- del session['logged_in']
- del session['codename']
- return redirect(url_for('index'))
- g.loc = store.path(g.filesystem_id)
-
-
[email protected]_request
-@ignore_static
-def check_tor2web():
- # ignore_static here so we only flash a single message warning
- # about Tor2Web, corresponding to the initial page load.
- if 'X-tor2web' in request.headers:
- flash(Markup('<strong>WARNING:</strong> You appear to be using '
- 'Tor2Web. This <strong>does not</strong> provide '
- 'anonymity. <a href="/tor2web-warning">Why is this '
- 'dangerous?</a>'),
- "banner-warning")
-
-
[email protected]('/')
-def index():
- return render_template('index.html')
-
-
-def generate_unique_codename():
- """Generate random codenames until we get an unused one"""
- while True:
- codename = crypto_util.genrandomid(Source.NUM_WORDS)
-
- # The maximum length of a word in the wordlist is 9 letters and the
- # codename length is 7 words, so it is currently impossible to
- # generate a codename that is longer than the maximum codename length
- # (currently 128 characters). This code is meant to be defense in depth
- # to guard against potential future changes, such as modifications to
- # the word list or the maximum codename length.
- if len(codename) > Source.MAX_CODENAME_LEN:
- app.logger.warning(
- "Generated a source codename that was too long, "
- "skipping it. This should not happen. "
- "(Codename='{}')".format(codename))
- continue
-
- filesystem_id = crypto_util.hash_codename(codename) # scrypt (slow)
- matching_sources = Source.query.filter(
- Source.filesystem_id == filesystem_id).all()
- if len(matching_sources) == 0:
- return codename
-
-
[email protected]('/generate', methods=('GET', 'POST'))
-def generate():
- if logged_in():
- flash("You were redirected because you are already logged in. "
- "If you want to create a new account, you should log out first.",
- "notification")
- return redirect(url_for('lookup'))
-
- codename = generate_unique_codename()
- session['codename'] = codename
- return render_template('generate.html', codename=codename)
-
-
[email protected]('/create', methods=['POST'])
-def create():
- filesystem_id = crypto_util.hash_codename(session['codename'])
-
- source = Source(filesystem_id, crypto_util.display_id())
- db_session.add(source)
- try:
- db_session.commit()
- except IntegrityError as e:
- app.logger.error(
- "Attempt to create a source with duplicate codename: %s" %
- (e,))
- else:
- os.mkdir(store.path(filesystem_id))
-
- session['logged_in'] = True
- return redirect(url_for('lookup'))
-
-
-def async(f):
- def wrapper(*args, **kwargs):
- thread = Thread(target=f, args=args, kwargs=kwargs)
- thread.start()
- return wrapper
-
-
-@async
-def async_genkey(filesystem_id, codename):
- crypto_util.genkeypair(filesystem_id, codename)
-
- # Register key generation as update to the source, so sources will
- # filter to the top of the list in the journalist interface if a
- # flagged source logs in and has a key generated for them. #789
- try:
- source = Source.query.filter(Source.filesystem_id == filesystem_id) \
- .one()
- source.last_updated = datetime.utcnow()
- db_session.commit()
- except Exception as e:
- app.logger.error("async_genkey for source "
- "(filesystem_id={}): {}".format(filesystem_id, e))
-
-
[email protected]('/lookup', methods=('GET',))
-@login_required
-def lookup():
- replies = []
- for reply in g.source.replies:
- reply_path = store.path(g.filesystem_id, reply.filename)
- try:
- reply.decrypted = crypto_util.decrypt(
- g.codename,
- open(reply_path).read()).decode('utf-8')
- except UnicodeDecodeError:
- app.logger.error("Could not decode reply %s" % reply.filename)
- else:
- reply.date = datetime.utcfromtimestamp(
- os.stat(reply_path).st_mtime)
- replies.append(reply)
-
- # Sort the replies by date
- replies.sort(key=operator.attrgetter('date'), reverse=True)
-
- # Generate a keypair to encrypt replies from the journalist
- # Only do this if the journalist has flagged the source as one
- # that they would like to reply to. (Issue #140.)
- if not crypto_util.getkey(g.filesystem_id) and g.source.flagged:
- async_genkey(g.filesystem_id, g.codename)
-
- return render_template(
- 'lookup.html',
- codename=g.codename,
- replies=replies,
- flagged=g.source.flagged,
- haskey=crypto_util.getkey(
- g.filesystem_id))
-
-
-def normalize_timestamps(filesystem_id):
- """
- Update the timestamps on all of the source's submissions to match that of
- the latest submission. This minimizes metadata that could be useful to
- investigators. See #301.
- """
- sub_paths = [store.path(filesystem_id, submission.filename)
- for submission in g.source.submissions]
- if len(sub_paths) > 1:
- args = ["touch"]
- args.extend(sub_paths[:-1])
- rc = subprocess.call(args)
- if rc != 0:
- app.logger.warning(
- "Couldn't normalize submission "
- "timestamps (touch exited with %d)" %
- rc)
-
-
[email protected]('/submit', methods=('POST',))
-@login_required
-def submit():
- msg = request.form['msg']
- fh = request.files['fh']
-
- # Don't bother submitting anything if it was an "empty" submission. #878.
- if not (msg or fh):
- flash("You must enter a message or choose a file to submit.", "error")
- return redirect(url_for('lookup'))
-
- fnames = []
- journalist_filename = g.source.journalist_filename
- first_submission = g.source.interaction_count == 0
-
- if msg:
- g.source.interaction_count += 1
- fnames.append(
- store.save_message_submission(
- g.filesystem_id,
- g.source.interaction_count,
- journalist_filename,
- msg))
- if fh:
- g.source.interaction_count += 1
- fnames.append(
- store.save_file_submission(
- g.filesystem_id,
- g.source.interaction_count,
- journalist_filename,
- fh.filename,
- fh.stream))
-
- if first_submission:
- msg = render_template('first_submission_flashed_message.html')
- flash(Markup(msg), "success")
-
- else:
- if msg and not fh:
- things = 'message'
- elif not msg and fh:
- things = 'document'
- else:
- things = 'message and document'
-
- msg = render_template('next_submission_flashed_message.html',
- things=things)
- flash(Markup(msg), "success")
-
- for fname in fnames:
- submission = Submission(g.source, fname)
- db_session.add(submission)
-
- if g.source.pending:
- g.source.pending = False
-
- # Generate a keypair now, if there's enough entropy (issue #303)
- entropy_avail = int(
- open('/proc/sys/kernel/random/entropy_avail').read())
- if entropy_avail >= 2400:
- async_genkey(g.filesystem_id, g.codename)
-
- g.source.last_updated = datetime.utcnow()
- db_session.commit()
- normalize_timestamps(g.filesystem_id)
-
- return redirect(url_for('lookup'))
-
-
[email protected]('/delete', methods=('POST',))
-@login_required
-def delete():
- query = Reply.query.filter(
- Reply.filename == request.form['reply_filename'])
- reply = get_one_or_else(query, app.logger, abort)
- store.secure_unlink(store.path(g.filesystem_id, reply.filename))
- db_session.delete(reply)
- db_session.commit()
-
- flash("Reply deleted", "notification")
- return redirect(url_for('lookup'))
-
-
[email protected]('/delete-all', methods=('POST',))
-@login_required
-def batch_delete():
- replies = g.source.replies
- if len(replies) == 0:
- app.logger.error("Found no replies when at least one was expected")
- return redirect(url_for('lookup'))
- for reply in replies:
- store.secure_unlink(store.path(g.filesystem_id, reply.filename))
- db_session.delete(reply)
- db_session.commit()
-
- flash("All replies have been deleted", "notification")
- return redirect(url_for('lookup'))
-
-
-def valid_codename(codename):
- # Ignore codenames that are too long to avoid DoS
- if len(codename) > Source.MAX_CODENAME_LEN:
- app.logger.info(
- "Ignored attempted login because the codename was too long.")
- return False
-
- try:
- filesystem_id = crypto_util.hash_codename(codename)
- except crypto_util.CryptoException as e:
- app.logger.info(
- "Could not compute filesystem ID for codename '{}': {}".format(
- codename, e))
- abort(500)
-
- source = Source.query.filter_by(filesystem_id=filesystem_id).first()
- return source is not None
-
-
[email protected]('/login', methods=('GET', 'POST'))
-def login():
- if request.method == 'POST':
- codename = request.form['codename'].strip()
- if valid_codename(codename):
- session.update(codename=codename, logged_in=True)
- return redirect(url_for('lookup', from_login='1'))
- else:
- app.logger.info(
- "Login failed for invalid codename".format(codename))
- flash("Sorry, that is not a recognized codename.", "error")
- return render_template('login.html')
-
-
[email protected]('/logout')
-def logout():
- if logged_in():
- session.clear()
- msg = render_template('logout_flashed_message.html')
- flash(Markup(msg), "important hide-if-not-tor-browser")
- return redirect(url_for('index'))
-
-
[email protected]('/tor2web-warning')
-def tor2web_warning():
- return render_template("tor2web-warning.html")
-
-
[email protected]('/use-tor')
-def recommend_tor_browser():
- return render_template("use-tor-browser.html")
-
-
[email protected]('/journalist-key')
-def download_journalist_pubkey():
- journalist_pubkey = crypto_util.gpg.export_keys(config.JOURNALIST_KEY)
- return send_file(StringIO(journalist_pubkey),
- mimetype="application/pgp-keys",
- attachment_filename=config.JOURNALIST_KEY + ".asc",
- as_attachment=True)
-
-
[email protected]('/why-journalist-key')
-def why_download_journalist_pubkey():
- return render_template("why-journalist-key.html")
-
-
[email protected]('/metadata')
-def metadata():
- meta = {'gpg_fpr': config.JOURNALIST_KEY,
- 'sd_version': version.__version__,
- }
- resp = make_response(json.dumps(meta))
- resp.headers['Content-Type'] = 'application/json'
- return resp
-
-
[email protected](404)
-def page_not_found(error):
- return render_template('notfound.html'), 404
+from source_app import create_app
[email protected](500)
-def internal_error(error):
- return render_template('error.html'), 500
+app = create_app(config)
if __name__ == "__main__": # pragma: no cover
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/__init__.py
@@ -0,0 +1,136 @@
+from datetime import datetime, timedelta
+from flask import (Flask, render_template, flash, Markup, request, g, session,
+ url_for, redirect)
+from flask_babel import gettext
+from flask_assets import Environment
+from flask_wtf.csrf import CSRFProtect, CSRFError
+from jinja2 import evalcontextfilter
+from os import path
+from sqlalchemy.orm.exc import NoResultFound
+
+import crypto_util
+import i18n
+import store
+import template_filters
+import version
+
+from db import Source, db_session
+from request_that_secures_file_uploads import RequestThatSecuresFileUploads
+from source_app import main, info, api
+from source_app.decorators import ignore_static
+from source_app.utils import logged_in
+
+
+def create_app(config):
+ app = Flask(__name__,
+ template_folder=config.SOURCE_TEMPLATES_DIR,
+ static_folder=path.join(config.SECUREDROP_ROOT, 'static'))
+ app.request_class = RequestThatSecuresFileUploads
+ app.config.from_object(config.SourceInterfaceFlaskConfig)
+
+ # The default CSRF token expiration is 1 hour. Since large uploads can
+ # take longer than an hour over Tor, we increase the valid window to 24h.
+ app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24
+
+ CSRFProtect(app)
+
+ @app.errorhandler(CSRFError)
+ def handle_csrf_error(e):
+ msg = render_template('session_timeout.html')
+ session.clear()
+ flash(Markup(msg), "important")
+ return redirect(url_for('main.index'))
+
+ assets = Environment(app)
+ app.config['assets'] = assets
+
+ i18n.setup_app(app)
+
+ app.jinja_env.trim_blocks = True
+ app.jinja_env.lstrip_blocks = True
+ app.jinja_env.globals['version'] = version.__version__
+ if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
+ app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
+ app.jinja_env.globals['use_custom_header_image'] = True
+ else:
+ app.jinja_env.globals['header_image'] = 'logo.png'
+ app.jinja_env.globals['use_custom_header_image'] = False
+
+ app.jinja_env.filters['rel_datetime_format'] = \
+ template_filters.rel_datetime_format
+ app.jinja_env.filters['nl2br'] = evalcontextfilter(template_filters.nl2br)
+ app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
+
+ for module in [main, info, api]:
+ app.register_blueprint(module.make_blueprint(config))
+
+ @app.before_request
+ @ignore_static
+ def check_tor2web():
+ # ignore_static here so we only flash a single message warning
+ # about Tor2Web, corresponding to the initial page load.
+ if 'X-tor2web' in request.headers:
+ flash(Markup(gettext(
+ '<strong>WARNING:</strong> You appear to be using Tor2Web. '
+ 'This <strong>does not</strong> provide anonymity. '
+ '<a href="{url}">Why is this dangerous?</a>')
+ .format(url=url_for('info.tor2web_warning'))),
+ "banner-warning")
+
+ @app.before_request
+ @ignore_static
+ def setup_g():
+ """Store commonly used values in Flask's special g object"""
+ g.locale = i18n.get_locale()
+ g.text_direction = i18n.get_text_direction(g.locale)
+ g.html_lang = i18n.locale_to_rfc_5646(g.locale)
+ g.locales = i18n.get_locale2name()
+
+ if 'expires' in session and datetime.utcnow() >= session['expires']:
+ msg = render_template('session_timeout.html')
+
+ # clear the session after we render the message so it's localized
+ session.clear()
+
+ flash(Markup(msg), "important")
+
+ session['expires'] = datetime.utcnow() + \
+ timedelta(minutes=getattr(config,
+ 'SESSION_EXPIRATION_MINUTES',
+ 120))
+
+ # ignore_static here because `crypto_util.hash_codename` is scrypt
+ # (very time consuming), and we don't need to waste time running if
+ # we're just serving a static resource that won't need to access
+ # these common values.
+ if logged_in():
+ g.codename = session['codename']
+ g.filesystem_id = crypto_util.hash_codename(g.codename)
+ try:
+ g.source = Source.query \
+ .filter(Source.filesystem_id == g.filesystem_id) \
+ .one()
+ except NoResultFound as e:
+ app.logger.error(
+ "Found no Sources when one was expected: %s" %
+ (e,))
+ del session['logged_in']
+ del session['codename']
+ return redirect(url_for('main.index'))
+ g.loc = store.path(g.filesystem_id)
+
+ @app.teardown_appcontext
+ def shutdown_session(exception=None):
+ """Automatically remove database sessions at the end of the request, or
+ when the application shuts down"""
+ db_session.remove()
+
+ @app.errorhandler(404)
+ def page_not_found(error):
+ return render_template('notfound.html'), 404
+
+ @app.errorhandler(500)
+ def internal_error(error):
+ return render_template('error.html'), 500
+
+ return app
diff --git a/securedrop/source_app/api.py b/securedrop/source_app/api.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/api.py
@@ -0,0 +1,20 @@
+import json
+
+from flask import Blueprint, make_response
+
+import version
+
+
+def make_blueprint(config):
+ view = Blueprint('api', __name__)
+
+ @view.route('/metadata')
+ def metadata():
+ meta = {'gpg_fpr': config.JOURNALIST_KEY,
+ 'sd_version': version.__version__,
+ }
+ resp = make_response(json.dumps(meta))
+ resp.headers['Content-Type'] = 'application/json'
+ return resp
+
+ return view
diff --git a/securedrop/source_app/decorators.py b/securedrop/source_app/decorators.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/decorators.py
@@ -0,0 +1,24 @@
+from flask import redirect, url_for, request
+from functools import wraps
+
+from source_app.utils import logged_in
+
+
+def login_required(f):
+ @wraps(f)
+ def decorated_function(*args, **kwargs):
+ if not logged_in():
+ return redirect(url_for('main.login'))
+ return f(*args, **kwargs)
+ return decorated_function
+
+
+def ignore_static(f):
+ """Only executes the wrapped function if we're not loading
+ a static resource."""
+ @wraps(f)
+ def decorated_function(*args, **kwargs):
+ if request.path.startswith('/static'):
+ return # don't execute the decorated function
+ return f(*args, **kwargs)
+ return decorated_function
diff --git a/securedrop/source_app/forms.py b/securedrop/source_app/forms.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/forms.py
@@ -0,0 +1,19 @@
+from flask_babel import lazy_gettext
+from flask_wtf import FlaskForm
+from wtforms import PasswordField
+from wtforms.validators import InputRequired, Regexp, Length
+
+from db import Source
+
+
+class LoginForm(FlaskForm):
+ codename = PasswordField('codename', validators=[
+ InputRequired(message=lazy_gettext('This field is required.')),
+ Length(1, Source.MAX_CODENAME_LEN,
+ message=lazy_gettext(
+ 'Field must be between 1 and '
+ '{max_codename_len} characters long.'.format(
+ max_codename_len=Source.MAX_CODENAME_LEN))),
+ # Make sure to allow dashes since some words in the wordlist have them
+ Regexp(r'[\sA-Za-z0-9-]+$', message=lazy_gettext('Invalid input.'))
+ ])
diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/info.py
@@ -0,0 +1,30 @@
+from cStringIO import StringIO
+from flask import Blueprint, render_template, send_file
+
+import crypto_util
+
+
+def make_blueprint(config):
+ view = Blueprint('info', __name__)
+
+ @view.route('/tor2web-warning')
+ def tor2web_warning():
+ return render_template("tor2web-warning.html")
+
+ @view.route('/use-tor')
+ def recommend_tor_browser():
+ return render_template("use-tor-browser.html")
+
+ @view.route('/journalist-key')
+ def download_journalist_pubkey():
+ journalist_pubkey = crypto_util.gpg.export_keys(config.JOURNALIST_KEY)
+ return send_file(StringIO(journalist_pubkey),
+ mimetype="application/pgp-keys",
+ attachment_filename=config.JOURNALIST_KEY + ".asc",
+ as_attachment=True)
+
+ @view.route('/why-journalist-key')
+ def why_download_journalist_pubkey():
+ return render_template("why-journalist-key.html")
+
+ return view
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/main.py
@@ -0,0 +1,233 @@
+import operator
+import os
+
+from datetime import datetime
+from flask import (Blueprint, render_template, flash, redirect, url_for, g,
+ session, current_app, request, Markup, abort)
+from flask_babel import gettext
+from sqlalchemy.exc import IntegrityError
+
+import crypto_util
+import store
+
+from db import Source, db_session, Submission, Reply, get_one_or_else
+from rm import srm
+from source_app.decorators import login_required
+from source_app.utils import (logged_in, generate_unique_codename,
+ async_genkey, normalize_timestamps,
+ valid_codename)
+from source_app.forms import LoginForm
+
+
+def make_blueprint(config):
+ view = Blueprint('main', __name__)
+
+ @view.route('/')
+ def index():
+ return render_template('index.html')
+
+ @view.route('/generate', methods=('GET', 'POST'))
+ def generate():
+ if logged_in():
+ flash(gettext(
+ "You were redirected because you are already logged in. "
+ "If you want to create a new account, you should log out "
+ "first."),
+ "notification")
+ return redirect(url_for('.lookup'))
+
+ codename = generate_unique_codename()
+ session['codename'] = codename
+ session['new_user'] = True
+ return render_template('generate.html', codename=codename)
+
+ @view.route('/create', methods=['POST'])
+ def create():
+ filesystem_id = crypto_util.hash_codename(session['codename'])
+
+ source = Source(filesystem_id, crypto_util.display_id())
+ db_session.add(source)
+ try:
+ db_session.commit()
+ except IntegrityError as e:
+ db_session.rollback()
+ current_app.logger.error(
+ "Attempt to create a source with duplicate codename: %s" %
+ (e,))
+
+ # Issue 2386: don't log in on duplicates
+ del session['codename']
+ abort(500)
+ else:
+ os.mkdir(store.path(filesystem_id))
+
+ session['logged_in'] = True
+ return redirect(url_for('.lookup'))
+
+ @view.route('/lookup', methods=('GET',))
+ @login_required
+ def lookup():
+ replies = []
+ for reply in g.source.replies:
+ reply_path = store.path(g.filesystem_id, reply.filename)
+ try:
+ reply.decrypted = crypto_util.decrypt(
+ g.codename,
+ open(reply_path).read()).decode('utf-8')
+ except UnicodeDecodeError:
+ current_app.logger.error("Could not decode reply %s" %
+ reply.filename)
+ else:
+ reply.date = datetime.utcfromtimestamp(
+ os.stat(reply_path).st_mtime)
+ replies.append(reply)
+
+ # Sort the replies by date
+ replies.sort(key=operator.attrgetter('date'), reverse=True)
+
+ # Generate a keypair to encrypt replies from the journalist
+ # Only do this if the journalist has flagged the source as one
+ # that they would like to reply to. (Issue #140.)
+ if not crypto_util.getkey(g.filesystem_id) and g.source.flagged:
+ async_genkey(g.filesystem_id, g.codename)
+
+ return render_template(
+ 'lookup.html',
+ codename=g.codename,
+ replies=replies,
+ flagged=g.source.flagged,
+ new_user=session.get('new_user', None),
+ haskey=crypto_util.getkey(
+ g.filesystem_id))
+
+ @view.route('/submit', methods=('POST',))
+ @login_required
+ def submit():
+ msg = request.form['msg']
+ fh = request.files['fh']
+
+ # Don't submit anything if it was an "empty" submission. #878
+ if not (msg or fh):
+ flash(gettext(
+ "You must enter a message or choose a file to submit."),
+ "error")
+ return redirect(url_for('main.lookup'))
+
+ fnames = []
+ journalist_filename = g.source.journalist_filename
+ first_submission = g.source.interaction_count == 0
+
+ if msg:
+ g.source.interaction_count += 1
+ fnames.append(
+ store.save_message_submission(
+ g.filesystem_id,
+ g.source.interaction_count,
+ journalist_filename,
+ msg))
+ if fh:
+ g.source.interaction_count += 1
+ fnames.append(
+ store.save_file_submission(
+ g.filesystem_id,
+ g.source.interaction_count,
+ journalist_filename,
+ fh.filename,
+ fh.stream))
+
+ if first_submission:
+ msg = render_template('first_submission_flashed_message.html')
+ flash(Markup(msg), "success")
+
+ else:
+ if msg and not fh:
+ html_contents = gettext('Thanks! We received your message.')
+ elif not msg and fh:
+ html_contents = gettext('Thanks! We received your document.')
+ else:
+ html_contents = gettext('Thanks! We received your message and '
+ 'document.')
+
+ msg = render_template('next_submission_flashed_message.html',
+ html_contents=html_contents)
+ flash(Markup(msg), "success")
+
+ for fname in fnames:
+ submission = Submission(g.source, fname)
+ db_session.add(submission)
+
+ if g.source.pending:
+ g.source.pending = False
+
+ # Generate a keypair now, if there's enough entropy (issue #303)
+ entropy_avail = int(
+ open('/proc/sys/kernel/random/entropy_avail').read())
+ if entropy_avail >= 2400:
+ async_genkey(g.filesystem_id, g.codename)
+
+ g.source.last_updated = datetime.utcnow()
+ db_session.commit()
+ normalize_timestamps(g.filesystem_id)
+
+ return redirect(url_for('main.lookup'))
+
+ @view.route('/delete', methods=('POST',))
+ @login_required
+ def delete():
+ query = Reply.query.filter(
+ Reply.filename == request.form['reply_filename'])
+ reply = get_one_or_else(query, current_app.logger, abort)
+ srm(store.path(g.filesystem_id, reply.filename))
+ db_session.delete(reply)
+ db_session.commit()
+
+ flash(gettext("Reply deleted"), "notification")
+ return redirect(url_for('.lookup'))
+
+ @view.route('/delete-all', methods=('POST',))
+ @login_required
+ def batch_delete():
+ replies = g.source.replies
+ if len(replies) == 0:
+ current_app.logger.error("Found no replies when at least one was "
+ "expected")
+ return redirect(url_for('.lookup'))
+
+ for reply in replies:
+ srm(store.path(g.filesystem_id, reply.filename))
+ db_session.delete(reply)
+ db_session.commit()
+
+ flash(gettext("All replies have been deleted"), "notification")
+ return redirect(url_for('.lookup'))
+
+ @view.route('/login', methods=('GET', 'POST'))
+ def login():
+ form = LoginForm()
+ if form.validate_on_submit():
+ codename = request.form['codename'].strip()
+ if valid_codename(codename):
+ session.update(codename=codename, logged_in=True)
+ return redirect(url_for('.lookup', from_login='1'))
+ else:
+ current_app.logger.info(
+ "Login failed for invalid codename".format(codename))
+ flash(gettext("Sorry, that is not a recognized codename."),
+ "error")
+ return render_template('login.html', form=form)
+
+ @view.route('/logout')
+ def logout():
+ if logged_in():
+ msg = render_template('logout_flashed_message.html')
+
+ # Clear the session after we render the message so it's localized
+ # If a user specified a locale, save it and restore it
+ user_locale = g.locale
+ session.clear()
+ session['locale'] = user_locale
+
+ flash(Markup(msg), "important hide-if-not-tor-browser")
+ return redirect(url_for('.index'))
+
+ return view
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
new file mode 100644
--- /dev/null
+++ b/securedrop/source_app/utils.py
@@ -0,0 +1,98 @@
+import subprocess
+
+from datetime import datetime
+from flask import session, current_app, abort, g
+from threading import Thread
+
+import crypto_util
+import i18n
+import store
+
+from db import Source, db_session
+
+
+def logged_in():
+ return 'logged_in' in session
+
+
+def valid_codename(codename):
+ try:
+ filesystem_id = crypto_util.hash_codename(codename)
+ except crypto_util.CryptoException as e:
+ current_app.logger.info(
+ "Could not compute filesystem ID for codename '{}': {}".format(
+ codename, e))
+ abort(500)
+
+ source = Source.query.filter_by(filesystem_id=filesystem_id).first()
+ return source is not None
+
+
+def generate_unique_codename():
+ """Generate random codenames until we get an unused one"""
+ while True:
+ codename = crypto_util.genrandomid(Source.NUM_WORDS,
+ i18n.get_language())
+
+ # The maximum length of a word in the wordlist is 9 letters and the
+ # codename length is 7 words, so it is currently impossible to
+ # generate a codename that is longer than the maximum codename length
+ # (currently 128 characters). This code is meant to be defense in depth
+ # to guard against potential future changes, such as modifications to
+ # the word list or the maximum codename length.
+ if len(codename) > Source.MAX_CODENAME_LEN:
+ current_app.logger.warning(
+ "Generated a source codename that was too long, "
+ "skipping it. This should not happen. "
+ "(Codename='{}')".format(codename))
+ continue
+
+ filesystem_id = crypto_util.hash_codename(codename) # scrypt (slow)
+ matching_sources = Source.query.filter(
+ Source.filesystem_id == filesystem_id).all()
+ if len(matching_sources) == 0:
+ return codename
+
+
+def async(f):
+ def wrapper(*args, **kwargs):
+ thread = Thread(target=f, args=args, kwargs=kwargs)
+ thread.start()
+ return wrapper
+
+
+@async
+def async_genkey(filesystem_id, codename):
+ crypto_util.genkeypair(filesystem_id, codename)
+
+    # Register key generation as an update to the source, so the source
+    # will filter to the top of the list in the journalist interface if a
+    # flagged source logs in and has a key generated for them. #789
+ try:
+ source = Source.query.filter(Source.filesystem_id == filesystem_id) \
+ .one()
+ source.last_updated = datetime.utcnow()
+ db_session.commit()
+ except Exception as e:
+ current_app.logger.error(
+ "async_genkey for source (filesystem_id={}): {}"
+ .format(filesystem_id, e))
+
+
+def normalize_timestamps(filesystem_id):
+ """
+ Update the timestamps on all of the source's submissions to match that of
+ the latest submission. This minimizes metadata that could be useful to
+ investigators. See #301.
+ """
+ sub_paths = [store.path(filesystem_id, submission.filename)
+ for submission in g.source.submissions]
+ if len(sub_paths) > 1:
+ args = ["touch"]
+ args.extend(sub_paths[:-1])
+ rc = subprocess.call(args)
+ if rc != 0:
+ current_app.logger.warning(
+ "Couldn't normalize submission "
+ "timestamps (touch exited with %d)" %
+ rc)
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -5,7 +5,6 @@
import zipfile
import crypto_util
import tempfile
-import subprocess
import gzip
from werkzeug import secure_filename
@@ -78,6 +77,7 @@ def get_bulk_archive(selected_submissions, zip_directory=''):
# folder structure per #383
with zipfile.ZipFile(zip_file, 'w') as zip:
for source in sources:
+ fname = ""
submissions = [s for s in selected_submissions
if s.source.journalist_designation == source]
for submission in submissions:
@@ -85,9 +85,12 @@ def get_bulk_archive(selected_submissions, zip_directory=''):
submission.filename)
verify(filename)
document_number = submission.filename.split('-')[0]
+ if zip_directory == submission.source.journalist_filename:
+ fname = zip_directory
+ else:
+ fname = os.path.join(zip_directory, source)
zip.write(filename, arcname=os.path.join(
- zip_directory,
- source,
+ fname,
"%s_%s" % (document_number,
submission.source.last_updated.date()),
os.path.basename(filename)
@@ -156,18 +159,3 @@ def rename_submission(filesystem_id, orig_filename, journalist_filename):
else:
return new_filename # Only return new filename if successful
return orig_filename
-
-
-def secure_unlink(fn, recursive=False):
- verify(fn)
- command = ['srm']
- if recursive:
- command.append('-r')
- command.append(fn)
- subprocess.check_call(command)
- return "success"
-
-
-def delete_source_directory(filesystem_id):
- secure_unlink(path(filesystem_id), recursive=True)
- return "success"
diff --git a/securedrop/template_filters.py b/securedrop/template_filters.py
--- a/securedrop/template_filters.py
+++ b/securedrop/template_filters.py
@@ -1,42 +1,20 @@
# -*- coding: utf-8 -*-
+from flask_babel import gettext, get_locale
+from babel import units, dates
from datetime import datetime
from jinja2 import Markup, escape
+import math
-def datetimeformat(dt, fmt=None, relative=False):
+def rel_datetime_format(dt, fmt=None, relative=False):
"""Template filter for readable formatting of datetime.datetime"""
- fmt = fmt or '%b %d, %Y %I:%M %p'
if relative:
- time_difference = _relative_timestamp(dt)
- if time_difference:
- return '{} ago'.format(time_difference)
- return dt.strftime(fmt)
-
-
-def _relative_timestamp(dt):
- """"
- Format a human readable relative time for timestamps up to 30 days old
- """
- delta = datetime.utcnow() - dt
- diff = (
- delta.microseconds + (delta.seconds +
- delta.days * 24 * 3600) * 1e6) / 1e6
- if diff < 45:
- return '{} second{}'.format(int(diff), '' if int(diff) == 1 else 's')
- elif diff < 90:
- return 'a minute'
- elif diff < 2700:
- return '{} minutes'.format(int(max(diff / 60, 2)))
- elif diff < 5400:
- return 'an hour'
- elif diff < 79200:
- return '{} hours'.format(int(max(diff / 3600, 2)))
- elif diff < 129600:
- return 'a day'
- elif diff < 2592000:
- return '{} days'.format(int(max(diff / 86400, 2)))
+ time = dates.format_timedelta(datetime.utcnow() - dt,
+ locale=get_locale())
+ return gettext('{time} ago').format(time=time)
else:
- return None
+ fmt = fmt or 'MMM dd, yyyy hh:mm a'
+ return dates.format_datetime(dt, fmt, locale=get_locale())
def nl2br(context, value):
@@ -44,3 +22,26 @@ def nl2br(context, value):
if context.autoescape:
formatted = Markup(formatted)
return formatted
+
+
+def filesizeformat(value):
+ prefixes = [
+ 'digital-kilobyte',
+ 'digital-megabyte',
+ 'digital-gigabyte',
+ 'digital-terabyte',
+ ]
+ locale = get_locale()
+ base = 1024
+    #
+    # We use the long length format because the short length has no
+    # plural variant, which makes correct output read like a bug
+    # rather than a deliberate choice.
+    #
+ if value < base:
+ return units.format_unit(value, "byte", locale=locale, length="long")
+ else:
+ i = min(int(math.log(value, base)), len(prefixes)) - 1
+ prefix = prefixes[i]
+ bytes = float(value) / base ** (i + 1)
+ return units.format_unit(bytes, prefix, locale=locale, length="short")
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.4.4'
+__version__ = '0.5'
| diff --git a/docs/development/testing_application_tests.rst b/docs/development/testing_application_tests.rst
--- a/docs/development/testing_application_tests.rst
+++ b/docs/development/testing_application_tests.rst
@@ -74,6 +74,13 @@ Some Selenium tests are decorated to produce before and after screenshots to aid
in debugging. This behavior is enabled with the ``SCREENSHOTS_ENABLED`` environment
variable. Output PNG files will be placed in the ``tests/log/`` directory.
+The `gnupg
+<https://pythonhosted.org/python-gnupg>`_ library can be quite verbose in its
+output. The default log level applied to this package is ``ERROR`` but this can
+be controlled via the ``GNUPG_LOG_LEVEL`` environment variable. It can have values
+such as ``INFO`` or ``DEBUG`` if some particular test case or test run needs
+greater verbosity.
+
.. code:: sh
SCREENSHOTS_ENABLED=1 pytest tests/functional/
diff --git a/install_files/securedrop-ossec-server/var/ossec/setup_gmail_test.sh b/install_files/securedrop-ossec-server/var/ossec/setup_gmail_test.sh
deleted file mode 100755
--- a/install_files/securedrop-ossec-server/var/ossec/setup_gmail_test.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-
-set -u -e
-# ------------------------------------------------------------------------------
-#
-# This script is used to setup postfix on the monitor server to use
-# gmail as the smtp server to validate gpg and ossec alerts are sent
-#
-# This script should be run after the production install scripts
-#
-# TODO:
-# - Support default test gmail account [email protected]
-#
-# ------------------------------------------------------------------------------
-#set -x
-
-echo ""
-echo "In order to test sending ossec emails via gpg we need to do the following:"
-echo ""
-echo "1. Setup postfix to use google as the smpt relay"
-echo "2. Import your public gpg key to the ossec user"
-echo ""
-echo "What Gmail email address do you want to send test alerts to?"
-read EMAIL_DISTRO
-echo "What is your email password. (Needed to auth google as smtp server)"
-read PASSWORD
-echo "Please import your public key into the ossec keystore"
-echo "gpg --homedir /var/ossec/.gnu --import <your key here>"
-
-# Install required testing tools
-apt-get install -y postfix mailutils libsasl2-2 ca-certificates libsasl2-modules
-
-# Setup postfix config
-sed -ie "/^relayhost/d" /etc/postfix/main.cf
-cat <<EOF >> /etc/postfix/main.cf
-relayhost = [smtp.gmail.com]:587
-smtp_sasl_auth_enable = yes
-smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd
-smtp_sasl_security_options = noanonymous
-smtp_tls_CAfile = /etc/postfix/cacert.pem
-smtp_use_tls = yes
-EOF
-
-
-
-# Setup gmail user auth for postfix
-echo "[smtp.gmail.com]:587 ${EMAIL_DISTRO}:${PASSWORD}" > /etc/postfix/sasl_passwd
-chmod 400 /etc/postfix/sasl_passwd
-postmap /etc/postfix/sasl_passwd
-
-# Import Thawte CA cert into postfix
-cat /etc/ssl/certs/Thawte_Premium_Server_CA.pem >> /etc/postfix/cacert.pem
-
-# Reload postfix config
-service postfix reload
-
-sed -e "s/EMAIL_DISTRO/$EMAIL_DISTRO/g" send_encrypted_alarm.sh > /var/ossec/send_encrypted_alarm.sh
-
-# Send test email
-echo "Test mail from postfix" | mail -s "Test Postfix" $EMAIL_DISTRO
diff --git a/molecule/builder/tests/test_build_dependencies.py b/molecule/builder/tests/test_build_dependencies.py
--- a/molecule/builder/tests/test_build_dependencies.py
+++ b/molecule/builder/tests/test_build_dependencies.py
@@ -31,6 +31,7 @@ def get_build_directories():
"libssl-dev",
"python-dev",
"python-pip",
+ "secure-delete",
])
def test_build_dependencies(Package, package):
"""
@@ -61,6 +62,15 @@ def test_sass_gem_installed(Command):
assert c.rc == 0
+def test_pip_dependencies_installed(Command):
+ """
+ Ensure the development pip dependencies are installed
+ """
+ c = Command("pip list installed")
+ assert "Flask-Babel" in c.stdout
+ assert c.rc == 0
+
+
@pytest.mark.parametrize("directory", get_build_directories())
def test_build_directories(File, directory):
"""
diff --git a/molecule/builder/tests/test_securedrop_deb_package.py b/molecule/builder/tests/test_securedrop_deb_package.py
--- a/molecule/builder/tests/test_securedrop_deb_package.py
+++ b/molecule/builder/tests/test_securedrop_deb_package.py
@@ -47,6 +47,23 @@ def get_deb_packages():
deb_packages = get_deb_packages()
+def get_deb_tags():
+ """
+    Helper function to build an array of (package, tag) tuples
+    for lintian.
+ """
+ deb_tags = []
+
+ for deb in get_deb_packages():
+ for tag in securedrop_test_vars.lintian_tags:
+ deb_tags.append((deb, tag))
+
+ return deb_tags
+
+
+deb_tags = get_deb_tags()
+
+
@pytest.mark.parametrize("deb", deb_packages)
def test_build_deb_packages(File, deb):
"""
@@ -123,47 +140,46 @@ def test_deb_package_control_fields_homepage(File, Command, deb):
@pytest.mark.parametrize("deb", deb_packages)
-def test_deb_package_contains_no_update_dependencies_file(File, Command, deb):
+def test_deb_package_contains_no_config_file(File, Command, deb):
"""
- Ensures the update_python_dependencies script is not shipped via the
- Debian packages.
+ Ensures the `securedrop-app-code` package does not ship a `config.py`
+ file. Doing so would clobber the site-specific changes made via Ansible.
+
+ Somewhat lazily checking all deb packages, rather than just the app-code
+ package, but it accomplishes the same in a DRY manner.
"""
deb_package = File(deb.format(
securedrop_test_vars.securedrop_version))
- # Using `dpkg-deb` but `lintian --tag package-installs-python-bytecode`
- # would be cleaner. Will defer to adding lintian tests later.
c = Command("dpkg-deb --contents {}".format(deb_package.path))
- assert not re.search("^.*update_python_dependencies$", c.stdout, re.M)
+ assert not re.search("^.*config\.py$", c.stdout, re.M)
@pytest.mark.parametrize("deb", deb_packages)
-def test_deb_package_contains_no_pyc_files(File, Command, deb):
+def test_deb_package_contains_pot_file(File, Command, deb):
"""
- Ensures no .pyc files are shipped via the Debian packages.
+ Ensures the `securedrop-app-code` package has the
+ messages.pot file
"""
deb_package = File(deb.format(
securedrop_test_vars.securedrop_version))
- # Using `dpkg-deb` but `lintian --tag package-installs-python-bytecode`
- # would be cleaner. Will defer to adding lintian tests later.
c = Command("dpkg-deb --contents {}".format(deb_package.path))
- assert not re.search("^.*\.pyc$", c.stdout, re.M)
+ # Only relevant for the securedrop-app-code package:
+ if "securedrop-app-code" in deb_package.path:
+ assert re.search("^.*messages.pot$", c.stdout, re.M)
@pytest.mark.parametrize("deb", deb_packages)
-def test_deb_package_contains_no_config_file(File, Command, deb):
+def test_deb_package_contains_mo_file(File, Command, deb):
"""
- Ensures the `securedrop-app-code` package does not ship a `config.py`
- file. Doing so would clobber the site-specific changes made via Ansible.
-
- Somewhat lazily checking all deb packages, rather than just the app-code
- package, but it accomplishes the same in a DRY manner.
+ Ensures the `securedrop-app-code` package has at least one
+ compiled mo file.
"""
deb_package = File(deb.format(
securedrop_test_vars.securedrop_version))
- # Using `dpkg-deb` but `lintian --tag package-installs-python-bytecode`
- # would be cleaner. Will defer to adding lintian tests later.
c = Command("dpkg-deb --contents {}".format(deb_package.path))
- assert not re.search("^.*config\.py$", c.stdout, re.M)
+ # Only relevant for the securedrop-app-code package:
+ if "securedrop-app-code" in deb_package.path:
+ assert re.search("^.*messages\.mo$", c.stdout, re.M)
@pytest.mark.parametrize("deb", deb_packages)
@@ -216,3 +232,31 @@ def test_deb_package_contains_css(File, Command, deb):
assert re.search("^.*\./var/www/securedrop/static/"
"css/{}.css.map$".format(css_type), c.stdout,
re.M)
+
+
[email protected]("deb, tag", deb_tags)
+def test_deb_package_lintian(File, Command, deb, tag):
+ """
+ Ensures lintian likes our Debian packages.
+ """
+ deb_package = File(deb.format(
+ securedrop_test_vars.securedrop_version))
+ c = Command("""lintian --tags {} --no-tag-display-limit {}""".format(
+ tag, deb_package.path))
+ assert len(c.stdout) == 0
+
[email protected]("deb", deb_packages)
+def test_deb_app_package_contains_https_validate_dir(host, deb):
+ """
+    Ensures the `securedrop-app-code` package ships with the '.well-known'
+    directory used for HTTPS certificate validation
+ """
+ deb_package = host.file(deb.format(
+ securedrop_test_vars.securedrop_version))
+
+ # Only relevant for the securedrop-app-code package:
+ if "securedrop-app-code" in deb_package.path:
+ c = host.run("dpkg-deb --contents {}".format(deb_package.path))
+ # static/gen/ directory should exist
+ assert re.search("^.*\./var/www/securedrop/"
+ ".well-known/$", c.stdout, re.M)
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml
--- a/molecule/builder/tests/vars.yml
+++ b/molecule/builder/tests/vars.yml
@@ -1,5 +1,5 @@
---
-securedrop_version: "0.4.4"
+securedrop_version: "0.5"
ossec_version: "2.8.2"
keyring_version: "0.1.1"
@@ -23,3 +23,10 @@ build_deb_packages:
- /tmp/build/ossec-server-{ossec_version}-amd64.deb
- /tmp/build/ossec-agent-{ossec_version}-amd64.deb
- /tmp/build/securedrop-keyring-{keyring_version}+{securedrop_version}-amd64.deb
+
+lintian_tags:
+ # - non-standard-file-perm
+ - package-contains-vcs-control-file
+ - package-installs-python-bytecode
+ # - wrong-file-owner-uid-or-gid
+
diff --git a/securedrop/test b/securedrop/test
new file mode 100755
--- /dev/null
+++ b/securedrop/test
@@ -0,0 +1,31 @@
+#!/bin/bash
+set -euo pipefail
+set -x
+export DISPLAY=:1
+Xvfb :1 -screen 0 1024x768x24 -ac +extension GLX +render -noreset &
+haveged &
+redis-server &
+
+echo -n 4096 > /proc/sys/kernel/random/write_wakeup_threshold
+
+rm /dev/random
+ln -s /dev/urandom /dev/random
+
+touch tests/log/firefox.log
+function cleanup {
+ cp tests/log/firefox.log /tmp/test-results/logs/
+}
+trap cleanup EXIT
+
+mkdir -p "/tmp/test-results/logs"
+
+export PAGE_LAYOUT_LOCALES="en_US,fr_FR"
+pytest \
+ --page-layout \
+ --durations 10 \
+ --junitxml=/tmp/test-results/junit.xml \
+ --cov-report html:/tmp/test-results/cov_html \
+ --cov-report xml:/tmp/test-results/cov.xml \
+ --cov-report annotate:/tmp/test-results/cov_annotate \
+ --cov=. \
+ "$@"
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
--- a/securedrop/tests/conftest.py
+++ b/securedrop/tests/conftest.py
@@ -3,7 +3,9 @@
import shutil
import signal
import subprocess
+import logging
+import gnupg
import psutil
import pytest
@@ -16,6 +18,14 @@
# in order to isolate the test vars from prod vars.
TEST_WORKER_PIDFILE = '/tmp/securedrop_test_worker.pid'
+# Quiet down gnupg output. (See Issue #2595)
+gnupg_logger = logging.getLogger(gnupg.__name__)
+gnupg_logger.setLevel(logging.ERROR)
+valid_levels = {'INFO': logging.INFO, 'DEBUG': logging.DEBUG}
+gnupg_logger.setLevel(
+ valid_levels.get(os.environ.get('GNUPG_LOG_LEVEL', None), logging.ERROR)
+)
+
def pytest_addoption(parser):
parser.addoption("--page-layout", action="store_true",
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -21,9 +21,10 @@
from selenium.webdriver.support import expected_conditions
os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
import db
import journalist
-import source
+from source_app import create_app
import tests.utils.env as env
LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log'))
@@ -41,7 +42,7 @@ def __call__(self, driver):
return True
-class FunctionalTest():
+class FunctionalTest(object):
def _unused_port(self):
s = socket.socket()
@@ -80,9 +81,9 @@ def _prepare_webdriver(self):
log_file.flush()
return firefox_binary.FirefoxBinary(log_file=log_file)
- def setup(self):
+ def setup(self, session_expiration=30):
# Patch the two-factor verification to avoid intermittent errors
- self.patcher = mock.patch('journalist.Journalist.verify_token')
+ self.patcher = mock.patch('db.Journalist.verify_token')
self.mock_journalist_verify_token = self.patcher.start()
self.mock_journalist_verify_token.return_value = True
@@ -98,6 +99,9 @@ def setup(self):
self.source_location = "http://localhost:%d" % source_port
self.journalist_location = "http://localhost:%d" % journalist_port
+ # Allow custom session expiration lengths
+ self.session_expiration = session_expiration
+
def start_source_server():
# We call Random.atfork() here because we fork the source and
# journalist server from the main Python process we use to drive
@@ -106,7 +110,12 @@ def start_source_server():
# is a problem because they would produce identical output if we
# didn't re-seed them after forking.
Random.atfork()
- source.app.run(
+
+ config.SESSION_EXPIRATION_MINUTES = self.session_expiration
+
+ source_app = create_app(config)
+
+ source_app.run(
port=source_port,
debug=True,
use_reloader=False,
@@ -158,7 +167,8 @@ def start_journalist_server():
self.driver.set_window_position(0, 0)
self.driver.set_window_size(1024, 768)
- self.secret_message = 'blah blah blah'
+ self.secret_message = ('These documents outline a major government '
+ 'invasion of privacy.')
def teardown(self):
self.patcher.stop()
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -28,7 +28,7 @@ def _get_submission_content(self, file_url, raw_content):
return content
- def _try_login_user(self, username, password, token):
+ def _input_text_in_login_form(self, username, password, token):
self.driver.get(self.journalist_location + "/login")
username_field = self.driver.find_element_by_css_selector(
'input[name="username"]')
@@ -42,6 +42,9 @@ def _try_login_user(self, username, password, token):
'input[name="token"]')
token_field.send_keys(token)
+ def _try_login_user(self, username, password, token):
+ self._input_text_in_login_form(username, password, token)
+
submit_button = self.driver.find_element_by_css_selector(
'button[type=submit]')
submit_button.click()
@@ -177,8 +180,9 @@ def _admin_adds_a_user(self):
# Successfully verifying the code should redirect to the admin
# interface, and flash a message indicating success
flashed_msgs = self.driver.find_elements_by_css_selector('.flash')
- assert (("Two-factor token successfully verified for user"
- " {}!").format(self.new_user['username']) in
+ assert (("Token in two-factor authentication "
+ "accepted for user {}.").format(
+ self.new_user['username']) in
[el.text for el in flashed_msgs])
@screenshots
@@ -367,8 +371,7 @@ def found_sources():
reset_pw_btn.click()
def update_password_success():
- assert ('The password was successfully updated' in
- self.driver.page_source)
+ assert 'Password updated.' in self.driver.page_source
# Wait until page refreshes to avoid causing a broken pipe error (#623)
self.wait_for(update_password_success)
@@ -388,9 +391,11 @@ def _journalist_checks_messages(self):
code_names = self.driver.find_elements_by_class_name('code-name')
assert 1 == len(code_names)
- # There should be a "1 unread" span in the sole collection entry
- unread_span = self.driver.find_element_by_css_selector('span.unread')
- assert "1 unread" in unread_span.text
+ if not hasattr(self, 'accept_languages'):
+ # There should be a "1 unread" span in the sole collection entry
+ unread_span = self.driver.find_element_by_css_selector(
+ 'span.unread')
+ assert "1 unread" in unread_span.text
@screenshots
def _journalist_stars_and_unstars_single_message(self):
@@ -420,11 +425,17 @@ def _journalist_selects_all_sources_then_selects_none(self):
for checkbox in checkboxes:
assert checkbox.is_selected() is False
- @screenshots
- def _journalist_downloads_message(self):
+ def _journalist_selects_the_first_source(self):
self.driver.find_element_by_css_selector(
'#un-starred-source-link-1').click()
+ def _journalist_selects_documents_to_download(self):
+ self.driver.find_element_by_id('select_all').click()
+
+ @screenshots
+ def _journalist_downloads_message(self):
+ self._journalist_selects_the_first_source()
+
submissions = self.driver.find_elements_by_css_selector(
'#submissions a')
assert 1 == len(submissions)
@@ -454,26 +465,41 @@ def cookie_string_from_selenium_cookies(cookies):
decrypted_submission)
assert self.secret_message == submission
- def _journalist_sends_reply_to_source(self):
+ def _journalist_composes_reply(self):
+ reply_text = ('Thanks for the documents. Can you submit more '
+ 'information about the main program?')
+ self.wait_for(lambda: self.driver.find_element_by_id(
+ 'reply-text-field'
+ ), timeout=60)
self.driver.find_element_by_id('reply-text-field').send_keys(
- 'Nice docs')
+ reply_text
+ )
+ def _journalist_sends_reply_to_source(self):
+ self._journalist_composes_reply()
self.driver.find_element_by_id('reply-button').click()
- assert "Thanks! Your reply has been stored." in self.driver.page_source
+ if not hasattr(self, 'accept_languages'):
+ assert ("Thanks. Your reply has been stored." in
+ self.driver.page_source)
def _visit_edit_account(self):
edit_account_link = self.driver.find_element_by_id(
'link-edit-account')
edit_account_link.click()
- def _visit_edit_hotp_secret(self):
- hotp_reset_button = self.driver.find_elements_by_css_selector(
- '#reset-two-factor-hotp')[0]
- assert ('/account/reset-2fa-hotp' in
- hotp_reset_button.get_attribute('action'))
+ def _visit_edit_secret(self, type):
+ reset_form = self.driver.find_elements_by_css_selector(
+ '#reset-two-factor-' + type)[0]
+ assert ('/account/reset-2fa-' + type in
+ reset_form.get_attribute('action'))
- hotp_reset_button.click()
+ reset_button = self.driver.find_elements_by_css_selector(
+ '#button-reset-two-factor-' + type)[0]
+ reset_button.click()
+
+ def _visit_edit_hotp_secret(self):
+ self._visit_edit_secret('hotp')
def _set_hotp_secret(self):
hotp_secret_field = self.driver.find_elements_by_css_selector(
@@ -484,11 +510,7 @@ def _set_hotp_secret(self):
submit_button.click()
def _visit_edit_totp_secret(self):
- totp_reset_button = self.driver.find_elements_by_css_selector(
- '#reset-two-factor-totp')[0]
- assert ('/account/reset-2fa-totp' in
- totp_reset_button.get_attribute('action'))
- totp_reset_button.click()
+ self._visit_edit_secret('totp')
def _admin_visits_add_user(self):
add_user_btn = self.driver.find_element_by_css_selector(
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -1,4 +1,5 @@
import tempfile
+import time
from selenium.webdriver.common.action_chains import ActionChains
from step_helpers import screenshots
@@ -14,8 +15,7 @@ def _source_visits_source_homepage(self):
assert ("SecureDrop | Protecting Journalists and Sources" ==
self.driver.title)
- @screenshots
- def _source_chooses_to_submit_documents(self):
+ def _source_clicks_submit_documents_on_homepage(self):
# First move the cursor to a known position in case it happens to
# be hovering over one of the buttons we are testing below.
header_image = self.driver.find_element_by_css_selector('.header')
@@ -44,11 +44,34 @@ def _source_chooses_to_submit_documents(self):
# The source clicks the submit button.
submit_button.click()
+ @screenshots
+ def _source_chooses_to_submit_documents(self):
+ self._source_clicks_submit_documents_on_homepage()
+
codename = self.driver.find_element_by_css_selector('#codename')
assert len(codename.text) > 0
self.source_name = codename.text
+ def _source_shows_codename(self):
+ content = self.driver.find_element_by_id('codename-hint-content')
+ assert not content.is_displayed()
+ self.driver.find_element_by_id('codename-hint-show').click()
+ assert content.is_displayed()
+ content_content = self.driver.find_element_by_css_selector(
+ '#codename-hint-content p')
+ assert content_content.text == self.source_name
+
+ def _source_hides_codename(self):
+ content = self.driver.find_element_by_id('codename-hint-content')
+ assert content.is_displayed()
+ self.driver.find_element_by_id('codename-hint-hide').click()
+ assert not content.is_displayed()
+
+ def _source_sees_no_codename(self):
+ codename = self.driver.find_elements_by_css_selector('.code-reminder')
+ assert len(codename) == 0
+
@screenshots
def _source_chooses_to_login(self):
self.driver.find_element_by_id('login-button').click()
@@ -64,8 +87,9 @@ def _source_hits_cancel_at_login_page(self):
self.driver.get(self.source_location)
- assert ("SecureDrop | Protecting Journalists and Sources" ==
- self.driver.title)
+ if not hasattr(self, 'accept_languages'):
+ assert ("SecureDrop | Protecting Journalists and Sources" ==
+ self.driver.title)
@screenshots
def _source_proceeds_to_login(self):
@@ -76,8 +100,18 @@ def _source_proceeds_to_login(self):
continue_button = self.driver.find_element_by_id('login')
continue_button.click()
- assert ("SecureDrop | Protecting Journalists and Sources" ==
- self.driver.title)
+ if not hasattr(self, 'accept_languages'):
+ assert ("SecureDrop | Protecting Journalists and Sources" ==
+ self.driver.title)
+ # Check that we've logged in
+
+ replies = self.driver.find_elements_by_id("replies")
+ assert len(replies) == 1
+
+ def _source_enters_codename_in_login_form(self):
+ codename_input = self.driver.find_element_by_id(
+ 'login-with-existing-codename')
+ codename_input.send_keys('ascension hypertext concert synopses')
@screenshots
def _source_hits_cancel_at_submit_page(self):
@@ -142,18 +176,22 @@ def _source_submits_a_file(self):
@screenshots
def _source_submits_a_message(self):
- text_box = self.driver.find_element_by_css_selector('[name=msg]')
- # send_keys = type into text box
- text_box.send_keys(self.secret_message)
-
- submit_button = self.driver.find_element_by_id('submit-doc-button')
- submit_button.click()
+ self._source_enters_text_in_message_field()
+ self._source_clicks_submit_button_on_submission_page()
if not hasattr(self, 'accept_languages'):
notification = self.driver.find_element_by_css_selector(
'.success')
assert 'Thank' in notification.text
+ def _source_enters_text_in_message_field(self):
+ text_box = self.driver.find_element_by_css_selector('[name=msg]')
+ text_box.send_keys(self.secret_message)
+
+ def _source_clicks_submit_button_on_submission_page(self):
+ submit_button = self.driver.find_element_by_id('submit-doc-button')
+ submit_button.click()
+
@screenshots
def _source_deletes_a_journalist_reply(self):
# Get the reply filename so we can use IDs to select the delete buttons
@@ -194,3 +232,13 @@ def _source_tor2web_warning(self):
def _source_why_journalist_key(self):
self.driver.get(self.source_location + "/why-journalist-key")
+
+ def _source_waits_for_session_to_timeout(self, session_length_minutes):
+ time.sleep(session_length_minutes * 60 + 0.1)
+
+ def _source_sees_session_timeout_message(self):
+ notification = self.driver.find_element_by_css_selector('.important')
+
+ if not hasattr(self, 'accept_languages'):
+ expected_text = 'Your session timed out due to inactivity.'
+ assert expected_text in notification.text
diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source.py
@@ -0,0 +1,19 @@
+import source_navigation_steps
+import functional_test
+
+
+class TestSourceInterface(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def test_lookup_codename_hint(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_shows_codename()
+ self._source_hides_codename()
+ self._source_logs_out()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_proceeds_to_login()
+ self._source_sees_no_codename()
diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py
--- a/securedrop/tests/functional/test_source_notfound.py
+++ b/securedrop/tests/functional/test_source_notfound.py
@@ -2,7 +2,7 @@
import functional_test
-class TestSourceInterfaceBannerWarnings(
+class TestSourceInterfaceNotFound(
functional_test.FunctionalTest,
source_navigation_steps.SourceNavigationSteps):
diff --git a/securedrop/tests/functional/test_source_session_timeout.py b/securedrop/tests/functional/test_source_session_timeout.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/functional/test_source_session_timeout.py
@@ -0,0 +1,26 @@
+import source_navigation_steps
+import functional_test
+
+
+class TestSourceSessions(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps):
+
+ def setup(self):
+ # The session expiration here cannot be set to -1
+ # as it will trigger an exception in /create.
+ # Instead, we pick a 1-2s value to allow the account
+ # to be generated.
+ self.session_length_minutes = 0.03
+ super(TestSourceSessions, self).setup(
+ session_expiration=self.session_length_minutes)
+
+ def test_source_session_timeout(self):
+ self._source_visits_source_homepage()
+ self._source_clicks_submit_documents_on_homepage()
+ self._source_continues_to_submit_page()
+ self._source_waits_for_session_to_timeout(
+ self.session_length_minutes)
+ self._source_enters_text_in_message_field()
+ self._source_clicks_submit_button_on_submission_page()
+ self._source_sees_session_timeout_message()
diff --git a/securedrop/tests/functional/submission_not_in_memory.py b/securedrop/tests/functional/test_submission_not_in_memory.py
similarity index 88%
rename from securedrop/tests/functional/submission_not_in_memory.py
rename to securedrop/tests/functional/test_submission_not_in_memory.py
--- a/securedrop/tests/functional/submission_not_in_memory.py
+++ b/securedrop/tests/functional/test_submission_not_in_memory.py
@@ -1,22 +1,21 @@
-from unittest import TestCase
from functional_test import FunctionalTest
import subprocess
from source_navigation_steps import SourceNavigationSteps
import os
+import pytest
import getpass
import re
-from step_helpers import screenshots
-class SubmissionNotInMemoryTest(TestCase, FunctionalTest,
+class TestSubmissionNotInMemory(FunctionalTest,
SourceNavigationSteps):
- def setUp(self):
+ def setup(self):
self.devnull = open('/dev/null', 'r')
- FunctionalTest.setUp(self)
+ FunctionalTest.setup(self)
- def tearDown(self):
- FunctionalTest.tearDown(self)
+ def teardown(self):
+ FunctionalTest.teardown(self)
def _memory_dump(self, pid):
core_dump_base_name = '/tmp/core_dump'
@@ -30,12 +29,13 @@ def _memory_dump(self, pid):
with open(core_dump_file_name, 'r') as fp:
return fp.read()
finally:
+ pass
os.remove(core_dump_file_name)
def _num_strings_in(self, needle, haystack):
return sum(1 for _ in re.finditer(re.escape(needle), haystack))
- @screenshots
+ @pytest.mark.xfail()
def test_message_is_not_retained_in_memory(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -50,7 +50,7 @@ def test_message_is_not_retained_in_memory(self):
assert secrets_in_memory < 1
- @screenshots
+ @pytest.mark.xfail()
def test_file_upload_is_not_retained_in_memory(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
diff --git a/securedrop/tests/i18n/journalist.desktop.in b/securedrop/tests/i18n/journalist.desktop.in
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/i18n/journalist.desktop.in
@@ -0,0 +1,10 @@
+#!/usr/bin/env xdg-open
+
+[Desktop Entry]
+Version=1.0
+Type=Application
+Terminal=false
+Categories=Network;
+Name=SecureDrop Journalist Interface
+Icon=securedrop_icon.png
+Exec=/usr/local/bin/tor-browser journalist.onion
diff --git a/securedrop/tests/i18n/source.desktop.in b/securedrop/tests/i18n/source.desktop.in
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/i18n/source.desktop.in
@@ -0,0 +1,10 @@
+#!/usr/bin/env xdg-open
+
+[Desktop Entry]
+Version=1.0
+Type=Application
+Terminal=false
+Categories=Network;
+Name=SecureDrop Source Interfaces
+Icon=securedrop_icon.png
+Exec=/usr/local/bin/tor-browser source.onion
diff --git a/securedrop/tests/pages-layout/functional_test.py b/securedrop/tests/pages-layout/functional_test.py
--- a/securedrop/tests/pages-layout/functional_test.py
+++ b/securedrop/tests/pages-layout/functional_test.py
@@ -30,11 +30,10 @@
def list_locales():
- d = os.path.join(dirname(__file__), '..', '..', 'translations')
- locales = ['en_US']
- if os.path.isdir(d):
- files = os.listdir(d)
- locales.extend([f for f in files if f != 'messages.pot'])
+ if 'PAGE_LAYOUT_LOCALES' in os.environ:
+ locales = os.environ['PAGE_LAYOUT_LOCALES'].split(',')
+ else:
+ locales = ['en_US']
return locales
@@ -59,6 +58,7 @@ def webdriver_fixture(self, request):
self.driver.quit()
def _screenshot(self, filename):
+ self.driver.set_window_size(1024, 500) # Trim size of images for docs
self.driver.save_screenshot(os.path.join(self.log_dir, filename))
def _javascript_toggle(self):
diff --git a/securedrop/tests/pages-layout/test_journalist.py b/securedrop/tests/pages-layout/test_journalist.py
--- a/securedrop/tests/pages-layout/test_journalist.py
+++ b/securedrop/tests/pages-layout/test_journalist.py
@@ -103,7 +103,8 @@ def test_admin(self):
def test_admin_new_user_two_factor_hotp(self):
self._admin_logs_in()
self._admin_visits_admin_interface()
- self._admin_creates_a_user(hotp='123456')
+ valid_hotp = '1234567890123456789012345678901234567890'
+ self._admin_creates_a_user(hotp=valid_hotp)
self._screenshot('journalist-admin_new_user_two_factor_hotp.png')
def test_admin_new_user_two_factor_totp(self):
@@ -169,6 +170,18 @@ def test_col_javascript(self):
self._journalist_visits_col()
self._screenshot('journalist-col_javascript.png')
+ def test_journalist_composes_reply(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_checks_messages()
+ self._journalist_downloads_message()
+ self._journalist_composes_reply()
+ self._screenshot('journalist-composes_reply.png')
+
def test_delete_none(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -246,6 +259,15 @@ def test_edit_account_admin(self):
self._admin_visits_edit_user()
self._screenshot('journalist-edit_account_admin.png')
+ def test_index_no_documents_admin(self):
+ self._admin_logs_in()
+ self._screenshot('journalist-admin_index_no_documents.png')
+
+ def test_admin_interface_index(self):
+ self._admin_logs_in()
+ self._admin_visits_admin_interface()
+ self._screenshot('journalist-admin_interface_index.png')
+
def test_flag(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -282,6 +304,14 @@ def test_index_javascript(self):
self._source_logs_out()
self._journalist_logs_in()
self._screenshot('journalist-index_javascript.png')
+ self._journalist_selects_the_first_source()
+ self._journalist_selects_documents_to_download()
+ self._screenshot('journalist-clicks_on_source_and_selects_documents.png')
+
+ def test_index_entered_text(self):
+ self._input_text_in_login_form('jane_doe', 'my password is long',
+ '117264')
+ self._screenshot('journalist-index_with_text.png')
def test_fail_to_visit_admin(self):
self._journalist_visits_admin()
diff --git a/securedrop/tests/pages-layout/test_source.py b/securedrop/tests/pages-layout/test_source.py
--- a/securedrop/tests/pages-layout/test_source.py
+++ b/securedrop/tests/pages-layout/test_source.py
@@ -43,11 +43,24 @@ def test_lookup(self):
self._source_submits_a_file()
self._screenshot('source-lookup.png')
+ def test_lookup_shows_codename(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_shows_codename()
+ self._screenshot('source-lookup-shows-codename.png')
+
def test_login(self):
self._source_visits_source_homepage()
self._source_chooses_to_login()
self._screenshot('source-login.png')
+ def test_enters_text_in_login_form(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_enters_codename_in_login_form()
+ self._screenshot('source-enter-codename-in-login.png')
+
def test_use_tor_browser(self):
self._source_visits_use_tor()
self._screenshot('source-use_tor_browser.png')
@@ -65,6 +78,13 @@ def test_logout_flashed_message(self):
self._source_logs_out()
self._screenshot('source-logout_flashed_message.png')
+ def test_submission_entered_text(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_enters_text_in_message_field()
+ self._screenshot('source-submission_entered_text.png')
+
def test_next_submission_flashed_message(self):
self._source_visits_source_homepage()
self._source_chooses_to_submit_documents()
@@ -73,6 +93,38 @@ def test_next_submission_flashed_message(self):
self._source_submits_a_message()
self._screenshot('source-next_submission_flashed_message.png')
+ def test_source_checks_for_reply(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._journalist_checks_messages()
+ self._journalist_downloads_message()
+ self._journalist_sends_reply_to_source()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_proceeds_to_login()
+ self._screenshot('source-checks_for_reply.png')
+ self._source_deletes_a_journalist_reply()
+ self._screenshot('source-deletes_reply.png')
+
+ def test_source_flagged(self):
+ self._source_visits_source_homepage()
+ self._source_chooses_to_submit_documents()
+ self._source_continues_to_submit_page()
+ self._source_submits_a_file()
+ self._source_logs_out()
+ self._journalist_logs_in()
+ self._source_delete_key()
+ self._journalist_visits_col()
+ self._journalist_flags_source()
+ self._source_visits_source_homepage()
+ self._source_chooses_to_login()
+ self._source_proceeds_to_login()
+ self._screenshot('source-flagged.png')
+
def test_notfound(self):
self._source_not_found()
self._screenshot('source-notfound.png')
@@ -84,3 +136,25 @@ def test_tor2web_warning(self):
def test_why_journalist_key(self):
self._source_why_journalist_key()
self._screenshot('source-why_journalist_key.png')
+
+
[email protected]
+class TestSourceSessionLayout(
+ functional_test.FunctionalTest,
+ source_navigation_steps.SourceNavigationSteps,
+ journalist_navigation_steps.JournalistNavigationSteps):
+
+ def setup(self):
+ self.session_length_minutes = 0.03
+ super(TestSourceSessionLayout, self).setup(
+ session_expiration=self.session_length_minutes)
+
+ def test_source_session_timeout(self):
+ self._source_visits_source_homepage()
+ self._source_clicks_submit_documents_on_homepage()
+ self._source_continues_to_submit_page()
+ self._source_waits_for_session_to_timeout(self.session_length_minutes)
+ self._source_enters_text_in_message_field()
+ self._source_clicks_submit_button_on_submission_page()
+ self._source_sees_session_timeout_message()
+ self._screenshot('source-session_timeout.png')
diff --git a/securedrop/tests/test_2fa.py b/securedrop/tests/test_2fa.py
--- a/securedrop/tests/test_2fa.py
+++ b/securedrop/tests/test_2fa.py
@@ -33,7 +33,7 @@ def _login_admin(self, token=None):
"""
if token is None:
token = self.admin.totp.now()
- self.client.post(url_for('login'),
+ self.client.post(url_for('main.login'),
data=dict(username=self.admin.username,
password=self.admin_pw,
token=token))
@@ -43,7 +43,7 @@ def _login_user(self, token=None):
"""
if token is None:
token = self.user.totp.now()
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password=self.user_pw,
token=token))
@@ -55,7 +55,7 @@ def test_totp_reuse_protections(self):
"""
token = self.user.totp.now()
resp = self._login_user(token)
- self.assertRedirects(resp, url_for('index'))
+ self.assertRedirects(resp, url_for('main.index'))
resp = self._login_user(token)
self.assert200(resp)
@@ -77,22 +77,24 @@ def test_bad_token_fails_to_verify_on_admin_new_user_two_factor_page(self):
# Create and submit an invalid 2FA token
invalid_token = u'000000'
- resp = self.client.post(url_for('admin_new_user_two_factor',
+ resp = self.client.post(url_for('admin.new_user_two_factor',
uid=self.admin.id),
data=dict(token=invalid_token))
self.assert200(resp)
- self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+ self.assertMessageFlashed(
+ 'Could not verify token in two-factor authentication.', 'error')
# last_token should be set to the invalid token we just tried to use
self.assertEqual(self.admin.last_token, invalid_token)
# Submit the same invalid token again
- resp = self.client.post(url_for('admin_new_user_two_factor',
+ resp = self.client.post(url_for('admin.new_user_two_factor',
uid=self.admin.id),
data=dict(token=invalid_token))
# A flashed message should appear
- self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+ self.assertMessageFlashed(
+ 'Could not verify token in two-factor authentication.', 'error')
def test_bad_token_fails_to_verify_on_new_user_two_factor_page(self):
# Regression test
@@ -101,20 +103,22 @@ def test_bad_token_fails_to_verify_on_new_user_two_factor_page(self):
# Create and submit an invalid 2FA token
invalid_token = u'000000'
- resp = self.client.post(url_for('account_new_two_factor'),
+ resp = self.client.post(url_for('account.new_two_factor'),
data=dict(token=invalid_token))
self.assert200(resp)
- self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+ self.assertMessageFlashed(
+ 'Could not verify token in two-factor authentication.', 'error')
# last_token should be set to the invalid token we just tried to use
self.assertEqual(self.user.last_token, invalid_token)
# Submit the same invalid token again
- resp = self.client.post(url_for('account_new_two_factor'),
+ resp = self.client.post(url_for('account.new_two_factor'),
data=dict(token=invalid_token))
# A flashed message should appear
- self.assertMessageFlashed('Two-factor token failed to verify', 'error')
+ self.assertMessageFlashed(
+ 'Could not verify token in two-factor authentication.', 'error')
@classmethod
def tearDownClass(cls):
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
--- a/securedrop/tests/test_crypto_util.py
+++ b/securedrop/tests/test_crypto_util.py
@@ -21,7 +21,7 @@ def tearDown(self):
utils.env.teardown()
def test_word_list_does_not_contain_empty_strings(self):
- self.assertNotIn('', (crypto_util.words
+ self.assertNotIn('', (crypto_util._get_wordlist('en')
+ crypto_util.nouns
+ crypto_util.adjectives))
@@ -133,14 +133,29 @@ def test_basic_encrypt_then_decrypt_multiple_recipients(self):
self.assertEqual(message, plaintext_)
- def test_genrandomid(self):
- id = crypto_util.genrandomid()
+ def verify_genrandomid(self, locale):
+ id = crypto_util.genrandomid(locale=locale)
id_words = id.split()
self.assertEqual(id, crypto_util.clean(id))
self.assertEqual(len(id_words), crypto_util.DEFAULT_WORDS_IN_RANDOM_ID)
for word in id_words:
- self.assertIn(word, crypto_util.words)
+ self.assertIn(word, crypto_util._get_wordlist(locale))
+
+ def test_genrandomid_default_locale_is_en(self):
+ self.verify_genrandomid('en')
+
+ def test_get_wordlist(self):
+ locales = []
+ wordlists_path = os.path.join(config.SECUREDROP_ROOT, 'wordlists')
+ for f in os.listdir(wordlists_path):
+ if f.endswith('.txt') and f != 'en.txt':
+ locales.append(f.split('.')[0])
+ wordlist_en = crypto_util._get_wordlist('en')
+ for locale in locales:
+ self.assertNotEqual(wordlist_en, crypto_util._get_wordlist(locale))
+ self.verify_genrandomid(locale)
+ self.assertEqual(wordlist_en, crypto_util._get_wordlist('unknown'))
def test_display_id(self):
id = crypto_util.display_id()
diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_i18n.py
@@ -0,0 +1,283 @@
+# -*- coding: utf-8 -*-
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+import argparse
+import logging
+import os
+import re
+
+from flask import request, session, render_template_string, render_template
+from flask_babel import gettext
+from werkzeug.datastructures import Headers
+
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
+import i18n
+import journalist
+import manage
+import pytest
+import source
+import version
+import utils
+
+
+class TestI18N(object):
+
+ @classmethod
+ def setup_class(cls):
+ utils.env.setup()
+
+ def test_get_supported_locales(self):
+ locales = ['en_US', 'fr_FR']
+ assert ['en_US'] == i18n._get_supported_locales(locales, None, None)
+ locales = ['en_US', 'fr_FR']
+ supported = ['en_US', 'not_found']
+ with pytest.raises(i18n.LocaleNotFound) as excinfo:
+ i18n._get_supported_locales(locales, supported, None)
+ assert "contains ['not_found']" in str(excinfo.value)
+ supported = ['fr_FR']
+ locale = 'not_found'
+ with pytest.raises(i18n.LocaleNotFound) as excinfo:
+ i18n._get_supported_locales(locales, supported, locale)
+ assert "DEFAULT_LOCALE 'not_found'" in str(excinfo.value)
+
+ def verify_i18n(self, app):
+ not_translated = 'code hello i18n'
+ translated_fr = 'code bonjour'
+
+ for accepted in ('unknown', 'en_US'):
+ headers = Headers([('Accept-Language', accepted)])
+ with app.test_request_context(headers=headers):
+ assert not hasattr(request, 'babel_locale')
+ assert not_translated == gettext(not_translated)
+ assert hasattr(request, 'babel_locale')
+ assert render_template_string('''
+ {{ gettext('code hello i18n') }}
+ ''').strip() == not_translated
+
+ for lang in ('fr_FR', 'fr', 'fr-FR'):
+ headers = Headers([('Accept-Language', lang)])
+ with app.test_request_context(headers=headers):
+ assert not hasattr(request, 'babel_locale')
+ assert translated_fr == gettext(not_translated)
+ assert hasattr(request, 'babel_locale')
+ assert render_template_string('''
+ {{ gettext('code hello i18n') }}
+ ''').strip() == translated_fr
+
+ # https://github.com/freedomofpress/securedrop/issues/2379
+ headers = Headers([('Accept-Language',
+ 'en-US;q=0.6,fr_FR;q=0.4,nb_NO;q=0.2')])
+ with app.test_request_context(headers=headers):
+ assert not hasattr(request, 'babel_locale')
+ assert not_translated == gettext(not_translated)
+
+ translated_cn = 'code chinese'
+
+ for lang in ('zh-CN', 'zh-Hans-CN'):
+ headers = Headers([('Accept-Language', lang)])
+ with app.test_request_context(headers=headers):
+ assert not hasattr(request, 'babel_locale')
+ assert translated_cn == gettext(not_translated)
+ assert hasattr(request, 'babel_locale')
+ assert render_template_string('''
+ {{ gettext('code hello i18n') }}
+ ''').strip() == translated_cn
+
+ translated_ar = 'code arabic'
+
+ for lang in ('ar', 'ar-kw'):
+ headers = Headers([('Accept-Language', lang)])
+ with app.test_request_context(headers=headers):
+ assert not hasattr(request, 'babel_locale')
+ assert translated_ar == gettext(not_translated)
+ assert hasattr(request, 'babel_locale')
+ assert render_template_string('''
+ {{ gettext('code hello i18n') }}
+ ''').strip() == translated_ar
+
+ with app.test_client() as c:
+ page = c.get('/login')
+ assert session.get('locale') is None
+ assert not_translated == gettext(not_translated)
+ assert '?l=fr_FR' in page.data
+ assert '?l=en_US' not in page.data
+
+ page = c.get('/login?l=fr_FR',
+ headers=Headers([('Accept-Language', 'en_US')]))
+ assert session.get('locale') == 'fr_FR'
+ assert translated_fr == gettext(not_translated)
+ assert '?l=fr_FR' not in page.data
+ assert '?l=en_US' in page.data
+
+ c.get('/', headers=Headers([('Accept-Language', 'en_US')]))
+ assert session.get('locale') == 'fr_FR'
+ assert translated_fr == gettext(not_translated)
+
+ c.get('/?l=')
+ assert session.get('locale') is None
+ assert not_translated == gettext(not_translated)
+
+ c.get('/?l=en_US', headers=Headers([('Accept-Language', 'fr_FR')]))
+ assert session.get('locale') == 'en_US'
+ assert not_translated == gettext(not_translated)
+
+ c.get('/', headers=Headers([('Accept-Language', 'fr_FR')]))
+ assert session.get('locale') == 'en_US'
+ assert not_translated == gettext(not_translated)
+
+ c.get('/?l=', headers=Headers([('Accept-Language', 'fr_FR')]))
+ assert session.get('locale') is None
+ assert translated_fr == gettext(not_translated)
+
+ c.get('/')
+ assert session.get('locale') is None
+ assert not_translated == gettext(not_translated)
+
+ c.get('/?l=YY_ZZ')
+ assert session.get('locale') is None
+ assert not_translated == gettext(not_translated)
+
+ with app.test_request_context():
+ assert '' == render_template('locales.html')
+
+ with app.test_client() as c:
+ c.get('/')
+ locales = render_template('locales.html')
+ assert '?l=fr_FR' in locales
+ assert '?l=en_US' not in locales
+ c.get('/?l=ar')
+ base = render_template('base.html')
+ assert 'dir="rtl"' in base
+
+ # the canonical locale name is norsk bokmΓ₯l but
+ # this is overriden with just norsk by i18n.NAME_OVERRIDES
+ with app.test_client() as c:
+ c.get('/?l=nb_NO')
+ base = render_template('base.html')
+ assert 'norsk' in base
+ assert 'norsk bo' not in base
+
+ def test_i18n(self):
+ sources = [
+ 'tests/i18n/code.py',
+ 'tests/i18n/template.html',
+ ]
+ kwargs = {
+ 'translations_dir': config.TEMP_DIR,
+ 'mapping': 'tests/i18n/babel.cfg',
+ 'source': sources,
+ 'extract_update': True,
+ 'compile': True,
+ 'verbose': logging.DEBUG,
+ 'version': version.__version__,
+ }
+ args = argparse.Namespace(**kwargs)
+ manage.setup_verbosity(args)
+ manage.translate_messages(args)
+
+ manage.sh("""
+ pybabel init -i {d}/messages.pot -d {d} -l en_US
+
+ pybabel init -i {d}/messages.pot -d {d} -l fr_FR
+ sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code bonjour"/' \
+ {d}/fr_FR/LC_MESSAGES/messages.po
+
+ pybabel init -i {d}/messages.pot -d {d} -l zh_Hans_CN
+ sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code chinese"/' \
+ {d}/zh_Hans_CN/LC_MESSAGES/messages.po
+
+ pybabel init -i {d}/messages.pot -d {d} -l ar
+ sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code arabic"/' \
+ {d}/ar/LC_MESSAGES/messages.po
+
+ pybabel init -i {d}/messages.pot -d {d} -l nb_NO
+ sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code norwegian"/' \
+ {d}/nb_NO/LC_MESSAGES/messages.po
+ """.format(d=config.TEMP_DIR))
+
+ manage.translate_messages(args)
+
+ supported = getattr(config, 'SUPPORTED_LOCALES', None)
+ try:
+ if supported:
+ del config.SUPPORTED_LOCALES
+ for app in (journalist.app, source.app):
+ config.SUPPORTED_LOCALES = [
+ 'en_US', 'fr_FR', 'zh_Hans_CN', 'ar', 'nb_NO']
+ i18n.setup_app(app, translation_dirs=config.TEMP_DIR)
+ self.verify_i18n(app)
+ finally:
+ if supported:
+ config.SUPPORTED_LOCALES = supported
+
+ def test_verify_default_locale_en_us_if_not_defined_in_config(self):
+ DEFAULT_LOCALE = config.DEFAULT_LOCALE
+ try:
+ del config.DEFAULT_LOCALE
+ not_translated = 'code hello i18n'
+ with source.app.test_client() as c:
+ c.get('/')
+ assert not_translated == gettext(not_translated)
+ finally:
+ config.DEFAULT_LOCALE = DEFAULT_LOCALE
+
+ def test_locale_to_rfc_5646(self):
+ assert i18n.locale_to_rfc_5646('en') == 'en'
+ assert i18n.locale_to_rfc_5646('en-US') == 'en'
+ assert i18n.locale_to_rfc_5646('en_US') == 'en'
+ assert i18n.locale_to_rfc_5646('en-us') == 'en'
+ assert i18n.locale_to_rfc_5646('zh-hant') == 'zh-Hant'
+
+ def test_html_en_lang_correct(self):
+ app = journalist.app.test_client()
+ resp = app.get('/', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="en".*>').search(html), html
+
+ app = source.app.test_client()
+ resp = app.get('/', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="en".*>').search(html), html
+
+ # check '/generate' too because '/' uses a different template
+ resp = app.get('/generate', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="en".*>').search(html), html
+
+ def test_html_fr_lang_correct(self):
+ """Check that when the locale is fr_FR the lang property is correct"""
+ app = journalist.app.test_client()
+ resp = app.get('/?l=fr_FR', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="fr".*>').search(html), html
+
+ app = source.app.test_client()
+ resp = app.get('/?l=fr_FR', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="fr".*>').search(html), html
+
+ # check '/generate' too because '/' uses a different template
+ resp = app.get('/generate?l=fr_FR', follow_redirects=True)
+ html = resp.data.decode('utf-8')
+ assert re.compile('<html .*lang="fr".*>').search(html), html
+
+ @classmethod
+ def teardown_class(cls):
+ reload(journalist)
+ reload(source)
diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -344,14 +344,14 @@ def helper_test_reply(self, test_reply, expected_success=True):
for i in range(2):
resp = self.journalist_app.post('/reply', data=dict(
filesystem_id=filesystem_id,
- msg=test_reply
+ message=test_reply
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
if not expected_success:
pass
else:
- self.assertIn("Thanks! Your reply has been stored.", resp.data)
+ self.assertIn("Thanks. Your reply has been stored.", resp.data)
with self.journalist_app as journalist_app:
resp = journalist_app.get(col_url)
@@ -419,11 +419,13 @@ def test_delete_collection(self):
# first, add a source
self.source_app.get('/generate')
self.source_app.post('/create')
- self.source_app.post('/submit', data=dict(
+ resp = self.source_app.post('/submit', data=dict(
msg="This is a test.",
fh=(StringIO(''), ''),
), follow_redirects=True)
+ assert resp.status_code == 200, resp.data.decode('utf-8')
+
resp = self.journalist_app.get('/')
# navigate to the collection page
soup = BeautifulSoup(resp.data, 'html.parser')
@@ -543,7 +545,9 @@ def test_user_change_password(self):
# change password
new_pw = 'another correct horse battery staply long password'
self.journalist_app.post('/account/new-password',
- data=dict(password=new_pw))
+ data=dict(password=new_pw,
+ current_password=self.user_pw,
+ token='mocked'))
# logout
self.journalist_app.get('/logout')
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -5,9 +5,9 @@
import unittest
import zipfile
-from flask import url_for, escape
+from flask import url_for, escape, session
from flask_testing import TestCase
-from mock import patch, ANY, MagicMock
+from mock import patch
from sqlalchemy.orm.exc import StaleDataError
from sqlalchemy.exc import IntegrityError
@@ -18,6 +18,7 @@
Submission)
import db
import journalist
+import journalist_app.utils
import utils
# Smugly seed the RNG for deterministic testing
@@ -49,7 +50,7 @@ def tearDown(self):
@patch('crypto_util.genrandomid', side_effect=['bad', VALID_PASSWORD])
def test_make_password(self, mocked_pw_gen):
- assert journalist._make_password() == VALID_PASSWORD
+ assert journalist_app.utils.make_password() == VALID_PASSWORD
@patch('journalist.app.logger.error')
def test_reply_error_logging(self, mocked_error_logger):
@@ -62,13 +63,14 @@ def test_reply_error_logging(self, mocked_error_logger):
with patch('db.db_session.commit',
side_effect=exception_class(exception_msg)):
- self.client.post(url_for('reply'),
- data={'filesystem_id': filesystem_id, 'msg': '_'})
+ self.client.post(url_for('main.reply'),
+ data={'filesystem_id': filesystem_id,
+ 'message': '_'})
# Notice the "potentially sensitive" exception_msg is not present in
# the log event.
mocked_error_logger.assert_called_once_with(
- "Reply from '{}' (id {}) failed: {}!".format(self.user.username,
+ "Reply from '{}' (ID {}) failed: {}!".format(self.user.username,
self.user.id,
exception_class))
@@ -80,53 +82,54 @@ def test_reply_error_flashed_message(self):
exception_class = StaleDataError
with patch('db.db_session.commit', side_effect=exception_class()):
- self.client.post(url_for('reply'),
- data={'filesystem_id': filesystem_id, 'msg': '_'})
+ self.client.post(url_for('main.reply'),
+ data={'filesystem_id': filesystem_id,
+ 'message': '_'})
self.assertMessageFlashed(
- 'An unexpected error occurred! Please check '
- 'the application logs or inform your adminstrator.', 'error')
+ 'An unexpected error occurred! Please '
+ 'inform your administrator.', 'error')
def test_empty_replies_are_rejected(self):
source, _ = utils.db_helper.init_source()
filesystem_id = source.filesystem_id
self._login_user()
- resp = self.client.post(url_for('reply'),
+ resp = self.client.post(url_for('main.reply'),
data={'filesystem_id': filesystem_id,
- 'msg': ''},
+ 'message': ''},
follow_redirects=True)
- self.assertIn("You cannot send an empty reply!", resp.data)
+ self.assertIn("You cannot send an empty reply.", resp.data)
def test_nonempty_replies_are_accepted(self):
source, _ = utils.db_helper.init_source()
filesystem_id = source.filesystem_id
self._login_user()
- resp = self.client.post(url_for('reply'),
+ resp = self.client.post(url_for('main.reply'),
data={'filesystem_id': filesystem_id,
- 'msg': '_'},
+ 'message': '_'},
follow_redirects=True)
- self.assertNotIn("You cannot send an empty reply!", resp.data)
+ self.assertNotIn("You cannot send an empty reply.", resp.data)
def test_unauthorized_access_redirects_to_login(self):
- resp = self.client.get(url_for('index'))
- self.assertRedirects(resp, url_for('login'))
+ resp = self.client.get(url_for('main.index'))
+ self.assertRedirects(resp, url_for('main.login'))
def test_login_throttle(self):
db.LOGIN_HARDENING = True
try:
for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password='invalid',
token='mocked'))
self.assert200(resp)
self.assertIn("Login failed", resp.data)
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password='invalid',
token='mocked'))
@@ -137,15 +140,21 @@ def test_login_throttle(self):
db.LOGIN_HARDENING = False
def test_login_invalid_credentials(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password='invalid',
token='mocked'))
self.assert200(resp)
self.assertIn("Login failed", resp.data)
+ def test_validate_redirect(self):
+ resp = self.client.post(url_for('main.index'),
+ follow_redirects=True)
+ self.assert200(resp)
+ self.assertIn("Login to access", resp.data)
+
def test_login_valid_credentials(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password=self.user_pw,
token='mocked'),
@@ -155,57 +164,57 @@ def test_login_valid_credentials(self):
self.assertIn("No documents have been submitted!", resp.data)
def test_admin_login_redirects_to_index(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.admin.username,
password=self.admin_pw,
token='mocked'))
- self.assertRedirects(resp, url_for('index'))
+ self.assertRedirects(resp, url_for('main.index'))
def test_user_login_redirects_to_index(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password=self.user_pw,
token='mocked'))
- self.assertRedirects(resp, url_for('index'))
+ self.assertRedirects(resp, url_for('main.index'))
def test_admin_has_link_to_edit_account_page_in_index_page(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.admin.username,
password=self.admin_pw,
token='mocked'),
follow_redirects=True)
edit_account_link = '<a href="{}" id="link-edit-account">'.format(
- url_for('edit_account'))
+ url_for('account.edit'))
self.assertIn(edit_account_link, resp.data)
def test_user_has_link_to_edit_account_page_in_index_page(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password=self.user_pw,
token='mocked'),
follow_redirects=True)
edit_account_link = '<a href="{}" id="link-edit-account">'.format(
- url_for('edit_account'))
+ url_for('account.edit'))
self.assertIn(edit_account_link, resp.data)
def test_admin_has_link_to_admin_index_page_in_index_page(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.admin.username,
password=self.admin_pw,
token='mocked'),
follow_redirects=True)
admin_link = '<a href="{}" id="link-admin-index">'.format(
- url_for('admin_index'))
+ url_for('admin.index'))
self.assertIn(admin_link, resp.data)
def test_user_lacks_link_to_admin_index_page_in_index_page(self):
- resp = self.client.post(url_for('login'),
+ resp = self.client.post(url_for('main.login'),
data=dict(username=self.user.username,
password=self.user_pw,
token='mocked'),
follow_redirects=True)
admin_link = '<a href="{}" id="link-admin-index">'.format(
- url_for('admin_index'))
+ url_for('admin.index'))
self.assertNotIn(admin_link, resp.data)
# WARNING: we are purposely doing something that would not work in
@@ -227,17 +236,17 @@ def _login_user(self):
def test_admin_logout_redirects_to_index(self):
self._login_admin()
- resp = self.client.get(url_for('logout'))
- self.assertRedirects(resp, url_for('index'))
+ resp = self.client.get(url_for('main.logout'))
+ self.assertRedirects(resp, url_for('main.index'))
def test_user_logout_redirects_to_index(self):
self._login_user()
- resp = self.client.get(url_for('logout'))
- self.assertRedirects(resp, url_for('index'))
+ resp = self.client.get(url_for('main.logout'))
+ self.assertRedirects(resp, url_for('main.index'))
def test_admin_index(self):
self._login_admin()
- resp = self.client.get(url_for('admin_index'))
+ resp = self.client.get(url_for('admin.index'))
self.assert200(resp)
self.assertIn("Admin Interface", resp.data)
@@ -246,7 +255,7 @@ def test_admin_delete_user(self):
self.assertNotEqual(Journalist.query.get(self.user.id), None)
self._login_admin()
- resp = self.client.post(url_for('admin_delete_user',
+ resp = self.client.post(url_for('admin.delete_user',
user_id=self.user.id),
follow_redirects=True)
@@ -260,7 +269,7 @@ def test_admin_delete_user(self):
def test_admin_deletes_invalid_user_404(self):
self._login_admin()
invalid_user_pk = max([user.id for user in Journalist.query.all()]) + 1
- resp = self.client.post(url_for('admin_delete_user',
+ resp = self.client.post(url_for('admin.delete_user',
user_id=invalid_user_pk))
self.assert404(resp)
@@ -268,12 +277,12 @@ def test_admin_edits_user_password_success_response(self):
self._login_admin()
resp = self.client.post(
- url_for('admin_new_password', user_id=self.user.id),
+ url_for('admin.new_password', user_id=self.user.id),
data=dict(password=VALID_PASSWORD_2),
follow_redirects=True)
text = resp.data.decode('utf-8')
- assert 'The password was successfully updated!' in text
+ assert 'Password updated.' in text
assert VALID_PASSWORD_2 in text
def test_admin_edits_user_password_error_response(self):
@@ -281,7 +290,7 @@ def test_admin_edits_user_password_error_response(self):
with patch('db.db_session.commit', side_effect=Exception()):
resp = self.client.post(
- url_for('admin_new_password', user_id=self.user.id),
+ url_for('admin.new_password', user_id=self.user.id),
data=dict(password=VALID_PASSWORD_2),
follow_redirects=True)
@@ -291,12 +300,14 @@ def test_admin_edits_user_password_error_response(self):
def test_user_edits_password_success_reponse(self):
self._login_user()
resp = self.client.post(
- url_for('new_password'),
- data=dict(password=VALID_PASSWORD_2),
+ url_for('account.new_password'),
+ data=dict(current_password=self.user_pw,
+ token='mocked',
+ password=VALID_PASSWORD_2),
follow_redirects=True)
text = resp.data.decode('utf-8')
- assert "The password was successfully updated!" in text
+ assert "Password updated." in text
assert VALID_PASSWORD_2 in text
def test_user_edits_password_error_reponse(self):
@@ -304,8 +315,10 @@ def test_user_edits_password_error_reponse(self):
with patch('db.db_session.commit', side_effect=Exception()):
resp = self.client.post(
- url_for('new_password'),
- data=dict(password=VALID_PASSWORD_2),
+ url_for('account.new_password'),
+ data=dict(current_password=self.user_pw,
+ token='mocked',
+ password=VALID_PASSWORD_2),
follow_redirects=True)
assert ('There was an error, and the new password might not have '
@@ -313,10 +326,10 @@ def test_user_edits_password_error_reponse(self):
def test_admin_add_user_when_username_already_in_use(self):
self._login_admin()
- resp = self.client.post(url_for('admin_add_user'),
+ resp = self.client.post(url_for('admin.add_user'),
data=dict(username=self.admin.username,
password=VALID_PASSWORD,
- is_admin=False))
+ is_admin=None))
self.assertIn('That username is already in use', resp.data)
def test_max_password_length(self):
@@ -344,13 +357,12 @@ def test_admin_edits_user_password_too_long_warning(self):
overly_long_password = VALID_PASSWORD + \
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
- resp = self.client.post(
- url_for('admin_new_password', user_id=self.user.id),
- data=dict(username=self.user.username, is_admin=False,
+ self.client.post(
+ url_for('admin.new_password', user_id=self.user.id),
+ data=dict(username=self.user.username, is_admin=None,
password=overly_long_password),
follow_redirects=True)
- print resp.data.decode('utf-8')
self.assertMessageFlashed('You submitted a bad password! '
'Password not changed.', 'error')
@@ -359,8 +371,10 @@ def test_user_edits_password_too_long_warning(self):
overly_long_password = VALID_PASSWORD + \
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
- self.client.post(url_for('new_password'),
- data=dict(password=overly_long_password),
+ self.client.post(url_for('account.new_password'),
+ data=dict(password=overly_long_password,
+ token='mocked',
+ current_password=self.user_pw),
follow_redirects=True)
self.assertMessageFlashed('You submitted a bad password! '
@@ -372,10 +386,10 @@ def test_admin_add_user_password_too_long_warning(self):
overly_long_password = VALID_PASSWORD + \
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
self.client.post(
- url_for('admin_add_user'),
+ url_for('admin.add_user'),
data=dict(username='dellsberg',
password=overly_long_password,
- is_admin=False))
+ is_admin=None))
self.assertMessageFlashed('There was an error with the autogenerated '
'password. User not created. '
@@ -388,17 +402,17 @@ def test_admin_edits_user_invalid_username(self):
new_username = self.admin.username
self.client.post(
- url_for('admin_edit_user', user_id=self.user.id),
- data=dict(username=new_username, is_admin=False))
+ url_for('admin.edit_user', user_id=self.user.id),
+ data=dict(username=new_username, is_admin=None))
- self.assertMessageFlashed('Username "{}" is already taken!'.format(
+ self.assertMessageFlashed('Username "{}" already taken.'.format(
new_username), 'error')
def test_admin_resets_user_hotp(self):
self._login_admin()
old_hotp = self.user.hotp
- resp = self.client.post(url_for('admin_reset_two_factor_hotp'),
+ resp = self.client.post(url_for('admin.reset_two_factor_hotp'),
data=dict(uid=self.user.id, otp_secret=123456))
new_hotp = self.user.hotp
@@ -407,13 +421,13 @@ def test_admin_resets_user_hotp(self):
# Redirect to admin 2FA view
self.assertRedirects(
resp,
- url_for('admin_new_user_two_factor', uid=self.user.id))
+ url_for('admin.new_user_two_factor', uid=self.user.id))
def test_admin_resets_user_hotp_format_non_hexa(self):
self._login_admin()
old_hotp = self.user.hotp.secret
- self.client.post(url_for('admin_reset_two_factor_hotp'),
+ self.client.post(url_for('admin.reset_two_factor_hotp'),
data=dict(uid=self.user.id, otp_secret='ZZ'))
new_hotp = self.user.hotp.secret
@@ -426,7 +440,7 @@ def test_admin_resets_user_hotp_format_odd(self):
self._login_admin()
old_hotp = self.user.hotp.secret
- self.client.post(url_for('admin_reset_two_factor_hotp'),
+ self.client.post(url_for('admin.reset_two_factor_hotp'),
data=dict(uid=self.user.id, otp_secret='Z'))
new_hotp = self.user.hotp.secret
@@ -447,14 +461,13 @@ def test_admin_resets_user_hotp_error(self,
mock_set_hotp_secret.side_effect = TypeError(error_message)
otp_secret = '1234'
- self.client.post(url_for('admin_reset_two_factor_hotp'),
+ self.client.post(url_for('admin.reset_two_factor_hotp'),
data=dict(uid=self.user.id, otp_secret=otp_secret))
new_hotp = self.user.hotp.secret
self.assertEqual(old_hotp, new_hotp)
self.assertMessageFlashed("An unexpected error occurred! "
- "Please check the application "
- "logs or inform your adminstrator.", "error")
+ "Please inform your administrator.", "error")
mocked_error_logger.assert_called_once_with(
"set_hotp_secret '{}' (id {}) failed: {}".format(
otp_secret, self.user.id, error_message))
@@ -463,21 +476,21 @@ def test_user_resets_hotp(self):
self._login_user()
old_hotp = self.user.hotp
- resp = self.client.post(url_for('account_reset_two_factor_hotp'),
+ resp = self.client.post(url_for('account.reset_two_factor_hotp'),
data=dict(otp_secret=123456))
new_hotp = self.user.hotp
# check that hotp is different
self.assertNotEqual(old_hotp.secret, new_hotp.secret)
# should redirect to verification page
- self.assertRedirects(resp, url_for('account_new_two_factor'))
+ self.assertRedirects(resp, url_for('account.new_two_factor'))
def test_admin_resets_user_totp(self):
self._login_admin()
old_totp = self.user.totp
resp = self.client.post(
- url_for('admin_reset_two_factor_totp'),
+ url_for('admin.reset_two_factor_totp'),
data=dict(uid=self.user.id))
new_totp = self.user.totp
@@ -485,24 +498,24 @@ def test_admin_resets_user_totp(self):
self.assertRedirects(
resp,
- url_for('admin_new_user_two_factor', uid=self.user.id))
+ url_for('admin.new_user_two_factor', uid=self.user.id))
def test_user_resets_totp(self):
self._login_user()
old_totp = self.user.totp
- resp = self.client.post(url_for('account_reset_two_factor_totp'))
+ resp = self.client.post(url_for('account.reset_two_factor_totp'))
new_totp = self.user.totp
# check that totp is different
self.assertNotEqual(old_totp.secret, new_totp.secret)
# should redirect to verification page
- self.assertRedirects(resp, url_for('account_new_two_factor'))
+ self.assertRedirects(resp, url_for('account.new_two_factor'))
def test_admin_resets_hotp_with_missing_otp_secret_key(self):
self._login_admin()
- resp = self.client.post(url_for('admin_reset_two_factor_hotp'),
+ resp = self.client.post(url_for('admin.reset_two_factor_hotp'),
data=dict(uid=self.user.id))
self.assertIn('Change Secret', resp.data)
@@ -510,20 +523,20 @@ def test_admin_resets_hotp_with_missing_otp_secret_key(self):
def test_admin_new_user_2fa_redirect(self):
self._login_admin()
resp = self.client.post(
- url_for('admin_new_user_two_factor', uid=self.user.id),
+ url_for('admin.new_user_two_factor', uid=self.user.id),
data=dict(token='mocked'))
- self.assertRedirects(resp, url_for('admin_index'))
+ self.assertRedirects(resp, url_for('admin.index'))
def test_http_get_on_admin_new_user_two_factor_page(self):
self._login_admin()
- resp = self.client.get(url_for('admin_new_user_two_factor',
+ resp = self.client.get(url_for('admin.new_user_two_factor',
uid=self.user.id))
- # any GET req should take a user to the admin_new_user_two_factor page
+ # any GET req should take a user to the admin.new_user_two_factor page
self.assertIn('Authenticator', resp.data)
def test_http_get_on_admin_add_user_page(self):
self._login_admin()
- resp = self.client.get(url_for('admin_add_user'))
+ resp = self.client.get(url_for('admin.add_user'))
# any GET req should take a user to the admin_add_user page
self.assertIn('ADD USER', resp.data)
@@ -531,58 +544,139 @@ def test_admin_add_user(self):
self._login_admin()
max_journalist_pk = max([user.id for user in Journalist.query.all()])
- resp = self.client.post(url_for('admin_add_user'),
+ resp = self.client.post(url_for('admin.add_user'),
data=dict(username='dellsberg',
password=VALID_PASSWORD,
- is_admin=False))
-
- print resp.data.decode('utf-8')
+ is_admin=None))
- self.assertRedirects(resp, url_for('admin_new_user_two_factor',
+ self.assertRedirects(resp, url_for('admin.new_user_two_factor',
uid=max_journalist_pk+1))
def test_admin_add_user_without_username(self):
self._login_admin()
- resp = self.client.post(url_for('admin_add_user'),
+ resp = self.client.post(url_for('admin.add_user'),
data=dict(username='',
password=VALID_PASSWORD,
- is_admin=False))
- self.assertIn('Invalid username', resp.data)
+ is_admin=None))
+ self.assertIn('This field is required.', resp.data)
def test_admin_add_user_too_short_username(self):
self._login_admin()
username = 'a' * (Journalist.MIN_USERNAME_LEN - 1)
- resp = self.client.post(url_for('admin_add_user'),
+ resp = self.client.post(url_for('admin.add_user'),
data=dict(username=username,
password='pentagonpapers',
password_again='pentagonpapers',
- is_admin=False))
- self.assertIn('Invalid username', resp.data)
+ is_admin=None))
+ self.assertIn('Field must be at least {} characters long'.format(
+ Journalist.MIN_USERNAME_LEN),
+ resp.data)
- @patch('journalist.app.logger.error')
- @patch('journalist.Journalist',
+ def test_admin_add_user_yubikey_odd_length(self):
+ self._login_admin()
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username='dellsberg',
+ password=VALID_PASSWORD,
+ password_again=VALID_PASSWORD,
+ is_admin=None,
+ is_hotp=True,
+ otp_secret='123'))
+ self.assertIn('Field must be 40 characters long', resp.data)
+
+ def test_admin_add_user_yubikey_valid_length(self):
+ self._login_admin()
+
+ otp = '1234567890123456789012345678901234567890'
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username='dellsberg',
+ password=VALID_PASSWORD,
+ password_again=VALID_PASSWORD,
+ is_admin=None,
+ is_hotp=True,
+ otp_secret=otp),
+ follow_redirects=True)
+
+ # Should redirect to the token verification page
+ self.assertIn('Enable YubiKey (OATH-HOTP)', resp.data)
+
+ def test_admin_add_user_yubikey_correct_length_with_whitespace(self):
+ self._login_admin()
+
+ otp = '12 34 56 78 90 12 34 56 78 90 12 34 56 78 90 12 34 56 78 90'
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username='dellsberg',
+ password=VALID_PASSWORD,
+ password_again=VALID_PASSWORD,
+ is_admin=None,
+ is_hotp=True,
+ otp_secret=otp),
+ follow_redirects=True)
+
+ # Should redirect to the token verification page
+ self.assertIn('Enable YubiKey (OATH-HOTP)', resp.data)
+
+ def test_admin_sets_user_to_admin(self):
+ self._login_admin()
+ new_user = 'admin-set-user-to-admin-test'
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username=new_user,
+ password=VALID_PASSWORD,
+ is_admin=None))
+ assert resp.status_code in (200, 302)
+ journo = Journalist.query.filter(Journalist.username == new_user).one()
+ assert not journo.is_admin
+
+ resp = self.client.post(url_for('admin.edit_user', user_id=journo.id),
+ data=dict(is_admin=True))
+ assert resp.status_code in (200, 302), resp.data.decode('utf-8')
+
+ # there are better ways to do this, but flake8 complains
+ journo = Journalist.query.filter(Journalist.username == new_user).one()
+ assert journo.is_admin is True
+
+ def test_admin_renames_user(self):
+ self._login_admin()
+ new_user = 'admin-renames-user-test'
+ resp = self.client.post(url_for('admin.add_user'),
+ data=dict(username=new_user,
+ password=VALID_PASSWORD,
+ is_admin=None))
+ assert resp.status_code in (200, 302)
+ journo = Journalist.query.filter(Journalist.username == new_user).one()
+
+ new_user = new_user + 'a'
+ resp = self.client.post(url_for('admin.edit_user', user_id=journo.id),
+ data=dict(username=new_user))
+ assert resp.status_code in (200, 302), resp.data.decode('utf-8')
+
+ # the following will throw an exception if new_user is not found
+ # therefore asserting it has been created
+ Journalist.query.filter(Journalist.username == new_user).one()
+
+ @patch('journalist_app.admin.current_app.logger.error')
+ @patch('journalist_app.admin.Journalist',
side_effect=IntegrityError('STATEMENT', 'PARAMETERS', None))
def test_admin_add_user_integrity_error(self,
mock_journalist,
mocked_error_logger):
self._login_admin()
- self.client.post(url_for('admin_add_user'),
+ self.client.post(url_for('admin.add_user'),
data=dict(username='username',
password=VALID_PASSWORD,
- is_admin=False))
+ is_admin=None))
mocked_error_logger.assert_called_once_with(
"Adding user 'username' failed: (__builtin__.NoneType) "
"None [SQL: 'STATEMENT'] [parameters: 'PARAMETERS']")
self.assertMessageFlashed(
"An error occurred saving this user to the database."
- " Please check the application logs.",
+ " Please inform your administrator.",
"error")
def test_admin_page_restriction_http_gets(self):
- admin_urls = [url_for('admin_index'), url_for('admin_add_user'),
- url_for('admin_edit_user', user_id=self.user.id)]
+ admin_urls = [url_for('admin.index'), url_for('admin.add_user'),
+ url_for('admin.edit_user', user_id=self.user.id)]
self._login_user()
for admin_url in admin_urls:
@@ -590,47 +684,62 @@ def test_admin_page_restriction_http_gets(self):
self.assertStatus(resp, 302)
def test_admin_page_restriction_http_posts(self):
- admin_urls = [url_for('admin_reset_two_factor_totp'),
- url_for('admin_reset_two_factor_hotp'),
- url_for('admin_add_user', user_id=self.user.id),
- url_for('admin_new_user_two_factor'),
- url_for('admin_reset_two_factor_totp'),
- url_for('admin_reset_two_factor_hotp'),
- url_for('admin_edit_user', user_id=self.user.id),
- url_for('admin_delete_user', user_id=self.user.id)]
+ admin_urls = [url_for('admin.reset_two_factor_totp'),
+ url_for('admin.reset_two_factor_hotp'),
+ url_for('admin.add_user', user_id=self.user.id),
+ url_for('admin.new_user_two_factor'),
+ url_for('admin.reset_two_factor_totp'),
+ url_for('admin.reset_two_factor_hotp'),
+ url_for('admin.edit_user', user_id=self.user.id),
+ url_for('admin.delete_user', user_id=self.user.id)]
self._login_user()
for admin_url in admin_urls:
resp = self.client.post(admin_url)
self.assertStatus(resp, 302)
def test_user_authorization_for_gets(self):
- urls = [url_for('index'), url_for('col', filesystem_id='1'),
- url_for('download_single_submission',
+ urls = [url_for('main.index'), url_for('col.col', filesystem_id='1'),
+ url_for('col.download_single_submission',
filesystem_id='1', fn='1'),
- url_for('edit_account')]
+ url_for('account.edit')]
for url in urls:
resp = self.client.get(url)
self.assertStatus(resp, 302)
def test_user_authorization_for_posts(self):
- urls = [url_for('add_star', filesystem_id='1'),
- url_for('remove_star', filesystem_id='1'),
- url_for('col_process'),
- url_for('col_delete_single', filesystem_id='1'),
- url_for('reply'), url_for('generate_code'), url_for('bulk'),
- url_for('account_new_two_factor'),
- url_for('account_reset_two_factor_totp'),
- url_for('account_reset_two_factor_hotp')]
+ urls = [url_for('col.add_star', filesystem_id='1'),
+ url_for('col.remove_star', filesystem_id='1'),
+ url_for('col.process'),
+ url_for('col.delete_single', filesystem_id='1'),
+ url_for('main.reply'),
+ url_for('main.regenerate_code'),
+ url_for('main.bulk'),
+ url_for('account.new_two_factor'),
+ url_for('account.reset_two_factor_totp'),
+ url_for('account.reset_two_factor_hotp')]
for url in urls:
res = self.client.post(url)
self.assertStatus(res, 302)
+ def test_incorrect_current_password_change(self):
+ self._login_user()
+ resp = self.client.post(url_for('account.new_password'),
+ data=dict(password=VALID_PASSWORD,
+ token='mocked',
+ current_password='badpw'),
+ follow_redirects=True)
+
+ text = resp.data.decode('utf-8')
+ self.assertIn('Incorrect password or two-factor code', text)
+
def test_invalid_user_password_change(self):
self._login_user()
- res = self.client.post(url_for('new_password'),
- data=dict(password='badpw'))
- self.assertRedirects(res, url_for('edit_account'))
+ res = self.client.post(url_for('account.new_password'),
+ data=dict(password='badpw',
+ token='mocked',
+ current_password=self.user_pw))
+ self.assertRedirects(res, url_for('account.edit'))
def test_too_long_user_password_change(self):
self._login_user()
@@ -638,8 +747,10 @@ def test_too_long_user_password_change(self):
overly_long_password = VALID_PASSWORD + \
'a' * (Journalist.MAX_PASSWORD_LEN - len(VALID_PASSWORD) + 1)
- self.client.post(url_for('new_password'),
- data=dict(password=overly_long_password),
+ self.client.post(url_for('account.new_password'),
+ data=dict(password=overly_long_password,
+ token='mocked',
+ current_password=self.user_pw),
follow_redirects=True)
self.assertMessageFlashed('You submitted a bad password! Password not '
@@ -648,32 +759,34 @@ def test_too_long_user_password_change(self):
def test_valid_user_password_change(self):
self._login_user()
resp = self.client.post(
- url_for('new_password'),
- data=dict(password=VALID_PASSWORD_2),
+ url_for('account.new_password'),
+ data=dict(password=VALID_PASSWORD_2,
+ token='mocked',
+ current_password=self.user_pw),
follow_redirects=True)
- assert 'The password was successfully updated!' in \
+ assert 'Password updated.' in \
resp.data.decode('utf-8')
def test_regenerate_totp(self):
self._login_user()
old_totp = self.user.totp
- res = self.client.post(url_for('account_reset_two_factor_totp'))
+ res = self.client.post(url_for('account.reset_two_factor_totp'))
new_totp = self.user.totp
# check that totp is different
self.assertNotEqual(old_totp.secret, new_totp.secret)
# should redirect to verification page
- self.assertRedirects(res, url_for('account_new_two_factor'))
+ self.assertRedirects(res, url_for('account.new_two_factor'))
def test_edit_hotp(self):
self._login_user()
old_hotp = self.user.hotp
res = self.client.post(
- url_for('account_reset_two_factor_hotp'),
+ url_for('account.reset_two_factor_hotp'),
data=dict(otp_secret=123456)
)
new_hotp = self.user.hotp
@@ -682,14 +795,14 @@ def test_edit_hotp(self):
self.assertNotEqual(old_hotp.secret, new_hotp.secret)
# should redirect to verification page
- self.assertRedirects(res, url_for('account_new_two_factor'))
+ self.assertRedirects(res, url_for('account.new_two_factor'))
def test_delete_source_deletes_submissions(self):
"""Verify that when a source is deleted, the submissions that
correspond to them are also deleted."""
self._delete_collection_setup()
- journalist.delete_collection(self.source.filesystem_id)
+ journalist_app.utils.delete_collection(self.source.filesystem_id)
# Source should be gone
results = db_session.query(Source).filter(
@@ -706,7 +819,7 @@ def test_delete_collection_updates_db(self):
record, as well as Reply & Submission records associated with
that record are purged from the database."""
self._delete_collection_setup()
- journalist.delete_collection(self.source.filesystem_id)
+ journalist_app.utils.delete_collection(self.source.filesystem_id)
results = Source.query.filter(Source.id == self.source.id).all()
self.assertEqual(results, [])
results = db_session.query(
@@ -724,7 +837,7 @@ def test_delete_source_deletes_source_key(self):
source_key = crypto_util.getkey(self.source.filesystem_id)
self.assertNotEqual(source_key, None)
- journalist.delete_collection(self.source.filesystem_id)
+ journalist_app.utils.delete_collection(self.source.filesystem_id)
# Source key no longer exists
source_key = crypto_util.getkey(self.source.filesystem_id)
@@ -740,7 +853,7 @@ def test_delete_source_deletes_docs_on_disk(self):
self.source.filesystem_id)
self.assertTrue(os.path.exists(dir_source_docs))
- job = journalist.delete_collection(self.source.filesystem_id)
+ job = journalist_app.utils.delete_collection(self.source.filesystem_id)
# Wait up to 5s to wait for Redis worker `srm` operation to complete
utils.async.wait_for_redis_worker(job)
@@ -774,7 +887,6 @@ def test_download_selected_submissions_from_source(self):
zipfile.ZipFile(StringIO(resp.data)).getinfo(
os.path.join(
source.journalist_filename,
- source.journalist_designation,
"%s_%s" % (filename.split('-')[0],
source.last_updated.date()),
filename
@@ -822,7 +934,7 @@ def test_download_unread_all_sources(self):
# Download all unread messages from all sources
self.resp = self.client.post(
- '/col/process',
+ url_for('col.process'),
data=dict(action='download-unread',
cols_selected=[self.source0.filesystem_id,
self.source1.filesystem_id]))
@@ -885,12 +997,12 @@ def test_download_all_selected_sources(self):
# Dowload all messages from self.source1
self.resp = self.client.post(
- '/col/process',
+ url_for('col.process'),
data=dict(action='download-all',
cols_selected=[self.source1.filesystem_id]))
resp = self.client.post(
- '/col/process',
+ url_for('col.process'),
data=dict(action='download-all',
cols_selected=[self.source1.filesystem_id]))
@@ -924,85 +1036,181 @@ def test_download_all_selected_sources(self):
submission.filename)
)
- def test_add_star_redirects_to_index(self):
+ def test_single_source_is_successfully_starred(self):
source, _ = utils.db_helper.init_source()
self._login_user()
- resp = self.client.post(url_for('add_star',
+ resp = self.client.post(url_for('col.add_star',
filesystem_id=source.filesystem_id))
- self.assertRedirects(resp, url_for('index'))
+ self.assertRedirects(resp, url_for('main.index'))
-class TestJournalistAppTwo(unittest.TestCase):
+ # Assert source is starred
+ self.assertTrue(source.star.starred)
- def setUp(self):
- journalist.logged_in = MagicMock()
- journalist.request = MagicMock()
- journalist.url_for = MagicMock()
- journalist.redirect = MagicMock()
- journalist.abort = MagicMock()
- journalist.db_session = MagicMock()
- journalist.get_docs = MagicMock()
- journalist.get_or_else = MagicMock()
+ def test_single_source_is_successfully_unstarred(self):
+ source, _ = utils.db_helper.init_source()
+ self._login_user()
- def _set_up_request(self, cols_selected, action):
- journalist.request.form.__contains__.return_value = True
- journalist.request.form.getlist = MagicMock(return_value=cols_selected)
- journalist.request.form.__getitem__.return_value = action
+ # First star the source
+ self.client.post(url_for('col.add_star',
+ filesystem_id=source.filesystem_id))
- @patch("journalist.col_delete")
- def test_col_process_delegates_to_col_delete(self, col_delete):
- cols_selected = ['source_id']
- self._set_up_request(cols_selected, 'delete')
+ # Now unstar the source
+ resp = self.client.post(url_for('col.remove_star',
+ filesystem_id=source.filesystem_id))
- journalist.col_process()
+ self.assertRedirects(resp, url_for('main.index'))
- col_delete.assert_called_with(cols_selected)
+ # Assert source is not starred
+ self.assertFalse(source.star.starred)
- @patch("journalist.col_star")
- def test_col_process_delegates_to_col_star(self, col_star):
- cols_selected = ['source_id']
- self._set_up_request(cols_selected, 'star')
+ def test_journalist_session_expiration(self):
+ try:
+ old_expiration = config.SESSION_EXPIRATION_MINUTES
+ has_session_expiration = True
+ except AttributeError:
+ has_session_expiration = False
- journalist.col_process()
+ try:
+ with self.client as client:
+ # do a real login to get a real session
+ # (none of the mocking `g` hacks)
+ resp = self.client.post(url_for('main.login'),
+ data=dict(username=self.user.username,
+ password=VALID_PASSWORD,
+ token='mocked'))
+ assert resp.status_code == 200
- col_star.assert_called_with(cols_selected)
+ # set the expiration to ensure we trigger an expiration
+ config.SESSION_EXPIRATION_MINUTES = -1
- @patch("journalist.col_un_star")
- def test_col_process_delegates_to_col_un_star(self, col_un_star):
- cols_selected = ['source_id']
- self._set_up_request(cols_selected, 'un-star')
+ resp = client.get(url_for('account.edit'),
+ follow_redirects=True)
- journalist.col_process()
+ # check that the session was cleared (apart from 'expires'
+ # which is always present and 'csrf_token' which leaks no info)
+ session.pop('expires', None)
+ session.pop('csrf_token', None)
+ assert not session, session
+ assert ('You have been logged out due to inactivity' in
+ resp.data.decode('utf-8'))
+ finally:
+ if has_session_expiration:
+ config.SESSION_EXPIRATION_MINUTES = old_expiration
+ else:
+ del config.SESSION_EXPIRATION_MINUTES
- col_un_star.assert_called_with(cols_selected)
+ def test_csrf_error_page(self):
+ old_enabled = self.app.config['WTF_CSRF_ENABLED']
+ self.app.config['WTF_CSRF_ENABLED'] = True
- @patch("journalist.abort")
- def test_col_process_returns_404_with_bad_action(self, abort):
- cols_selected = ['source_id']
- self._set_up_request(cols_selected, 'something-random')
+ try:
+ with self.app.test_client() as app:
+ resp = app.post(url_for('main.login'))
+ self.assertRedirects(resp, url_for('main.login'))
- journalist.col_process()
+ resp = app.post(url_for('main.login'), follow_redirects=True)
+ self.assertIn('You have been logged out due to inactivity',
+ resp.data)
+ finally:
+ self.app.config['WTF_CSRF_ENABLED'] = old_enabled
- abort.assert_called_with(ANY)
+ def test_col_process_aborts_with_bad_action(self):
+ """If the action is not a valid choice, a 500 should occur"""
+ self._login_user()
- @patch("journalist.make_star_true")
- @patch("journalist.db_session")
- def test_col_star_call_db_(self, db_session, make_star_true):
- journalist.col_star(['filesystem_id'])
+ form_data = {'cols_selected': 'does not matter',
+ 'action': 'this action does not exist'}
- make_star_true.assert_called_with('filesystem_id')
+ resp = self.client.post(url_for('col.process'), data=form_data)
- @patch("journalist.db_session")
- def test_col_un_star_call_db(self, db_session):
- journalist.col_un_star([])
+ self.assert500(resp)
- db_session.commit.assert_called_with()
+ def test_col_process_successfully_deletes_multiple_sources(self):
+ # Create two sources with one submission each
+ source_1, _ = utils.db_helper.init_source()
+ utils.db_helper.submit(source_1, 1)
+ source_2, _ = utils.db_helper.init_source()
+ utils.db_helper.submit(source_2, 1)
- @classmethod
- def tearDownClass(cls):
- # Reset the module variables that were changed to mocks so we don't
- # break other tests
- reload(journalist)
+ self._login_user()
+
+ form_data = {'cols_selected': [source_1.filesystem_id,
+ source_2.filesystem_id],
+ 'action': 'delete'}
+
+ resp = self.client.post(url_for('col.process'), data=form_data,
+ follow_redirects=True)
+
+ self.assert200(resp)
+
+ # Verify there are no remaining sources
+ remaining_sources = db_session.query(db.Source).all()
+ self.assertEqual(len(remaining_sources), 0)
+
+ def test_col_process_successfully_stars_sources(self):
+ source_1, _ = utils.db_helper.init_source()
+ utils.db_helper.submit(source_1, 1)
+
+ self._login_user()
+
+ form_data = {'cols_selected': [source_1.filesystem_id],
+ 'action': 'star'}
+
+ resp = self.client.post(url_for('col.process'), data=form_data,
+ follow_redirects=True)
+
+ self.assert200(resp)
+
+ # Verify the source is starred
+ self.assertTrue(source_1.star.starred)
+
+ def test_col_process_successfully_unstars_sources(self):
+ source_1, _ = utils.db_helper.init_source()
+ utils.db_helper.submit(source_1, 1)
+
+ self._login_user()
+
+ # First star the source
+ form_data = {'cols_selected': [source_1.filesystem_id],
+ 'action': 'star'}
+ self.client.post(url_for('col.process'), data=form_data,
+ follow_redirects=True)
+
+ # Now unstar the source
+ form_data = {'cols_selected': [source_1.filesystem_id],
+ 'action': 'un-star'}
+ resp = self.client.post(url_for('col.process'), data=form_data,
+ follow_redirects=True)
+
+ self.assert200(resp)
+
+ # Verify the source is not starred
+ self.assertFalse(source_1.star.starred)
+
+ def test_render_locales(self):
+ """the locales.html template must collect both request.args (l=XX) and
+ request.view_args (/<filesystem_id>) to build the URL to
+ change the locale
+
+ """
+ supported = getattr(config, 'SUPPORTED_LOCALES', None)
+ try:
+ if supported:
+ del config.SUPPORTED_LOCALES
+ config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
+
+ source, _ = utils.db_helper.init_source()
+ self._login_user()
+
+ url = url_for('col.col', filesystem_id=source.filesystem_id)
+ resp = self.client.get(url + '?l=fr_FR')
+ self.assertNotIn('?l=fr_FR', resp.data)
+ self.assertIn(url + '?l=en_US', resp.data)
+
+ finally:
+ if supported:
+ config.SUPPORTED_LOCALES = supported
class TestJournalistLogin(unittest.TestCase):
@@ -1048,62 +1256,3 @@ def tearDownClass(cls):
# Reset the module variables that were changed to mocks so we don't
# break other tests
reload(journalist)
-
-
-class TestJournalist(unittest.TestCase):
-
- def setUp(self):
- journalist.logged_in = MagicMock()
- journalist.make_star_true = MagicMock()
- journalist.db_session = MagicMock()
- journalist.url_for = MagicMock()
- journalist.redirect = MagicMock()
- journalist.get_one_or_else = MagicMock()
-
- @patch('journalist.url_for')
- @patch('journalist.redirect')
- def test_add_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.add_star('filesystem_id')
-
- self.assertEqual(redirect_template, redirect(url_for('index')))
-
- @patch('journalist.db_session')
- def test_add_star_makes_commits(self, db_session):
- journalist.add_star('filesystem_id')
-
- db_session.commit.assert_called_with()
-
- @patch('journalist.make_star_true')
- def test_single_delegates_to_make_star_true(self, make_star_true):
- filesystem_id = 'filesystem_id'
-
- journalist.add_star(filesystem_id)
-
- make_star_true.assert_called_with(filesystem_id)
-
- @patch('journalist.url_for')
- @patch('journalist.redirect')
- def test_remove_star_renders_template(self, redirect, url_for):
- redirect_template = journalist.remove_star('filesystem_id')
-
- self.assertEqual(redirect_template, redirect(url_for('index')))
-
- @patch('journalist.db_session')
- def test_remove_star_makes_commits(self, db_session):
- journalist.remove_star('filesystem_id')
-
- db_session.commit.assert_called_with()
-
- @patch('journalist.make_star_false')
- def test_remove_star_delegates_to_make_star_false(self, make_star_false):
- filesystem_id = 'filesystem_id'
-
- journalist.remove_star(filesystem_id)
-
- make_star_false.assert_called_with(filesystem_id)
-
- @classmethod
- def tearDownClass(cls):
- # Reset the module variables that were changed to mocks so we don't
- # break other tests
- reload(journalist)
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -9,7 +9,9 @@
import manage
import mock
import pytest
+from sqlalchemy.orm.exc import NoResultFound
from StringIO import StringIO
+import shutil
import subprocess
import sys
import time
@@ -17,7 +19,11 @@
import version
import utils
-from db import Journalist
+from db import Journalist, db_session
+
+
+YUBIKEY_HOTP = ['cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc',
+ 'cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc d7']
class TestManagePy(object):
@@ -62,6 +68,20 @@ def test_get_yubikey_usage_yes(self, mock_stdin):
def test_get_yubikey_usage_no(self, mock_stdin):
assert not manage._get_yubikey_usage()
+ @mock.patch("manage._get_username", return_value='ntoll')
+ @mock.patch("manage._get_yubikey_usage", return_value=True)
+ @mock.patch("__builtin__.raw_input", side_effect=YUBIKEY_HOTP)
+ @mock.patch("sys.stdout", new_callable=StringIO)
+ def test_handle_invalid_secret(self, mock_username, mock_yubikey,
+ mock_htop, mock_stdout):
+ """Regression test for bad secret logic in manage.py"""
+
+ # We will try to provide one invalid and one valid secret
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 0)
+ self.assertIn('Try again.', sys.stdout.getvalue())
+ self.assertIn('successfully added', sys.stdout.getvalue())
+
@mock.patch("manage._get_username", return_value='foo-bar-baz')
@mock.patch("manage._get_yubikey_usage", return_value=False)
@mock.patch("sys.stdout", new_callable=StringIO)
@@ -82,6 +102,55 @@ def test_exception_handling_when_duplicate_username(self,
self.assertIn('ERROR: That username is already taken!',
sys.stdout.getvalue())
+ @mock.patch("manage._get_username", return_value='test-user-56789')
+ @mock.patch("manage._get_yubikey_usage", return_value=False)
+ @mock.patch("manage._get_username_to_delete",
+ return_value='test-user-56789')
+ @mock.patch('manage._get_delete_confirmation', return_value=True)
+ def test_delete_user(self,
+ mock_username,
+ mock_yubikey,
+ mock_user_to_delete,
+ mock_user_del_confirm):
+ return_value = manage._add_user()
+ self.assertEqual(return_value, 0)
+
+ return_value = manage.delete_user(args=None)
+ self.assertEqual(return_value, 0)
+
+ @mock.patch("manage._get_username_to_delete",
+ return_value='does-not-exist')
+ @mock.patch('manage._get_delete_confirmation', return_value=True)
+ @mock.patch("sys.stdout", new_callable=StringIO)
+ def test_delete_non_existent_user(self,
+ mock_user_to_delete,
+ mock_user_del_confirm,
+ mock_stdout):
+ return_value = manage.delete_user(args=None)
+ self.assertEqual(return_value, 0)
+ self.assertIn('ERROR: That user was not found!',
+ sys.stdout.getvalue())
+
+ @mock.patch("__builtin__.raw_input", return_value='test-user-12345')
+ def test_get_username_to_delete(self, mock_username):
+ return_value = manage._get_username_to_delete()
+ self.assertEqual(return_value, 'test-user-12345')
+
+ def test_reset(self):
+ test_journalist, _ = utils.db_helper.init_journalist()
+ user_should_be_gone = test_journalist.username
+
+ return_value = manage.reset(args=None)
+
+ self.assertEqual(return_value, 0)
+ assert os.path.exists(config.DATABASE_FILE)
+ assert os.path.exists(config.STORE_DIR)
+
+ # Verify journalist user present in the database is gone
+ db_session.remove() # Close session and get a session on the new db
+ with self.assertRaises(NoResultFound):
+ Journalist.query.filter_by(username=user_should_be_gone).one()
+
class TestManage(object):
@@ -96,7 +165,76 @@ def teardown(self):
def test_get_username(self, mock_get_usernam):
assert manage._get_username() == 'foo-bar-baz'
- def test_translate_compile_code_and_template(self):
+ def test_translate_desktop_l10n(self):
+ in_files = {}
+ for what in ('source', 'journalist'):
+ in_files[what] = join(config.TEMP_DIR, what + '.desktop.in')
+ shutil.copy(join(self.dir, 'i18n/' + what + '.desktop.in'),
+ in_files[what])
+ kwargs = {
+ 'translations_dir': config.TEMP_DIR,
+ 'source': [in_files['source']],
+ 'extract_update': True,
+ 'compile': False,
+ 'verbose': logging.DEBUG,
+ 'version': version.__version__,
+ }
+ args = argparse.Namespace(**kwargs)
+ manage.setup_verbosity(args)
+ manage.translate_desktop(args)
+ messages_file = join(config.TEMP_DIR, 'desktop.pot')
+ assert exists(messages_file)
+ pot = open(messages_file).read()
+ assert 'SecureDrop Source Interfaces' in pot
+ # pretend this happened a few seconds ago
+ few_seconds_ago = time.time() - 60
+ os.utime(messages_file, (few_seconds_ago, few_seconds_ago))
+
+ i18n_file = join(config.TEMP_DIR, 'source.desktop')
+
+ #
+ # Extract+update but do not compile
+ #
+ kwargs['source'] = in_files.values()
+ old_messages_mtime = getmtime(messages_file)
+ assert not exists(i18n_file)
+ manage.translate_desktop(args)
+ assert not exists(i18n_file)
+ current_messages_mtime = getmtime(messages_file)
+ assert old_messages_mtime < current_messages_mtime
+
+ locale = 'fr_FR'
+ po_file = join(config.TEMP_DIR, locale + ".po")
+ manage.sh("""
+ msginit --no-translator \
+ --locale {locale} \
+ --output {po_file} \
+ --input {messages_file}
+ sed -i -e '/{source}/,+1s/msgstr ""/msgstr "SOURCE FR"/' \
+ {po_file}
+ """.format(source='SecureDrop Source Interfaces',
+ messages_file=messages_file,
+ po_file=po_file,
+ locale=locale))
+ assert exists(po_file)
+
+ #
+ # Compile but do not extract+update
+ #
+ kwargs['source'] = in_files.values() + ['BOOM']
+ kwargs['extract_update'] = False
+ kwargs['compile'] = True
+ args = argparse.Namespace(**kwargs)
+ old_messages_mtime = current_messages_mtime
+ manage.translate_desktop(args)
+ assert old_messages_mtime == getmtime(messages_file)
+ po = open(po_file).read()
+ assert 'SecureDrop Source Interfaces' in po
+ assert 'SecureDrop Journalist Interfaces' not in po
+ i18n = open(i18n_file).read()
+ assert 'SOURCE FR' in i18n
+
+ def test_translate_messages_l10n(self):
source = [
join(self.dir, 'i18n/code.py'),
join(self.dir, 'i18n/template.html'),
@@ -112,7 +250,7 @@ def test_translate_compile_code_and_template(self):
}
args = argparse.Namespace(**kwargs)
manage.setup_verbosity(args)
- manage.translate(args)
+ manage.translate_messages(args)
messages_file = join(config.TEMP_DIR, 'messages.pot')
assert exists(messages_file)
pot = open(messages_file).read()
@@ -128,13 +266,13 @@ def test_translate_compile_code_and_template(self):
))
mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo')
assert not exists(mo_file)
- manage.translate(args)
+ manage.translate_messages(args)
assert exists(mo_file)
mo = open(mo_file).read()
assert 'code hello i18n' in mo
assert 'template hello i18n' in mo
- def test_translate_compile_arg(self):
+ def test_translate_messages_compile_arg(self):
source = [
join(self.dir, 'i18n/code.py'),
]
@@ -149,7 +287,7 @@ def test_translate_compile_arg(self):
}
args = argparse.Namespace(**kwargs)
manage.setup_verbosity(args)
- manage.translate(args)
+ manage.translate_messages(args)
messages_file = join(config.TEMP_DIR, 'messages.pot')
assert exists(messages_file)
pot = open(messages_file).read()
@@ -175,7 +313,7 @@ def test_translate_compile_arg(self):
#
old_po_mtime = getmtime(po_file)
assert not exists(mo_file)
- manage.translate(args)
+ manage.translate_messages(args)
assert not exists(mo_file)
current_po_mtime = getmtime(po_file)
assert old_po_mtime < current_po_mtime
@@ -191,12 +329,41 @@ def test_translate_compile_arg(self):
kwargs['compile'] = True
args = argparse.Namespace(**kwargs)
old_po_mtime = current_po_mtime
- manage.translate(args)
- assert old_po_mtime == current_po_mtime
+ manage.translate_messages(args)
+ assert old_po_mtime == getmtime(po_file)
mo = open(mo_file).read()
assert 'code hello i18n' in mo
assert 'template hello i18n' not in mo
+ def test_clean_tmp_do_nothing(self, caplog):
+ args = argparse.Namespace(days=0,
+ directory=' UNLIKELY ',
+ verbose=logging.DEBUG)
+ manage.setup_verbosity(args)
+ manage.clean_tmp(args)
+ assert 'does not exist, do nothing' in caplog.text()
+
+ def test_clean_tmp_too_young(self, caplog):
+ args = argparse.Namespace(days=24*60*60,
+ directory=config.TEMP_DIR,
+ verbose=logging.DEBUG)
+ open(os.path.join(config.TEMP_DIR, 'FILE'), 'a').close()
+ manage.setup_verbosity(args)
+ manage.clean_tmp(args)
+ assert 'modified less than' in caplog.text()
+
+ def test_clean_tmp_removed(self, caplog):
+ args = argparse.Namespace(days=0,
+ directory=config.TEMP_DIR,
+ verbose=logging.DEBUG)
+ fname = os.path.join(config.TEMP_DIR, 'FILE')
+ with open(fname, 'a'):
+ old = time.time() - 24*60*60
+ os.utime(fname, (old, old))
+ manage.setup_verbosity(args)
+ manage.clean_tmp(args)
+ assert 'FILE removed' in caplog.text()
+
class TestSh(object):
@@ -210,17 +377,25 @@ def test_sh_progress(self, caplog):
manage.sh("echo AB ; sleep 5 ; echo C")
records = caplog.records()
assert ':sh: ' in records[0].message
+ assert records[0].levelname == 'DEBUG'
assert 'AB' == records[1].message
+ assert records[1].levelname == 'DEBUG'
assert 'C' == records[2].message
+ assert records[2].levelname == 'DEBUG'
def test_sh_input(self, caplog):
assert 'abc' == manage.sh("cat", 'abc')
def test_sh_fail(self, caplog):
+ level = manage.log.getEffectiveLevel()
+ manage.log.setLevel(logging.INFO)
+ assert manage.log.getEffectiveLevel() == logging.INFO
with pytest.raises(subprocess.CalledProcessError) as excinfo:
- manage.sh("/bin/echo -n AB ; /bin/echo C ; exit 111")
+ manage.sh("echo AB ; echo C ; exit 111")
+ manage.log.setLevel(level)
assert excinfo.value.returncode == 111
- for record in caplog.records():
- if record.levelname == 'ERROR':
- assert ('replay full' in record.message or
- 'ABC\n' == record.message)
+ records = caplog.records()
+ assert 'AB' == records[0].message
+ assert records[0].levelname == 'ERROR'
+ assert 'C' == records[1].message
+ assert records[1].levelname == 'ERROR'
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -5,14 +5,19 @@
import re
from bs4 import BeautifulSoup
-from flask import session, escape
+from flask import session, escape, url_for
from flask_testing import TestCase
-from db import Source
+import crypto_util
+from db import db_session, Source
import source
import version
import utils
import json
+import config
+from utils.db_helper import new_codename
+
+overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1)
class TestSourceApp(TestCase):
@@ -39,6 +44,29 @@ def test_index(self):
self.assertIn("Submit documents for the first time", response.data)
self.assertIn("Already submitted something?", response.data)
+ def test_all_words_in_wordlist_validate(self):
+ """Verify that all words in the wordlist are allowed by the form
+ validation. Otherwise a source will have a codename and be unable to
+ return."""
+
+ wordlist_en = crypto_util._get_wordlist('en')
+
+ # chunk the words to cut down on the number of requets we make
+ # otherwise this test is *slow*
+ chunks = [wordlist_en[i:i + 7] for i in range(0, len(wordlist_en), 7)]
+
+ for words in chunks:
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=' '.join(words)),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ # If the word does not validate, then it will show
+ # 'Invalid input'. If it does validate, it should show that
+ # it isn't a recognized codename.
+ self.assertIn('Sorry, that is not a recognized codename.',
+ resp.data)
+ self.assertNotIn('logged_in', session)
+
def _find_codename(self, html):
"""Find a source codename (diceware passphrase) in HTML"""
# Codenames may contain HTML escape characters, and the wordlist
@@ -67,23 +95,24 @@ def test_generate_has_login_link(self):
if they already have a codename, rather than create a new one.
"""
resp = self.client.get('/generate')
- self.assertIn("ALREADY HAVE A CODENAME?", resp.data)
+ self.assertIn("USE EXISTING CODENAME", resp.data)
soup = BeautifulSoup(resp.data, 'html.parser')
already_have_codename_link = soup.select('a#already-have-codename')[0]
self.assertEqual(already_have_codename_link['href'], '/login')
def test_generate_already_logged_in(self):
- self._new_codename()
- # Make sure it redirects to /lookup when logged in
- resp = self.client.get('/generate')
- self.assertEqual(resp.status_code, 302)
- # Make sure it flashes the message on the lookup page
- resp = self.client.get('/generate', follow_redirects=True)
- # Should redirect to /lookup
- self.assertEqual(resp.status_code, 200)
- self.assertIn("because you are already logged in.", resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ # Make sure it redirects to /lookup when logged in
+ resp = client.get('/generate')
+ self.assertEqual(resp.status_code, 302)
+ # Make sure it flashes the message on the lookup page
+ resp = client.get('/generate', follow_redirects=True)
+ # Should redirect to /lookup
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("because you are already logged in.", resp.data)
- def test_create(self):
+ def test_create_new_source(self):
with self.client as c:
resp = c.get('/generate')
resp = c.post('/create', follow_redirects=True)
@@ -91,33 +120,62 @@ def test_create(self):
# should be redirected to /lookup
self.assertIn("Submit Materials", resp.data)
- def _new_codename(self):
- return utils.db_helper.new_codename(self.client, session)
+ @patch('source.app.logger.warning')
+ @patch('crypto_util.genrandomid',
+ side_effect=[overly_long_codename, 'short codename'])
+ def test_generate_too_long_codename(self, genrandomid, logger):
+ """Generate a codename that exceeds the maximum codename length"""
+
+ with self.client as c:
+ resp = c.post('/generate')
+ self.assertEqual(resp.status_code, 200)
+
+ logger.assert_called_with(
+ "Generated a source codename that was too long, "
+ "skipping it. This should not happen. "
+ "(Codename='{}')".format(overly_long_codename)
+ )
+
+ @patch('source.app.logger.error')
+ def test_create_duplicate_codename(self, logger):
+ with self.client as c:
+ c.get('/generate')
+
+ # Create a source the first time
+ c.post('/create', follow_redirects=True)
+
+ # Attempt to add the same source
+ c.post('/create', follow_redirects=True)
+ logger.assert_called_once()
+ self.assertIn("Attempt to create a source with duplicate codename",
+ logger.call_args[0][0])
+ assert 'codename' not in session
def test_lookup(self):
"""Test various elements on the /lookup page."""
- codename = self._new_codename()
- resp = self.client.post('login', data=dict(codename=codename),
- follow_redirects=True)
- # redirects to /lookup
- self.assertIn("public key", resp.data)
- # download the public key
- resp = self.client.get('journalist-key')
- self.assertIn("BEGIN PGP PUBLIC KEY BLOCK", resp.data)
+ with self.client as client:
+ codename = new_codename(client, session)
+ resp = client.post('login', data=dict(codename=codename),
+ follow_redirects=True)
+ # redirects to /lookup
+ self.assertIn("public key", resp.data)
+ # download the public key
+ resp = client.get('journalist-key')
+ self.assertIn("BEGIN PGP PUBLIC KEY BLOCK", resp.data)
def test_login_and_logout(self):
resp = self.client.get('/login')
self.assertEqual(resp.status_code, 200)
self.assertIn("Enter Codename", resp.data)
- codename = self._new_codename()
- with self.client as c:
- resp = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
+ with self.client as client:
+ codename = new_codename(client, session)
+ resp = client.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertIn("Submit Materials", resp.data)
self.assertTrue(session['logged_in'])
- resp = c.get('/logout', follow_redirects=True)
+ resp = client.get('/logout', follow_redirects=True)
with self.client as c:
resp = c.post('/login', data=dict(codename='invalid'),
@@ -133,37 +191,50 @@ def test_login_and_logout(self):
self.assertEqual(resp.status_code, 200)
self.assertTrue(session['logged_in'])
resp = c.get('/logout', follow_redirects=True)
- self.assertTrue(not session)
+
+ # sessions always have 'expires', so pop it for the next check
+ session.pop('expires', None)
+
+ self.assertNotIn('logged_in', session)
+ self.assertNotIn('codename', session)
+
self.assertIn('Thank you for exiting your session!', resp.data)
+ def test_user_must_log_in_for_protected_views(self):
+ with self.client as c:
+ resp = c.get('/lookup', follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Enter Codename", resp.data)
+
def test_login_with_whitespace(self):
"""
Test that codenames with leading or trailing whitespace still work"""
- def login_test(codename):
- resp = self.client.get('/login')
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Enter Codename", resp.data)
- with self.client as c:
- resp = c.post('/login', data=dict(codename=codename),
- follow_redirects=True)
+ with self.client as client:
+ def login_test(codename):
+ resp = client.get('/login')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Enter Codename", resp.data)
+
+ resp = client.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertIn("Submit Materials", resp.data)
self.assertTrue(session['logged_in'])
- resp = c.get('/logout', follow_redirects=True)
+ resp = client.get('/logout', follow_redirects=True)
- codename = self._new_codename()
- login_test(codename + ' ')
- login_test(' ' + codename + ' ')
- login_test(' ' + codename)
+ codename = new_codename(client, session)
+ login_test(codename + ' ')
+ login_test(' ' + codename + ' ')
+ login_test(' ' + codename)
- def _dummy_submission(self):
+ def _dummy_submission(self, client):
"""
Helper to make a submission (content unimportant), mostly useful in
testing notification behavior for a source's first vs. their
subsequent submissions
"""
- return self.client.post('/submit', data=dict(
+ return client.post('/submit', data=dict(
msg="Pay no attention to the man behind the curtain.",
fh=(StringIO(''), ''),
), follow_redirects=True)
@@ -174,31 +245,35 @@ def test_initial_submission_notification(self):
first submission is always greeted with a notification
reminding sources to check back later for replies.
"""
- self._new_codename()
- resp = self._dummy_submission()
- self.assertEqual(resp.status_code, 200)
- self.assertIn(
- "Thank you for sending this information to us.",
- resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ resp = self._dummy_submission(client)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn(
+ "Thank you for sending this information to us.",
+ resp.data)
def test_submit_message(self):
- self._new_codename()
- self._dummy_submission()
- resp = self.client.post('/submit', data=dict(
- msg="This is a test.",
- fh=(StringIO(''), ''),
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Thanks! We received your message", resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ self._dummy_submission(client)
+ resp = client.post('/submit', data=dict(
+ msg="This is a test.",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message", resp.data)
def test_submit_empty_message(self):
- self._new_codename()
- resp = self.client.post('/submit', data=dict(
- msg="",
- fh=(StringIO(''), ''),
- ), follow_redirects=True)
- self.assertIn("You must enter a message or choose a file to submit.",
- resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ resp = client.post('/submit', data=dict(
+ msg="",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertIn("You must enter a message or choose a file to "
+ "submit.",
+ resp.data)
def test_submit_big_message(self):
'''
@@ -206,37 +281,40 @@ def test_submit_big_message(self):
just residing in memory. Make sure the different return type of
SecureTemporaryFile is handled as well as BytesIO.
'''
- self._new_codename()
- self._dummy_submission()
- resp = self.client.post('/submit', data=dict(
- msg="AA" * (1024 * 512),
- fh=(StringIO(''), ''),
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Thanks! We received your message", resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ self._dummy_submission(client)
+ resp = client.post('/submit', data=dict(
+ msg="AA" * (1024 * 512),
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message", resp.data)
def test_submit_file(self):
- self._new_codename()
- self._dummy_submission()
- resp = self.client.post('/submit', data=dict(
- msg="",
- fh=(StringIO('This is a test'), 'test.txt'),
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- self.assertIn('Thanks! We received your document', resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ self._dummy_submission(client)
+ resp = client.post('/submit', data=dict(
+ msg="",
+ fh=(StringIO('This is a test'), 'test.txt'),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn('Thanks! We received your document', resp.data)
def test_submit_both(self):
- self._new_codename()
- self._dummy_submission()
- resp = self.client.post('/submit', data=dict(
- msg="This is a test",
- fh=(StringIO('This is a test'), 'test.txt'),
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Thanks! We received your message and document",
- resp.data)
+ with self.client as client:
+ new_codename(client, session)
+ self._dummy_submission(client)
+ resp = client.post('/submit', data=dict(
+ msg="This is a test",
+ fh=(StringIO('This is a test'), 'test.txt'),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message and document",
+ resp.data)
- def test_delete_all(self):
+ def test_delete_all_successfully_deletes_replies(self):
journalist, _ = utils.db_helper.init_journalist()
source, codename = utils.db_helper.init_source()
utils.db_helper.reply(journalist, source, 1)
@@ -248,20 +326,37 @@ def test_delete_all(self):
self.assertEqual(resp.status_code, 200)
self.assertIn("All replies have been deleted", resp.data)
+ @patch('source.app.logger.error')
+ def test_delete_all_replies_already_deleted(self, logger):
+ journalist, _ = utils.db_helper.init_journalist()
+ source, codename = utils.db_helper.init_source()
+ # Note that we are creating the source and no replies
+
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ resp = c.post('/delete-all', follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ logger.assert_called_once_with(
+ "Found no replies when at least one was expected"
+ )
+
@patch('gzip.GzipFile', wraps=gzip.GzipFile)
def test_submit_sanitizes_filename(self, gzipfile):
"""Test that upload file name is sanitized"""
insecure_filename = '../../bin/gpg'
sanitized_filename = 'bin_gpg'
- self._new_codename()
- self.client.post('/submit', data=dict(
- msg="",
- fh=(StringIO('This is a test'), insecure_filename),
- ), follow_redirects=True)
- gzipfile.assert_called_with(filename=sanitized_filename,
- mode=ANY,
- fileobj=ANY)
+ with self.client as client:
+ new_codename(client, session)
+ client.post('/submit', data=dict(
+ msg="",
+ fh=(StringIO('This is a test'), insecure_filename),
+ ), follow_redirects=True)
+ gzipfile.assert_called_with(filename=sanitized_filename,
+ mode=ANY,
+ fileobj=ANY)
def test_tor2web_warning_headers(self):
resp = self.client.get('/', headers=[('X-tor2web', 'encrypted')])
@@ -294,13 +389,125 @@ def test_metadata_route(self):
def test_login_with_overly_long_codename(self, mock_hash_codename):
"""Attempting to login with an overly long codename should result in
an error, and scrypt should not be called to avoid DoS."""
- overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1)
with self.client as c:
resp = c.post('/login', data=dict(codename=overly_long_codename),
follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- self.assertIn("Sorry, that is not a recognized codename.",
+ self.assertIn("Field must be between 1 and {} "
+ "characters long.".format(Source.MAX_CODENAME_LEN),
resp.data)
self.assertFalse(mock_hash_codename.called,
"Called hash_codename for codename w/ invalid "
"length")
+
+ @patch('source.app.logger.warning')
+ @patch('subprocess.call', return_value=1)
+ def test_failed_normalize_timestamps_logs_warning(self, call, logger):
+ """If a normalize timestamps event fails, the subprocess that calls
+ touch will fail and exit 1. When this happens, the submission should
+ still occur, but a warning should be logged (this will trigger an
+ OSSEC alert)."""
+
+ with self.client as client:
+ new_codename(client, session)
+ self._dummy_submission(client)
+ resp = client.post('/submit', data=dict(
+ msg="This is a test.",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Thanks! We received your message", resp.data)
+
+ logger.assert_called_once_with(
+ "Couldn't normalize submission "
+ "timestamps (touch exited with 1)"
+ )
+
+ @patch('source.app.logger.error')
+ def test_source_is_deleted_while_logged_in(self, logger):
+ """If a source is deleted by a journalist when they are logged in,
+ a NoResultFound will occur. The source should be redirected to the
+ index when this happens, and a warning logged."""
+
+ with self.client as client:
+ codename = new_codename(client, session)
+ resp = client.post('login', data=dict(codename=codename),
+ follow_redirects=True)
+
+ # Now the journalist deletes the source
+ filesystem_id = crypto_util.hash_codename(codename)
+ crypto_util.delete_reply_keypair(filesystem_id)
+ source = Source.query.filter_by(filesystem_id=filesystem_id).one()
+ db_session.delete(source)
+ db_session.commit()
+
+ # Source attempts to continue to navigate
+ resp = client.post('/lookup', follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn('Submit documents for the first time', resp.data)
+ self.assertNotIn('logged_in', session.keys())
+ self.assertNotIn('codename', session.keys())
+
+ logger.assert_called_once_with(
+ "Found no Sources when one was expected: "
+ "No row was found for one()")
+
+ def test_login_with_invalid_codename(self):
+ """Logging in with a codename with invalid characters should return
+ an informative message to the user."""
+
+ invalid_codename = '[]'
+
+ with self.client as c:
+ resp = c.post('/login', data=dict(codename=invalid_codename),
+ follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Invalid input.", resp.data)
+
+ def _test_source_session_expiration(self):
+ try:
+ old_expiration = config.SESSION_EXPIRATION_MINUTES
+ has_session_expiration = True
+ except AttributeError:
+ has_session_expiration = False
+
+ try:
+ with self.client as client:
+ codename = new_codename(client, session)
+
+ # set the expiration to ensure we trigger an expiration
+ config.SESSION_EXPIRATION_MINUTES = -1
+
+ resp = client.post('/login',
+ data=dict(codename=codename),
+ follow_redirects=True)
+ assert resp.status_code == 200
+ resp = client.get('/lookup', follow_redirects=True)
+
+ # check that the session was cleared (apart from 'expires'
+ # which is always present and 'csrf_token' which leaks no info)
+ session.pop('expires', None)
+ session.pop('csrf_token', None)
+ assert not session, session
+ assert ('You have been logged out due to inactivity' in
+ resp.data.decode('utf-8'))
+ finally:
+ if has_session_expiration:
+ config.SESSION_EXPIRATION_MINUTES = old_expiration
+ else:
+ del config.SESSION_EXPIRATION_MINUTES
+
+ def test_csrf_error_page(self):
+ old_enabled = self.app.config['WTF_CSRF_ENABLED']
+ self.app.config['WTF_CSRF_ENABLED'] = True
+
+ try:
+ with self.app.test_client() as app:
+ resp = app.post(url_for('main.create'))
+ self.assertRedirects(resp, url_for('main.index'))
+
+ resp = app.post(url_for('main.create'), follow_redirects=True)
+ self.assertIn('Your session timed out due to inactivity',
+ resp.data)
+ finally:
+ self.app.config['WTF_CSRF_ENABLED'] = old_enabled
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
import os
+import shutil
import unittest
import zipfile
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
from db import db_session
-import mock
import store
import utils
@@ -22,6 +22,38 @@ def tearDown(self):
utils.env.teardown()
db_session.remove()
+ def create_file_in_source_dir(self, filesystem_id, filename):
+ """Helper function for simulating files"""
+ source_directory = os.path.join(config.STORE_DIR,
+ filesystem_id)
+ os.makedirs(source_directory)
+
+ file_path = os.path.join(source_directory, filename)
+ with open(file_path, 'a'):
+ os.utime(file_path, None)
+
+ return source_directory, file_path
+
+ def test_path_returns_filename_of_folder(self):
+ """store.path is called in this way in journalist.delete_collection"""
+ filesystem_id = 'example'
+
+ generated_absolute_path = store.path(filesystem_id)
+
+ expected_absolute_path = os.path.join(config.STORE_DIR, filesystem_id)
+ self.assertEquals(generated_absolute_path, expected_absolute_path)
+
+ def test_path_returns_filename_of_items_within_folder(self):
+ """store.path is called in this way in journalist.bulk_delete"""
+ filesystem_id = 'example'
+ item_filename = '1-quintuple_cant-msg.gpg'
+
+ generated_absolute_path = store.path(filesystem_id, item_filename)
+
+ expected_absolute_path = os.path.join(config.STORE_DIR,
+ filesystem_id, item_filename)
+ self.assertEquals(generated_absolute_path, expected_absolute_path)
+
def test_verify_path_not_absolute(self):
with self.assertRaises(store.PathException):
store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))
@@ -41,6 +73,39 @@ def test_verify_store_dir_not_absolute(self):
finally:
config.STORE_DIR = STORE_DIR
+ def test_verify_flagged_file_in_sourcedir_returns_true(self):
+ source_directory, file_path = self.create_file_in_source_dir(
+ 'example-filesystem-id', '_FLAG'
+ )
+
+ self.assertTrue(store.verify(file_path))
+
+ shutil.rmtree(source_directory) # Clean up created files
+
+ def test_verify_invalid_file_extension_in_sourcedir_raises_exception(self):
+ source_directory, file_path = self.create_file_in_source_dir(
+ 'example-filesystem-id', 'not_valid.txt'
+ )
+
+ with self.assertRaisesRegexp(
+ store.PathException,
+ 'Invalid file extension .txt'):
+ store.verify(file_path)
+
+ shutil.rmtree(source_directory) # Clean up created files
+
+ def test_verify_invalid_filename_in_sourcedir_raises_exception(self):
+ source_directory, file_path = self.create_file_in_source_dir(
+ 'example-filesystem-id', 'NOTVALID.gpg'
+ )
+
+ with self.assertRaisesRegexp(
+ store.PathException,
+ 'Invalid filename NOTVALID.gpg'):
+ store.verify(file_path)
+
+ shutil.rmtree(source_directory) # Clean up created files
+
def test_get_zip(self):
source, _ = utils.db_helper.init_source()
submissions = utils.db_helper.submit(source, 2)
@@ -69,14 +134,12 @@ def test_rename_valid_submission(self):
new_journalist_filename)
self.assertEquals(actual_filename, expected_filename)
- @mock.patch('store.subprocess.check_call')
- def test_secure_unlink(self, mock_check_call):
- path = os.path.join(config.STORE_DIR, 'FILENAME')
- self.assertEqual(store.secure_unlink(path), "success")
- mock_check_call.assert_called_with(['srm', path])
-
- @mock.patch('store.subprocess.check_call')
- def test_delete_source_directory(self, mock_check_call):
- path = os.path.join(config.STORE_DIR, 'DIRNAME')
- self.assertEqual(store.delete_source_directory('DIRNAME'), "success")
- mock_check_call.assert_called_with(['srm', '-r', path])
+ def test_rename_submission_with_invalid_filename(self):
+ original_filename = '1-quintuple_cant-msg.gpg'
+ returned_filename = store.rename_submission(
+ 'example-filesystem-id', original_filename,
+ 'this-new-filename-should-not-be-returned')
+
+ # None of the above files exist, so we expect the attempt to rename
+ # the submission to fail and the original filename to be returned.
+ self.assertEquals(original_filename, returned_filename)
diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
--- a/securedrop/tests/test_template_filters.py
+++ b/securedrop/tests/test_template_filters.py
@@ -1,57 +1,124 @@
# -*- coding: utf-8 -*-
+import argparse
+import logging
from datetime import datetime, timedelta
-import unittest
+import os
+from flask import session
+
+os.environ['SECUREDROP_ENV'] = 'test' # noqa
+import config
+import i18n
+import journalist
+import manage
+import source
import template_filters
+import version
+
+
+class TestTemplateFilters(object):
+
+ def verify_rel_datetime_format(self, app):
+ with app.test_client() as c:
+ c.get('/')
+ assert session.get('locale') is None
+ result = template_filters.rel_datetime_format(
+ datetime(2016, 1, 1, 1, 1, 1))
+ assert "Jan 01, 2016 01:01 AM" == result
+
+ result = template_filters.rel_datetime_format(
+ datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy")
+ assert "2016" == result
+
+ test_time = datetime.utcnow() - timedelta(hours=2)
+ result = template_filters.rel_datetime_format(test_time,
+ relative=True)
+ assert "2 hours ago" == result
+
+ c.get('/?l=fr_FR')
+ assert session.get('locale') == 'fr_FR'
+ result = template_filters.rel_datetime_format(
+ datetime(2016, 1, 1, 1, 1, 1))
+ assert "janv. 01, 2016 01:01 AM" == result
+
+ result = template_filters.rel_datetime_format(
+ datetime(2016, 1, 1, 1, 1, 1), fmt="yyyy")
+ assert "2016" == result
+
+ test_time = datetime.utcnow() - timedelta(hours=2)
+ result = template_filters.rel_datetime_format(test_time,
+ relative=True)
+ assert "2 heures" in result
+
+ def verify_filesizeformat(self, app):
+ with app.test_client() as c:
+ c.get('/')
+ assert session.get('locale') is None
+ assert "1 byte" == template_filters.filesizeformat(1)
+ assert "2 bytes" == template_filters.filesizeformat(2)
+ value = 1024 * 3
+ assert "3 kB" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3 MB" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3 GB" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3 TB" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3,072 TB" == template_filters.filesizeformat(value)
+
+ c.get('/?l=fr_FR')
+ assert session.get('locale') == 'fr_FR'
+ assert "1 octet" == template_filters.filesizeformat(1)
+ assert "2 octets" == template_filters.filesizeformat(2)
+ value = 1024 * 3
+ assert "3 ko" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3 Mo" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3 Go" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "3 To" == template_filters.filesizeformat(value)
+ value *= 1024
+ assert "072 To" in template_filters.filesizeformat(value)
+
+ def test_filters(self):
+ sources = [
+ 'tests/i18n/code.py',
+ ]
+ kwargs = {
+ 'translations_dir': config.TEMP_DIR,
+ 'mapping': 'tests/i18n/babel.cfg',
+ 'source': sources,
+ 'extract_update': True,
+ 'compile': True,
+ 'verbose': logging.DEBUG,
+ 'version': version.__version__,
+ }
+ args = argparse.Namespace(**kwargs)
+ manage.setup_verbosity(args)
+ manage.translate_messages(args)
+
+ manage.sh("""
+ pybabel init -i {d}/messages.pot -d {d} -l en_US
+ pybabel init -i {d}/messages.pot -d {d} -l fr_FR
+ """.format(d=config.TEMP_DIR))
+ supported = getattr(config, 'SUPPORTED_LOCALES', None)
+ try:
+ if supported:
+ del config.SUPPORTED_LOCALES
+ for app in (journalist.app, source.app):
+ config.SUPPORTED_LOCALES = ['en_US', 'fr_FR']
+ app.config['BABEL_TRANSLATION_DIRECTORIES'] = config.TEMP_DIR
+ i18n.setup_app(app)
+ self.verify_filesizeformat(app)
+ self.verify_rel_datetime_format(app)
+ finally:
+ if supported:
+ config.SUPPORTED_LOCALES = supported
-class TestTemplateFilters(unittest.TestCase):
-
- def test_datetimeformat_default_fmt(self):
- result = template_filters.datetimeformat(datetime(2016, 1, 1, 1, 1, 1))
- self.assertEquals("Jan 01, 2016 01:01 AM", result)
-
- def test_datetimeformat_unusual_fmt(self):
- result = template_filters.datetimeformat(datetime(2016, 1, 1, 1, 1, 1),
- fmt="%b %d %Y")
- self.assertEquals("Jan 01 2016", result)
-
- def test_relative_timestamp_seconds(self):
- test_time = datetime.utcnow() - timedelta(seconds=5)
- result = template_filters._relative_timestamp(test_time)
- self.assertIn("seconds", result)
-
- def test_relative_timestamp_one_minute(self):
- test_time = datetime.utcnow() - timedelta(minutes=1)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals("a minute", result)
-
- def test_relative_timestamp_minutes(self):
- test_time = datetime.utcnow() - timedelta(minutes=10)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals("10 minutes", result)
-
- def test_relative_timestamp_one_hour(self):
- test_time = datetime.utcnow() - timedelta(hours=1)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals("an hour", result)
-
- def test_relative_timestamp_hours(self):
- test_time = datetime.utcnow() - timedelta(hours=10)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals("10 hours", result)
-
- def test_relative_timestamp_one_day(self):
- test_time = datetime.utcnow() - timedelta(days=1)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals("a day", result)
-
- def test_relative_timestamp_days(self):
- test_time = datetime.utcnow() - timedelta(days=4)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals("4 days", result)
-
- def test_relative_timestamp_none(self):
- test_time = datetime.utcnow() - timedelta(days=999)
- result = template_filters._relative_timestamp(test_time)
- self.assertEquals(None, result)
+ @classmethod
+ def teardown_class(cls):
+ reload(journalist)
+ reload(source)
diff --git a/securedrop/tests/utils/async.py b/securedrop/tests/utils/async.py
--- a/securedrop/tests/utils/async.py
+++ b/securedrop/tests/utils/async.py
@@ -7,7 +7,7 @@
REDIS_SUCCESS_RETURN_VALUE = 'success'
-def wait_for_redis_worker(job, timeout=5):
+def wait_for_redis_worker(job, timeout=60):
"""Raise an error if the Redis job doesn't complete successfully
before a timeout.
@@ -27,7 +27,7 @@ def wait_for_redis_worker(job, timeout=5):
assert False, 'Redis worker timed out!'
-def wait_for_assertion(assertion_expression, timeout=5):
+def wait_for_assertion(assertion_expression, timeout=10):
"""Calls an assertion_expression repeatedly, until the assertion
passes or a timeout is reached.
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
--- a/securedrop/tests/utils/db_helper.py
+++ b/securedrop/tests/utils/db_helper.py
@@ -144,15 +144,13 @@ def submit(source, num_submissions):
return submissions
-# NOTE: this method is potentially dangerous to rely on for now due
-# to the fact flask_testing.TestCase only uses on request context
-# per method (see
-# https://github.com/freedomofpress/securedrop/issues/1444).
def new_codename(client, session):
"""Helper function to go through the "generate codename" flow.
"""
- with client as c:
- c.get('/generate')
- codename = session['codename']
- c.post('/create')
+ # clear the session because our tests have implicit reliance on each other
+ session.clear()
+
+ client.get('/generate')
+ codename = session['codename']
+ client.post('/create')
return codename
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -52,11 +52,6 @@ def setup():
init_db()
# Do tests that should always run on app startup
crypto_util.do_runtime_tests()
- # Start the Python-RQ worker if it's not already running
- if not exists(TEST_WORKER_PIDFILE):
- subprocess.Popen(["rqworker",
- "-P", config.SECUREDROP_ROOT,
- "--pid", TEST_WORKER_PIDFILE])
def teardown():
@@ -69,10 +64,7 @@ def teardown():
db_session.remove()
try:
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
+ assert not os.path.exists(config.SECUREDROP_DATA_ROOT) # safeguard for #844
except OSError as exc:
- os.system("find " + config.SECUREDROP_DATA_ROOT) # REMOVE ME, see #844
if 'No such file or directory' not in exc:
raise
- except:
- os.system("find " + config.SECUREDROP_DATA_ROOT) # REMOVE ME, see #844
- raise
diff --git a/testinfra/app/test_network.py b/testinfra/app/test_network.py
--- a/testinfra/app/test_network.py
+++ b/testinfra/app/test_network.py
@@ -7,11 +7,12 @@
securedrop_test_vars = pytest.securedrop_test_vars
[email protected]()
def test_app_iptables_rules(SystemInfo, Command, Sudo):
# Build a dict of variables to pass to jinja for iptables comparison
kwargs = dict(
- mon_ip=securedrop_test_vars.mon_ip,
+ mon_ip=os.environ.get('MON_IP', securedrop_test_vars.mon_ip),
default_interface=Command.check_output("ip r | head -n 1 | "
"awk '{ print $5 }'"),
tor_user_id=Command.check_output("id -u debian-tor"),
diff --git a/testinfra/app/test_ossec.py b/testinfra/app/test_ossec.py
--- a/testinfra/app/test_ossec.py
+++ b/testinfra/app/test_ossec.py
@@ -1,14 +1,16 @@
+import os
import re
import pytest
sdvars = pytest.securedrop_test_vars
+testinfra_hosts = ["app", "app-staging"]
def test_hosts_files(File, SystemInfo):
""" Ensure host files mapping are in place """
f = File('/etc/hosts')
- mon_ip = sdvars.mon_ip
+ mon_ip = os.environ.get('MON_IP', sdvars.mon_ip)
mon_host = sdvars.monitor_hostname
assert f.contains('^127.0.0.1\s*localhost')
@@ -32,8 +34,8 @@ def test_ossec_agent_installed(Package):
def test_ossec_keyfile_present(File, Command, Sudo, SystemInfo):
""" ensure client keyfile for ossec-agent is present """
pattern = "^1024 {} {} [0-9a-f]{{64}}$".format(
- sdvars.app_hostname,
- sdvars.app_ip)
+ sdvars.app_hostname,
+ os.environ.get('APP_IP', sdvars.app_ip))
regex = re.compile(pattern)
with Sudo():
diff --git a/testinfra/common/test_fpf_apt_repo.py b/testinfra/common/test_fpf_apt_repo.py
--- a/testinfra/common/test_fpf_apt_repo.py
+++ b/testinfra/common/test_fpf_apt_repo.py
@@ -1,3 +1,6 @@
+import pytest
+
+
def test_fpf_apt_repo_present(File):
"""
Ensure the FPF apt repo, apt.freedom.press, is configured.
@@ -29,18 +32,27 @@ def test_fpf_apt_repo_fingerprint(Command):
fpf_gpg_pub_key_info = """/etc/apt/trusted.gpg.d/securedrop-keyring.gpg
---------------------------------------------
-pub 4096R/00F4AD77 2016-10-20 [expires: 2017-10-20]
+pub 4096R/00F4AD77 2016-10-20 [expires: 2018-10-05]
Key fingerprint = 2224 5C81 E3BA EB41 38B3 6061 310F 5612 00F4 AD77
uid SecureDrop Release Signing Key"""
assert c.rc == 0
assert fpf_gpg_pub_key_info in c.stdout
- fpf_gpg_pub_key_fingerprint_expired = ('B89A 29DB 2128 160B 8E4B '
- '1B4C BADD E0C7 FC9F 6818')
- fpf_gpg_pub_key_info_expired = """pub 4096R/FC9F6818 2014-10-26 [expired: 2016-10-27]
- Key fingerprint = #{fpf_gpg_pub_key_fingerprint_expired}
-uid Freedom of the Press Foundation Master Signing Key"""
- assert fpf_gpg_pub_key_fingerprint_expired not in c.stdout
- assert fpf_gpg_pub_key_info_expired not in c.stdout
[email protected]('old_pubkey', [
+ 'pub 4096R/FC9F6818 2014-10-26 [expired: 2016-10-27]',
+ 'pub 4096R/00F4AD77 2016-10-20 [expires: 2017-10-20]',
+ 'pub 4096R/00F4AD77 2016-10-20 [expired: 2017-10-20]',
+ 'uid Freedom of the Press Foundation Master Signing Key',
+ 'B89A 29DB 2128 160B 8E4B 1B4C BADD E0C7 FC9F 6818',
+])
+def test_fpf_apt_repo_old_pubkeys_absent(Command, old_pubkey):
+ """
+ Ensure that expired (or about-to-expire) public keys for the FPF
+ apt repo are NOT present. Updates to the securedrop-keyring package
+ should enforce clobbering of old pubkeys, and this check will confirm
+ absence.
+ """
+ c = Command('apt-key finger')
+ assert old_pubkey not in c.stdout
diff --git a/testinfra/common/test_grsecurity.py b/testinfra/common/test_grsecurity.py
--- a/testinfra/common/test_grsecurity.py
+++ b/testinfra/common/test_grsecurity.py
@@ -143,6 +143,8 @@ def test_grub_pc_marked_manual(Command):
assert c.stdout == "grub-pc"
[email protected](os.environ.get('FPF_GRSEC', 'true') == "false",
+ reason="Need to skip in environment w/o grsec")
def test_apt_autoremove(Command):
"""
Ensure old packages have been autoremoved.
diff --git a/testinfra/common/test_ip6tables.py b/testinfra/common/test_ip6tables.py
--- a/testinfra/common/test_ip6tables.py
+++ b/testinfra/common/test_ip6tables.py
@@ -1,3 +1,7 @@
+import pytest
+
+
[email protected]()
def test_ip6tables_drop_everything(Command, Sudo):
"""
Ensure that all IPv6 packets are dropped by default.
diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py
--- a/testinfra/common/test_user_config.py
+++ b/testinfra/common/test_user_config.py
@@ -1,7 +1,6 @@
import os
import pytest
import re
-import getpass
hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
@@ -79,9 +78,7 @@ def test_sudoers_tmux_env_deprecated(File):
admin_user = "vagrant"
if os.environ.get("FPF_CI", None):
- admin_user = getpass.getuser()
- if admin_user == "root":
- admin_user = "ubuntu"
+ admin_user = "sdrop"
f = File("/home/{}/.bashrc".format(admin_user))
assert not f.contains("^. \/etc\/bashrc\.securedrop_additions$")
diff --git a/testinfra/functional/test_tor_interfaces.py b/testinfra/functional/test_tor_interfaces.py
--- a/testinfra/functional/test_tor_interfaces.py
+++ b/testinfra/functional/test_tor_interfaces.py
@@ -5,6 +5,7 @@
sdvars = pytest.securedrop_test_vars
[email protected]
@pytest.mark.parametrize('site', sdvars.tor_url_files)
@pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false",
reason="Can only assure Tor is configured in CI atm")
diff --git a/testinfra/mon/test_network.py b/testinfra/mon/test_network.py
--- a/testinfra/mon/test_network.py
+++ b/testinfra/mon/test_network.py
@@ -7,12 +7,12 @@
securedrop_test_vars = pytest.securedrop_test_vars
[email protected]()
def test_mon_iptables_rules(SystemInfo, Command, Sudo):
- app_ip = securedrop_test_vars.app_ip
# Build a dict of variables to pass to jinja for iptables comparison
kwargs = dict(
- app_ip=app_ip,
+ app_ip=os.environ.get('APP_IP', securedrop_test_vars.app_ip),
default_interface=Command.check_output(
"ip r | head -n 1 | awk '{ print $5 }'"),
tor_user_id=Command.check_output("id -u debian-tor"),
@@ -49,6 +49,7 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo):
dict(host="0.0.0.0", proto="udp", port=1514, listening=True),
dict(host="0.0.0.0", proto="tcp", port=1515, listening=False),
])
[email protected]
def test_listening_ports(Socket, Sudo, ossec_service):
"""
Ensure the OSSEC-related services are listening on the
diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py
--- a/testinfra/mon/test_ossec.py
+++ b/testinfra/mon/test_ossec.py
@@ -1,3 +1,4 @@
+import os
import pytest
@@ -19,6 +20,7 @@ def test_ossec_package(Package, package):
assert Package(package).is_installed
[email protected](strict=True)
def test_ossec_connectivity(Command, Sudo):
"""
Ensure ossec-server machine has active connection to the ossec-agent.
@@ -27,7 +29,7 @@ def test_ossec_connectivity(Command, Sudo):
"""
desired_output = "{}-{} is available.".format(
securedrop_test_vars.app_hostname,
- securedrop_test_vars.app_ip)
+ os.environ.get('APP_IP', securedrop_test_vars.app_ip))
with Sudo():
c = Command.check_output("/var/ossec/bin/list_agents -a")
assert c == desired_output
@@ -155,7 +157,7 @@ def test_hosts_files(File, SystemInfo):
""" Ensure host files mapping are in place """
f = File('/etc/hosts')
- app_ip = securedrop_test_vars.app_ip
+ app_ip = os.environ.get('APP_IP', securedrop_test_vars.app_ip)
app_host = securedrop_test_vars.app_hostname
assert f.contains('^127.0.0.1.*localhost')
diff --git a/testinfra/test.py b/testinfra/test.py
--- a/testinfra/test.py
+++ b/testinfra/test.py
@@ -92,9 +92,7 @@ def run_testinfra(target_host, verbose=True):
else:
if target_host in ["apptestclient"]:
conn_type = "docker"
- ssh_config_path = "{}/.ssh/sshconfig-securedrop-ci-{}".format(
- os.environ["HOME"],
- os.environ["BUILD_NUM"])
+ ssh_config_path = os.environ["CI_SSH_CONFIG"]
testinfra_command_template = """
testinfra \
-vv \
| CSS breakage for source in de_DE
# Bug
## Description
The source interface when someone tries to submit a document using `de_DE` locale, the css breaks for the buttons below.
## Steps to Reproduce
Using `de_DE` locale, try to submit a document as source.
## Expected Behavior
The buttons should come up horizontally.
## Actual Behavior
![de_css_break](https://user-images.githubusercontent.com/272303/33506010-d2037fc2-d713-11e7-849e-da9c5702a755.png)
Poor input validation on OTP secret field in journalist interface
## Description
500 error thrown due to lack of validation in new user form
Same issue as #2311, except that issue was for `manage.py`
## Steps to Reproduce
0. Sign in as administrator on journalist interface
1. Try to create new user using a Yubikey and provide a secret with an odd length
## Expected Behavior
Happy error message is shown indicating that the expected length of the secret is 40 characters
## Actual Behavior
![screen shot 2017-10-31 at 3 01 26 pm](https://user-images.githubusercontent.com/7832803/32251032-7280e2e0-be4c-11e7-9de5-10746fd27def.png)
## Comments
Simple fix, check length of string and flash an appropriate error message
Invert `login_required` decorator on journalist interface and require logins by default
# Feature request
## Description
We use a decorator called `login_required` but may forget to add that to an endpoint in the future. A better way would be to use `app.before_request` and check that if the URL doesn't match `/static/*` or `/login` then to redirect to login if the user isn't logged in.
## User Stories
As a dev, I don't want to have to remember to add a decorator to every endpoint and then miss one and break so much of SecureDrop's security.
| 2017-12-06T23:48:32Z | [] | [] |
|
freedomofpress/securedrop | 2,756 | freedomofpress__securedrop-2756 | [
"2303"
] | d2f9f9f7632c290f86ed0031341f14ae8bc1e6b5 | diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py
--- a/securedrop/journalist_app/account.py
+++ b/securedrop/journalist_app/account.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
from flask import (Blueprint, render_template, request, g, redirect, url_for,
- flash)
+ flash, session)
from flask_babel import gettext
from db import db_session
@@ -29,6 +29,9 @@ def new_password():
error_message):
password = request.form.get('password')
set_diceware_password(user, password)
+ session.pop('uid', None)
+ session.pop('expires', None)
+ return redirect(url_for('main.login'))
return redirect(url_for('account.edit'))
@view.route('/2fa', methods=('GET', 'POST'))
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -301,7 +301,7 @@ def test_admin_edits_user_password_error_response(self):
assert ('There was an error, and the new password might not have '
'been saved correctly.') in resp.data.decode('utf-8')
- def test_user_edits_password_success_reponse(self):
+ def test_user_edits_password_success_response(self):
self._login_user()
resp = self.client.post(
url_for('account.new_password'),
@@ -314,6 +314,27 @@ def test_user_edits_password_success_reponse(self):
assert "Password updated." in text
assert VALID_PASSWORD_2 in text
+ def test_user_edits_password_expires_session(self):
+ with self.client as client:
+ # do a real login to get a real session
+ # (none of the mocking `g` hacks)
+ resp = client.post(url_for('main.login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token='mocked'))
+ self.assertRedirects(resp, url_for('main.index'))
+ assert 'uid' in session
+
+ resp = client.post(
+ url_for('account.new_password'),
+ data=dict(current_password=self.user_pw,
+ token='mocked',
+ password=VALID_PASSWORD_2))
+
+ self.assertRedirects(resp, url_for('main.login'))
+ # verify the session was expired after the password was changed
+ assert 'uid' not in session
+
def test_user_edits_password_error_reponse(self):
self._login_user()
@@ -737,14 +758,6 @@ def test_incorrect_current_password_change(self):
text = resp.data.decode('utf-8')
self.assertIn('Incorrect password or two-factor code', text)
- def test_invalid_user_password_change(self):
- self._login_user()
- res = self.client.post(url_for('account.new_password'),
- data=dict(password='badpw',
- token='mocked',
- current_password=self.user_pw))
- self.assertRedirects(res, url_for('account.edit'))
-
def test_too_long_user_password_change(self):
self._login_user()
@@ -1077,17 +1090,18 @@ def test_journalist_session_expiration(self):
try:
with self.client as client:
- # do a real login to get a real session
- # (none of the mocking `g` hacks)
- resp = self.client.post(url_for('main.login'),
- data=dict(username=self.user.username,
- password=VALID_PASSWORD,
- token='mocked'))
- assert resp.status_code == 200
-
# set the expiration to ensure we trigger an expiration
config.SESSION_EXPIRATION_MINUTES = -1
+ # do a real login to get a real session
+ # (none of the mocking `g` hacks)
+ resp = client.post(url_for('main.login'),
+ data=dict(username=self.user.username,
+ password=self.user_pw,
+ token='mocked'))
+ self.assertRedirects(resp, url_for('main.index'))
+ assert 'uid' in session
+
resp = client.get(url_for('account.edit'),
follow_redirects=True)
| Failure to invalidate session when user resets their own password
## Description
When a user resets their own password, their session is not invalidated.
## Steps to Reproduce
1. User logs in
2. User resets password
## Expected Behavior
User is logged out and is requested to use their new password to login
## Actual Behavior
User can continue to browse without having to enter their new password again
## Comments
Related tickets: #2300, #880
| 2017-12-17T23:42:59Z | [] | [] |
|
freedomofpress/securedrop | 2,757 | freedomofpress__securedrop-2757 | [
"2705"
] | 416a881ba4b0799c4975093776c233e2bd1a25c4 | diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -189,9 +189,8 @@ def _add_user(is_admin=False):
else:
print('User "{}" successfully added'.format(username))
if not otp_secret:
- # Print the QR code for FreeOTP/ Google Authenticator
- print('\nScan the QR code below with FreeOTP or Google '
- 'Authenticator:\n')
+ # Print the QR code for FreeOTP
+ print('\nScan the QR code below with FreeOTP:\n')
uri = user.totp.provisioning_uri(username,
issuer_name='SecureDrop')
qr = qrcode.QRCode()
| diff --git a/docs/test_the_installation.rst b/docs/test_the_installation.rst
--- a/docs/test_the_installation.rst
+++ b/docs/test_the_installation.rst
@@ -29,13 +29,13 @@ Log in to both servers via TTY
All access to the SecureDrop servers should be performed over SSH from the
*Admin Workstation*. To aid in troubleshooting, physical logins via TTY are
supported, but require 2FA to be configured. See the :doc:`2FA setup guide
-<google_authenticator>` for information how to enable console logins.
+<servers_2fa>` for information how to enable console logins.
Test the 2FA functionality by connecting a keyboard and display to each server,
then login with the Admin username. You will need:
* sudo passphrase for the Admin username
-* TOTP code from a 2FA app such as Google Authenticator or FreeOTP
+* TOTP code from a 2FA app such as FreeOTP
Confirm that logging in via TTY prompts for a 2FA code, and that the code
generated by your smartphone app permits logging in to an interactive shell.
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -159,9 +159,9 @@ def _admin_adds_a_user(self):
if not hasattr(self, 'accept_languages'):
# Clicking submit on the add user form should redirect to
- # the Google Authenticator page
+ # the FreeOTP page
h1s = self.driver.find_elements_by_tag_name('h1')
- assert "Enable Google Authenticator" in [el.text for el in h1s]
+ assert "Enable FreeOTP" in [el.text for el in h1s]
# Retrieve the saved user object from the db and keep it around for
# further testing
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -532,7 +532,7 @@ def test_http_get_on_admin_new_user_two_factor_page(self):
resp = self.client.get(url_for('admin.new_user_two_factor',
uid=self.user.id))
# any GET req should take a user to the admin.new_user_two_factor page
- self.assertIn('Authenticator', resp.data)
+ self.assertIn('FreeOTP', resp.data)
def test_http_get_on_admin_add_user_page(self):
self._login_admin()
| Recommend a FOSS OTP tool
# Feature request
## Description
Google's authenticator is proprietary, and FreeOTP, which is mentioned once in the UI strings, is no longer maintained, even though it could probably still be functional on current Androids.
Perhaps the **main recommendation** and in-line examples should be of FOSS OTP tools, such as [andOTP](https://github.com/andOTP/andOTP) or [FreeOTP](https://github.com/freeotp/freeotp-android), by using their names in the user's directions, with Google's as a secondary recommendation, if at all.
## User Stories
When advising users in similar situation, I recommend FOSS whenever possible. This is even more relevant in contexts of security and privacy.
FreeOTP still works as of Nougat; I see activity this month here: https://github.com/freeotp/freeotp-android/commits/master What exactly isn't maintained anymore? andOTP mentioned that they're doing this now as a side project. I like your suggestion.
> Google's authenticator is proprietary
I did not realize that. There is a Free Software [version of Google Authenticator](https://github.com/google/google-authenticator) but it explicitly says the Android app is **not** Free Software. Does anyone know more about that ?
If it is confirmed to be a proprietary blob I second your proposal.
> FreeOTP [...] is no longer maintained
@agharbeia Citation? I agree with @KwadroNaut: there's still activity on the GitHub repo. Have you seen an announcement by the maintainers that the project is deprecated?
Big fan of recommending FreeOTP. We're already doing that (see 476223f26 via #1746), but happy to give it priority. We apparently only recommend FreeOTP in comparatively few locations:
```
$ grep -riPo '(freeotp|google(\s+auth\w+))' docs | cut -d: -f2 | sort | uniq -c
1 freeotp
3 FreeOTP
18 Google Authenticator
```
Would gladly review a PR that mentions FOSS alternatives first and Google Authenticator second. Unclear to me what FOSS alternatives should be for iOS usersβboth FreeOTP and andOTP mentioned above appear to be Android-only.
You are right, @KwadroNaut and @conorsch. it seems FreeOTP is still alive after all.
I should have rechecked before claiming that. The last time I had, a couple of months ago, it seemed to me to have been dormant for some time.
I have concerns. I definitely agree 100% that we should use FOSS in every case that we can. However, many large news organizations have security requirements that mandate a specific 2FA application (perhaps by Symantec or others). We should keep that in mind.
Sure, some orgs mandate use of a specific 2FA application (e.g. Duo or Authy). As long as it's TOTP, it should work just fine. Worth pointing out that the TOTP use case is general and not specific to any one application over others.
Google Authenticator has permissions to access internet and camera [according to exodus](https://reports.exodus-privacy.eu.org/reports/537/). That plus the fact that it's proprietary does not look too good.
> I did not realize that. There is a Free Software version of Google Authenticator but it explicitly says the Android app is not Free Software. Does anyone know more about that ?
@dachary you're not linking to the google-authenticator-android project. It used to be open (Apache license), but, like many pieces of Android that Google maintains and develops, it is now only available under a closed license. See https://github.com/google/google-authenticator-android
> This project is an older fork of the one on the Play store. It's an older version that doesn't get changes synced to it from the Play store version.
@b-meson there will always be a discrepancy between that what's suggested and that what's being used. There are some missing features/bugs with Duo and FreeOTP (ie, no u2f), but all the basic stuff (TOTP) works fine. @conorsch idea to point out that TOTP is the magic keyword here, regardless of app, is a good one.
@dachary permissions camera: scan a qr-code, internet: not sure.
> you're not linking to the google-authenticator-android project.
Ah, right. So it's confirmed that Google Authenticator as found in the app store is indeed a proprietary blob ?
> permissions camera: scan a qr-code, internet: not sure.
Yes, indeed :-) | 2017-12-18T11:50:37Z | [] | [] |
freedomofpress/securedrop | 2,758 | freedomofpress__securedrop-2758 | [
"2522"
] | 4fd70c4835ff62749eb2fbb55dabda56e2fe8652 | diff --git a/admin/bootstrap.py b/admin/bootstrap.py
new file mode 100755
--- /dev/null
+++ b/admin/bootstrap.py
@@ -0,0 +1,220 @@
+# -*- mode: python; coding: utf-8 -*-
+#
+# Copyright (C) 2013-2018 Freedom of the Press Foundation & al
+# Copyright (C) 2018 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+
+sdlog = logging.getLogger(__name__)
+
+DIR = os.path.dirname(os.path.realpath(__file__))
+VENV_DIR = os.path.join(DIR, ".venv")
+
+
+def setup_logger(verbose=False):
+ """ Configure logging handler """
+ # Set default level on parent
+ sdlog.setLevel(logging.DEBUG)
+ level = logging.DEBUG if verbose else logging.INFO
+
+ stdout = logging.StreamHandler(sys.stdout)
+ stdout.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ stdout.setLevel(level)
+ sdlog.addHandler(stdout)
+
+
+def run_command(command):
+ """
+ Wrapper function to display stdout for running command,
+ similar to how shelling out in a Bash script displays rolling output.
+
+ Yields a list of the stdout from the `command`, and raises a
+ CalledProcessError if `command` returns non-zero.
+ """
+ popen = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ for stdout_line in iter(popen.stdout.readline, ""):
+ yield stdout_line
+ popen.stdout.close()
+ return_code = popen.wait()
+ if return_code:
+ raise subprocess.CalledProcessError(return_code, command)
+
+
+def is_tails():
+ try:
+ id = subprocess.check_output('lsb_release --id --short',
+ shell=True).strip()
+ except subprocess.CalledProcessError:
+ id = None
+ return id == 'Tails'
+
+
+def maybe_torify():
+ if is_tails():
+ return ['torify']
+ else:
+ return []
+
+
+def install_apt_dependencies(args):
+ """
+ Install apt dependencies in Tails. In order to install Ansible in
+ a virtualenv, first there are a number of Python prerequisites.
+ """
+ sdlog.info("Installing SecureDrop Admin dependencies")
+ sdlog.info(("You'll be prompted for the temporary Tails admin password,"
+ " which was set on Tails login screen"))
+
+ apt_command = ['sudo', 'su', '-c',
+ "apt-get update && \
+ apt-get -q -o=Dpkg::Use-Pty=0 install -y \
+ python-virtualenv \
+ python-yaml \
+ python-pip \
+ ccontrol \
+ virtualenv \
+ libffi-dev \
+ libssl-dev \
+ libpython2.7-dev",
+ ]
+
+ try:
+ # Print command results in real-time, to keep Admin apprised
+ # of progress during long-running command.
+ for output_line in run_command(apt_command):
+ print(output_line.rstrip())
+ except subprocess.CalledProcessError:
+ # Tails supports apt persistence, which was used by SecureDrop
+ # under Tails 2.x. If updates are being applied, don't try to pile
+ # on with more apt requests.
+ sdlog.error(("Failed to install apt dependencies. Check network"
+ " connection and try again."))
+ raise
+
+
+def envsetup(args):
+ """Installs Admin tooling required for managing SecureDrop. Specifically:
+
+ * updates apt-cache
+ * installs apt packages for Python virtualenv
+ * creates virtualenv
+ * installs pip packages inside virtualenv
+
+ The virtualenv is created within the Persistence volume in Tails, so that
+ Ansible is available to the Admin on subsequent boots without requiring
+ installation of packages again.
+ """
+ # virtualenv doesnt exist? Install dependencies and create
+ if not os.path.exists(VENV_DIR):
+
+ install_apt_dependencies(args)
+
+ # Technically you can create a virtualenv from within python
+ # but pip can only be run over tor on tails, and debugging that
+ # along with instaling a third-party dependency is not worth
+ # the effort here.
+ sdlog.info("Setting up virtualenv")
+ try:
+ sdlog.debug(subprocess.check_output(
+ maybe_torify() + ['virtualenv', VENV_DIR],
+ stderr=subprocess.STDOUT))
+ except subprocess.CalledProcessError as e:
+ sdlog.debug(e.output)
+ sdlog.error(("Unable to create virtualenv. Check network settings"
+ " and try again."))
+ raise
+ else:
+ sdlog.info("Virtualenv already exists, not creating")
+
+ install_pip_dependencies(args)
+ if os.path.exists(os.path.join(DIR, 'setup.py')):
+ install_pip_self(args)
+
+ sdlog.info("Finished installing SecureDrop dependencies")
+
+
+def install_pip_self(args):
+ pip_install_cmd = [
+ os.path.join(VENV_DIR, 'bin', 'pip'),
+ 'install', '-e', DIR
+ ]
+ try:
+ subprocess.check_output(maybe_torify() + pip_install_cmd,
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ sdlog.debug(e.output)
+ sdlog.error("Unable to install self, run with -v for more information")
+ raise
+
+
+def install_pip_dependencies(args, pip_install_cmd=[
+ os.path.join(VENV_DIR, 'bin', 'pip'),
+ 'install',
+ # Specify requirements file.
+ '-r', os.path.join(DIR, 'requirements.txt'),
+ '--require-hashes',
+ # Make sure to upgrade packages only if necessary.
+ '-U', '--upgrade-strategy', 'only-if-needed',
+]):
+ """
+ Install Python dependencies via pip into virtualenv.
+ """
+
+ sdlog.info("Checking Python dependencies for securedrop-admin")
+ try:
+ pip_output = subprocess.check_output(maybe_torify() + pip_install_cmd,
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ sdlog.debug(e.output)
+ sdlog.error(("Failed to install pip dependencies. Check network"
+ " connection and try again."))
+ raise
+
+ sdlog.debug(pip_output)
+ if "Successfully installed" in pip_output:
+ sdlog.info("Python dependencies for securedrop-admin upgraded")
+ else:
+ sdlog.info("Python dependencies for securedrop-admin are up-to-date")
+
+
+def parse_argv(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-v', action='store_true', default=False,
+ help="Increase verbosity on output")
+ parser.set_defaults(func=envsetup)
+
+ return parser.parse_args(argv)
+
+
+if __name__ == "__main__":
+ args = parse_argv(sys.argv[1:])
+ setup_logger(args.v)
+ if args.v:
+ args.func(args)
+ else:
+ try:
+ args.func(args)
+ except Exception:
+ sys.exit(1)
+ else:
+ sys.exit(0)
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
new file mode 100755
--- /dev/null
+++ b/admin/securedrop_admin/__init__.py
@@ -0,0 +1,556 @@
+# -*- mode: python; coding: utf-8 -*-
+#
+# Copyright (C) 2013-2018 Freedom of the Press Foundation & al
+# Copyright (C) 2018 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+"""
+SecureDrop Admin Toolkit.
+
+For use by administrators to install, maintain, and manage their SD
+instances.
+"""
+
+import argparse
+import logging
+import os
+import re
+import string
+import subprocess
+import sys
+import types
+import prompt_toolkit
+from prompt_toolkit.validation import Validator, ValidationError
+import yaml
+
+sdlog = logging.getLogger(__name__)
+
+
+class FingerprintException(Exception):
+ pass
+
+
+class SiteConfig(object):
+
+ class ValidateNotEmpty(Validator):
+ def validate(self, document):
+ if document.text != '':
+ return True
+ raise ValidationError(
+ message="Must not be an empty string")
+
+ class ValidateUser(Validator):
+ def validate(self, document):
+ text = document.text
+ if text != '' and text != 'root' and text != 'amnesia':
+ return True
+ raise ValidationError(
+ message="Must not be root, amnesia or an empty string")
+
+ class ValidateIP(Validator):
+ def validate(self, document):
+ if re.match('((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$',
+ document.text):
+ return True
+ raise ValidationError(
+ message="An IP address must be something like 10.240.20.83")
+
+ class ValidateDNS(Validator):
+ def validate(self):
+ raise Exception() # pragma: no cover
+
+ def is_tails(self):
+ try:
+ id = subprocess.check_output('lsb_release --id --short',
+ shell=True).strip()
+ except subprocess.CalledProcessError:
+ id = None
+ return id == 'Tails'
+
+ def lookup_fqdn(self, fqdn, dns=None):
+ cmd = 'host -W=10 -T -4 ' + fqdn
+ if self.is_tails():
+ cmd = 'torify ' + cmd
+ cmd += ' ' + (dns and dns or '8.8.8.8')
+ try:
+ result = subprocess.check_output(cmd, shell=True,
+ stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ result = e.output
+ sdlog.debug(cmd + ' => ' + result)
+ return 'has address' in result
+
+ class ValidateDNSServer(ValidateDNS):
+ def validate(self, document):
+ if self.lookup_fqdn('gnu.org', document.text):
+ return True
+ raise ValidationError(
+ message='Unable to resolve gnu.org using this DNS')
+
+ class ValidateFQDN(ValidateDNS):
+ def validate(self, document):
+ if self.lookup_fqdn(document.text):
+ return True
+ raise ValidationError(
+ message='Unable to resolve ' + document.text)
+
+ class ValidatePath(Validator):
+ def __init__(self, basedir):
+ self.basedir = basedir
+ super(SiteConfig.ValidatePath, self).__init__()
+
+ def validate(self, document):
+ if document.text == '':
+ raise ValidationError(
+ message='an existing file name is required')
+ path = os.path.join(self.basedir, document.text)
+ if os.path.exists(path):
+ return True
+ raise ValidationError(
+ message=path + ' file does not exist')
+
+ class ValidateYesNo(Validator):
+ def validate(self, document):
+ text = document.text.lower()
+ if text == 'yes' or text == 'no':
+ return True
+ raise ValidationError(message="Must be either yes or no")
+
+ class ValidateFingerprint(Validator):
+ def validate(self, document):
+ text = document.text.replace(' ', '')
+ if text == '65A1B5FF195B56353CC63DFFCC40EF1228271441':
+ raise ValidationError(
+ message='This is the TEST journalist fingerprint')
+ if text == '600BC6D5142C68F35DDBCEA87B597104EDDDC102':
+ raise ValidationError(
+ message='This is the TEST admin fingerprint')
+ if not re.match('[a-fA-F0-9]{40}$', text):
+ raise ValidationError(
+ message='fingerprints must be 40 hexadecimal characters')
+ return True
+
+ class ValidateInt(Validator):
+ def validate(self, document):
+ if re.match('\d+$', document.text):
+ return True
+ raise ValidationError(message="Must be an integer")
+
+ class Locales(object):
+ def __init__(self, appdir):
+ self.translation_dir = os.path.realpath(
+ os.path.join(appdir, 'translations'))
+
+ def get_translations(self):
+ translations = set(['en', 'en_US'])
+ for dirname in os.listdir(self.translation_dir):
+ if dirname != 'messages.pot':
+ translations.add(dirname)
+ return translations
+
+ class ValidateLocales(Validator):
+ def __init__(self, basedir):
+ self.basedir = basedir
+ super(SiteConfig.ValidateLocales, self).__init__()
+
+ def validate(self, document):
+ desired = document.text.split()
+ existing = SiteConfig.Locales(self.basedir).get_translations()
+ missing = set(desired) - set(existing)
+ if not missing:
+ return True
+ raise ValidationError(
+ message="The following locales do not exist " + " ".join(
+ missing))
+
+ class ValidateOSSECUsername(Validator):
+ def validate(self, document):
+ text = document.text
+ if text and '@' not in text and 'test' != text:
+ return True
+ raise ValidationError(
+ message="The SASL username should not include the domain name")
+
+ class ValidateOSSECPassword(Validator):
+ def validate(self, document):
+ text = document.text
+ if len(text) >= 8 and 'password123' != text:
+ return True
+ raise ValidationError(
+ message="Password for OSSEC email account must be strong")
+
+ class ValidateOSSECEmail(Validator):
+ def validate(self, document):
+ text = document.text
+ if text and '@' in text and '[email protected]' != text:
+ return True
+ raise ValidationError(
+ message=("Must contain a @ and be set to "
+ "something other than [email protected]"))
+
+ def __init__(self, args):
+ self.args = args
+ translations = SiteConfig.Locales(
+ self.args.app_path).get_translations()
+ translations = " ".join(translations)
+ self.desc = [
+ ['ssh_users', 'sd', str,
+ u'Username for SSH access to the servers',
+ SiteConfig.ValidateUser(),
+ None],
+ ['app_ip', '10.20.2.2', str,
+ u'Local IPv4 address for the Application Server',
+ SiteConfig.ValidateIP(),
+ None],
+ ['monitor_ip', '10.20.3.2', str,
+ u'Local IPv4 address for the Monitor Server',
+ SiteConfig.ValidateIP(),
+ None],
+ ['app_hostname', 'app', str,
+ u'Hostname for Application Server',
+ SiteConfig.ValidateNotEmpty(),
+ None],
+ ['monitor_hostname', 'mon', str,
+ u'Hostname for Monitor Server',
+ SiteConfig.ValidateNotEmpty(),
+ None],
+ ['dns_server', '8.8.8.8', str,
+ u'DNS server specified during installation',
+ SiteConfig.ValidateNotEmpty(),
+ None],
+ ['securedrop_app_https_on_source_interface', False, bool,
+ u'Whether HTTPS should be enabled on '
+ 'Source Interface (requires EV cert)',
+ SiteConfig.ValidateYesNo(),
+ lambda x: x.lower() == 'yes'],
+ ['securedrop_app_gpg_public_key', 'SecureDrop.asc', str,
+ u'Local filepath to public key for '
+ 'SecureDrop Application GPG public key',
+ SiteConfig.ValidatePath(self.args.ansible_path),
+ None],
+ ['securedrop_app_gpg_fingerprint', '', str,
+ u'Full fingerprint for the SecureDrop Application GPG Key',
+ SiteConfig.ValidateFingerprint(),
+ self.sanitize_fingerprint],
+ ['ossec_alert_gpg_public_key', 'ossec.pub', str,
+ u'Local filepath to OSSEC alerts GPG public key',
+ SiteConfig.ValidatePath(self.args.ansible_path),
+ None],
+ ['ossec_gpg_fpr', '', str,
+ u'Full fingerprint for the OSSEC alerts GPG public key',
+ SiteConfig.ValidateFingerprint(),
+ self.sanitize_fingerprint],
+ ['ossec_alert_email', '', str,
+ u'Admin email address for receiving OSSEC alerts',
+ SiteConfig.ValidateOSSECEmail(),
+ None],
+ ['smtp_relay', "smtp.gmail.com", str,
+ u'SMTP relay for sending OSSEC alerts',
+ SiteConfig.ValidateNotEmpty(),
+ None],
+ ['smtp_relay_port', 587, int,
+ u'SMTP port for sending OSSEC alerts',
+ SiteConfig.ValidateInt(),
+ int],
+ ['sasl_domain', "gmail.com", str,
+ u'SASL domain for sending OSSEC alerts',
+ SiteConfig.ValidateNotEmpty(),
+ None],
+ ['sasl_username', '', str,
+ u'SASL username for sending OSSEC alerts',
+ SiteConfig.ValidateOSSECUsername(),
+ None],
+ ['sasl_password', '', str,
+ u'SASL password for sending OSSEC alerts',
+ SiteConfig.ValidateOSSECPassword(),
+ None],
+ ['securedrop_supported_locales', [], types.ListType,
+ u'Space separated list of additional locales to support '
+ '(' + translations + ')',
+ SiteConfig.ValidateLocales(self.args.app_path),
+ string.split],
+ ]
+
+ def load_and_update_config(self):
+ if self.exists():
+ self.config = self.load()
+ else:
+ self.config = None
+ return self.update_config()
+
+ def update_config(self):
+ self.config = self.user_prompt_config()
+ self.save()
+ self.validate_gpg_keys()
+ return True
+
+ def user_prompt_config(self):
+ config = {}
+ self_config = self.config or {}
+ for desc in self.desc:
+ (var, default, type, prompt, validator, transform) = desc
+ config[var] = self.user_prompt_config_one(desc,
+ self_config.get(var))
+ return config
+
+ def user_prompt_config_one(self, desc, from_config):
+ (var, default, type, prompt, validator, transform) = desc
+ if from_config is not None:
+ default = from_config
+ prompt += ': '
+ return self.validated_input(prompt, default, validator, transform)
+
+ def validated_input(self, prompt, default, validator, transform):
+ if type(default) is bool:
+ default = default and 'yes' or 'no'
+ if type(default) is int:
+ default = str(default)
+ if isinstance(default, types.ListType):
+ default = " ".join(default)
+ if type(default) is not str:
+ default = str(default)
+ kwargs = {}
+ if validator:
+ kwargs['validator'] = validator
+ value = prompt_toolkit.prompt(prompt,
+ default=unicode(default, 'utf-8'),
+ **kwargs)
+ if transform:
+ return transform(value)
+ else:
+ return value
+
+ def sanitize_fingerprint(self, value):
+ return value.upper().replace(' ', '')
+
+ def validate_gpg_keys(self):
+ keys = (('securedrop_app_gpg_public_key',
+ 'securedrop_app_gpg_fingerprint'),
+
+ ('ossec_alert_gpg_public_key',
+ 'ossec_gpg_fpr'))
+ for (public_key, fingerprint) in keys:
+ validate = os.path.join(
+ os.path.dirname(__file__), '..', 'bin',
+ 'validate-gpg-key.sh')
+ public_key = os.path.join(self.args.ansible_path,
+ self.config[public_key])
+ fingerprint = self.config[fingerprint]
+ try:
+ sdlog.debug(subprocess.check_output(
+ [validate, public_key, fingerprint],
+ stderr=subprocess.STDOUT))
+ except subprocess.CalledProcessError as e:
+ sdlog.debug(e.output)
+ raise FingerprintException(
+ "fingerprint {} ".format(fingerprint) +
+ "does not match " +
+ "the public key {}".format(public_key))
+ return True
+
+ def exists(self):
+ return os.path.exists(self.args.site_config)
+
+ def save(self):
+ with open(self.args.site_config, 'w') as site_config_file:
+ yaml.safe_dump(self.config,
+ site_config_file,
+ default_flow_style=False)
+
+ def load(self):
+ try:
+ with open(self.args.site_config) as site_config_file:
+ return yaml.safe_load(site_config_file)
+ except IOError:
+ sdlog.error("Config file missing, re-run with sdconfig")
+ raise
+ except yaml.YAMLError:
+ sdlog.error("There was an issue processing {}".format(
+ self.args.site_config))
+ raise
+
+
+def setup_logger(verbose=False):
+ """ Configure logging handler """
+ # Set default level on parent
+ sdlog.setLevel(logging.DEBUG)
+ level = logging.DEBUG if verbose else logging.INFO
+
+ stdout = logging.StreamHandler(sys.stdout)
+ stdout.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
+ stdout.setLevel(level)
+ sdlog.addHandler(stdout)
+
+
+def sdconfig(args):
+ """Configure SD site settings"""
+ SiteConfig(args).load_and_update_config()
+
+
+def install_securedrop(args):
+ """Install/Update SecureDrop"""
+ SiteConfig(args).load()
+
+ sdlog.info("Now installing SecureDrop on remote servers.")
+ sdlog.info("You will be prompted for the sudo password on the "
+ "servers.")
+ sdlog.info("The sudo password is only necessary during initial "
+ "installation.")
+ subprocess.check_call([os.path.join(args.ansible_path,
+ 'securedrop-prod.yml'),
+ '--ask-become-pass'], cwd=args.ansible_path)
+
+
+def backup_securedrop(args):
+ """Perform backup of the SecureDrop Application Server.
+ Creates a tarball of submissions and server config, and fetches
+ back to the Admin Workstation. Future `restore` actions can be performed
+ with the backup tarball."""
+ sdlog.info("Backing up the SecureDrop Application Server")
+ ansible_cmd = [
+ 'ansible-playbook',
+ os.path.join(args.ansible_path, 'securedrop-backup.yml'),
+ ]
+ subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
+
+
+def restore_securedrop(args):
+ """Perform restore of the SecureDrop Application Server.
+ Requires a tarball of submissions and server config, created via
+ the `backup` action."""
+ sdlog.info("Restoring the SecureDrop Application Server from backup")
+ # Canonicalize filepath to backup tarball, so Ansible sees only the
+ # basename. The files must live in args.ansible_path,
+ # but the securedrop-admin
+ # script will be invoked from the repo root, so preceding dirs are likely.
+ restore_file_basename = os.path.basename(args.restore_file)
+ ansible_cmd = [
+ 'ansible-playbook',
+ os.path.join(args.ansible_path, 'securedrop-restore.yml'),
+ '-e',
+ "restore_file='{}'".format(restore_file_basename),
+ ]
+ subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
+
+
+def run_tails_config(args):
+ """Configure Tails environment post SD install"""
+ sdlog.info("Configuring Tails workstation environment")
+ sdlog.info(("You'll be prompted for the temporary Tails admin password,"
+ " which was set on Tails login screen"))
+ ansible_cmd = [
+ os.path.join(args.ansible_path, 'securedrop-tails.yml'),
+ "--ask-become-pass",
+ # Passing an empty inventory file to override the automatic dynamic
+ # inventory script, which fails if no site vars are configured.
+ '-i', '/dev/null',
+ ]
+ subprocess.check_call(ansible_cmd,
+ cwd=args.ansible_path)
+
+
+def get_logs(args):
+ """Get logs for forensics and debugging purposes"""
+ sdlog.info("Gathering logs for forensics and debugging")
+ ansible_cmd = [
+ 'ansible-playbook',
+ os.path.join(args.ansible_path, 'securedrop-logs.yml'),
+ ]
+ subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
+ sdlog.info("Encrypt logs and send to [email protected] or upload "
+ "to the SecureDrop support portal.")
+
+
+def set_default_paths(args):
+ if not args.ansible_path:
+ args.ansible_path = args.root + "/install_files/ansible-base"
+ args.ansible_path = os.path.realpath(args.ansible_path)
+ if not args.site_config:
+ args.site_config = args.ansible_path + "/group_vars/all/site-specific"
+ args.site_config = os.path.realpath(args.site_config)
+ if not args.app_path:
+ args.app_path = args.root + "/securedrop"
+ args.app_path = os.path.realpath(args.app_path)
+ return args
+
+
+def parse_argv(argv):
+ class ArgParseFormatterCombo(argparse.ArgumentDefaultsHelpFormatter,
+ argparse.RawTextHelpFormatter):
+ """Needed to combine formatting classes for help output"""
+ pass
+
+ parser = argparse.ArgumentParser(description=__doc__,
+ formatter_class=ArgParseFormatterCombo)
+ parser.add_argument('-v', action='store_true', default=False,
+ help="Increase verbosity on output")
+ parser.add_argument('-d', action='store_true', default=False,
+ help="Developer mode. Not to be used in production.")
+ parser.add_argument('--root', required=True,
+ help="path to the root of the SecureDrop repository")
+ parser.add_argument('--site-config',
+ help="path to the YAML site configuration file")
+ parser.add_argument('--ansible-path',
+ help="path to the Ansible root")
+ parser.add_argument('--app-path',
+ help="path to the SecureDrop application root")
+ subparsers = parser.add_subparsers()
+
+ parse_sdconfig = subparsers.add_parser('sdconfig', help=sdconfig.__doc__)
+ parse_sdconfig.set_defaults(func=sdconfig)
+
+ parse_install = subparsers.add_parser('install',
+ help=install_securedrop.__doc__)
+ parse_install.set_defaults(func=install_securedrop)
+
+ parse_tailsconfig = subparsers.add_parser('tailsconfig',
+ help=run_tails_config.__doc__)
+ parse_tailsconfig.set_defaults(func=run_tails_config)
+
+ parse_backup = subparsers.add_parser('backup',
+ help=backup_securedrop.__doc__)
+ parse_backup.set_defaults(func=backup_securedrop)
+
+ parse_restore = subparsers.add_parser('restore',
+ help=restore_securedrop.__doc__)
+ parse_restore.set_defaults(func=restore_securedrop)
+ parse_restore.add_argument("restore_file")
+
+ parse_logs = subparsers.add_parser('logs',
+ help=get_logs.__doc__)
+ parse_logs.set_defaults(func=get_logs)
+
+ return set_default_paths(parser.parse_args(argv))
+
+
+def main(argv):
+ args = parse_argv(argv)
+ setup_logger(args.v)
+ if args.v:
+ args.func(args)
+ else:
+ try:
+ args.func(args)
+ except KeyboardInterrupt:
+ sys.exit(0)
+ except Exception as e:
+ raise SystemExit(
+ 'ERROR (run with -v for more): {msg}'.format(msg=e))
+ else:
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/admin/setup.py b/admin/setup.py
new file mode 100644
--- /dev/null
+++ b/admin/setup.py
@@ -0,0 +1,22 @@
+#
+# Copyright (C) 2013-2018 Freedom of the Press Foundation & al
+# Copyright (C) 2018 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+import setuptools
+
+setuptools.setup(
+ setup_requires=['d2to1', 'pbr'],
+ d2to1=True)
| diff --git a/admin/tests/files/SecureDrop.asc b/admin/tests/files/SecureDrop.asc
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/SecureDrop.asc
@@ -0,0 +1,52 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v2.0.19 (GNU/Linux)
+
+mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t
+wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU
++8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM
+KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB
+0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg
+u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2
+ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B
+5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf
+CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7
+hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf
+yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB
+tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP
+RFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS
+m8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r
+FyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/
+JzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r
+q4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn
+5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg
+b1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi
+AsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW
+iNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3
+cm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai
+R78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj
+EqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj
+LnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6
++SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC
+vfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq
+95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf
+T9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls
+9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC
+KDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p
+N4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21
+OcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h
+h4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls
+Vr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f
+FwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA
+6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3
+inyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA
+KmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn
+IW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f
+4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK
+n1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ
+BSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF
+z2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI
+k8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS
+iphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ
+Vu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8
+=XVz8
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/admin/tests/files/corrupted b/admin/tests/files/corrupted
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/corrupted
@@ -0,0 +1,2 @@
+app_hostname: app
+app_ip: [10.20.2.2
diff --git a/admin/tests/files/key.asc b/admin/tests/files/key.asc
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/key.asc
@@ -0,0 +1,281 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFQAYqYBEAChvZWXGBoliCiePTZ93B0ScCspod2DCGMRA2AiUExnf0IxqOOq
+aBnc7l1jUdwRent+uty3483sHUcjbBjE4BHCGRfQWH+YZjiIHGyOyhUR5x3Uvslk
+b6SPrQv4EKcev+Wcr9hOnahBLf+EjsJc3xTK3Kk7Cju0RBHPPtQ2E2j/EUTwAHHk
+EoN9VEqJaCdqLHewIHcCkTMJUvulVbABUYcDGsPXAfb2pZ0e+ga13KdrOZgR5sdH
+nj+tcEYpC1y01PL3HtQdh1VgK1iJglM3HdzIMY7omAp4GeznNIyL8ZW33mZsvTU5
+DWDhYnCbZlWqIQgBC64Q6iwDOI6POuOief9LXlRz5VL5PsRWZc/R7LQ5iThA8I05
+jxXkGB4NRi9/Z2CO7ROhZVAU1WeahQcISSrgk+1UGzpxdDZulX0JwpIPcIqDC234
+gImGiWx6N8iqOIAT+gOKLwThe7yzm0wbZ8rpNTZBPen3ygHA94YM1VpL3Gk/K5/g
+PqUm0/eYkK6vP/ZRDdzbR+WxN3GZi/sF+CHkJSCIEm9GfjXRmKS0h6IHzqvbjH5h
+Dh1jPRy0D5dyOQoYtw3gCiv5adAdZEyBECs24v5+RujgK2Q+L5l88lI9nC8VkuBe
+VDK3JjZmwVLdpSQ3UCVCux59qe/8F8sowXUHL7Q83n9/9SVdvUfQqVOD9wARAQAB
+tCJMb2ljIERhY2hhcnkgKE91b1UpIDxsb2ljQGdudS5vcmc+iQI3BBMBCAAhBQJU
+AGa2AhsDBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEJktI7OS+eTyOZgP/2CW
+EXXPGNhokLKdGprr1/dGpgVoaWyc9zcPbjOhrKOQfJ5JLzVN5vxMj3DrvjriFA7g
+6ntcFpAc8xcvmgp1FfiUve5Wrp9EOtIstuyS4yOpEZ5c4e5nByChT8s1CivCEd/R
+UBIrehGH0e5aURss1F0VQP+u2gI/Zq3B/SpkG48qtgnZloZqlXtTEZQZcEPyzF39
+Hb5ncEELgQI6W6ldamQ6EcMzJUlLMxzuztFhLKQ+DS358IDohkh2rgJN6vuvSMJh
+J57YNBDz3A6MLXey03Vm5NWoZJzp4MusecZELTsYkRMJXPPeET7u55YbQYBFtLFX
+tB2pqpIPCWK/4LQ6vscGaf+qgvc1CXBCUmpXndSgefZXM6BNxSG5wr7P4j6sNlsI
+4p8Y3CnRgJgMP78/pSuOmVA0RKGzErYHZsRnfudt6irPC75Bk3m5k8d22x3ZM2EN
+bcxnjkb7esc/pLVFeFt1kvk5x6YN+0ZOa0phR5xg1wQ3uBNgpvRB/EmluWBwbQoe
+ScHAgHeuUB3dKvUcSFX0S5YrJ9kzc5LA8WNqhEfjOQpQ7FDvS/cvNqd5u4bE2NL3
+9aFjjB5S+KwNHf9uMky9Rt65pV585JK5MmpQXp0m6sMJAMTtRh/BAm2xpNqXi8bL
+aATCj81s36BXdAA67l3IuXIO05PKiLtjJDQg+SsDiEYEEBEIAAYFAlQAZvkACgkQ
+8dLMyEl6F22/8wCeOdprYHGpSrQgENJnqlflBD3jeskAniWg5pOraywAzXGNN3bD
+uF9TOY9siQEcBBABAgAGBQJUBI1NAAoJEPnq46ZYGdfo8kgH/2VGu1C4eopujw8T
+TRF3d5PtrGgh1JKpy6atMlS/v2U5gw/SPXAN7ht2PsPu1fBg8dJiN2X75wmyDVr6
+28364KPaiVqFBzm1jj+u7811MulvqKMqKVHc3LsvDsS19xo2VJLSn1rtJBMrtXoI
+Iszh9myn9xKF5GtTToSe5aRjPIR3HMKsPokDGDdTAYtwrzx5vcVkgOzmUqvqEY6K
+nqRPYoNFPjc/wVuvlRRiSzGvPv5JSjSjYSFUPdc1cQSkjw7ublQt955Rsbpbg/j3
+6F9ls+RBM3Y1eMp1II15N8W6iUvWooawXUZAdsEQcwdnLiSuL9saREtvhROEexgB
+Ng8eblSJAhwEEAEIAAYFAlSUoaYACgkQuOUId2ZHWq9C/xAAij3vhMkISEveEKHA
+iZgON12xGYojRfZ5HojZ9/e/fkEhgBr+YvAZ8bL9i+Axp7V6tVV/OhjmR8fzqE6l
+EJAjuzin/E1qLAXR+hus7sZCCxJXNnMRSCJU2vmg5v50gLEd2soVPQpCtAho/e5K
+95JwhXYhWzwcijPmTlv26U4jxt9MR2mkDIhW/OtK1KlcXbDNE5fXN/iejVCV9PQy
+ckKL5sd9ZRXiUsWgeiqzZ+avA/b0i1kwWwau2+r745i9VLlQeojoJU3N77XeXtSK
+VILFWGHNO+Ubif/pq+WnNfyJlxMGjl+9jpi8rQzCm6V0u5V8/MlUSkCnciJRdlay
+SDuo5UwGExt0jybywkn3A3q5Nvaa1XXn1qR7/EPeNr1zHRu7aIcpQfcVOnLlL6Oy
+dYm7MuxOMBPLePCW72pQIxz5pcV+TBbp80lTj1TqHSqtibBtLPSF2FTKJcVwDQPb
+MRJUhR4MKHtVdmiQwcXH/mxsE/mOu6SFPxvXuC7B/0RSsvcTVj89CP6Bgy7LT0sQ
+TsbRr8hnhDHuy2GIqx/4UExcomcUnBVDL5DtNhdRzbzLugshk1znutoxsGaVwVYV
+xfIg64Azf4tmb4mi7D6Adshn4ULFoOo/h2v2bZw3WmeUoEZWN0M4BAWgz4aMrZHn
+1ayD0WVSPfsx9DBOSgcsi93lMjGJAhwEEAEIAAYFAlQEroQACgkQ6UHe2nxbtqWP
+1g//Tz5ITfYWU4UPzjqJVPCS9/xiuqbNSAzy38/9GLKG7JWDfwxfjInuiQxQD74Q
+uQfWgeFHG72a8Z0M4sgx0RQKLDS4skIcsR9WRIXB6Y/IOBi4luU+rglEHeUQ/6Z/
+eLew2yCN5LfdBu/7EzJjn1Tny4YIJBfHL9UMzXkSEj/TW87iVyYPIAG/NClSA7ff
+cwKLYBeQ9P4NQSyLn+TOKyIkSHemiaoYjJzagzIqLTlxJjRVaJN9VrY72etFlz2/
+mZ3s3WlRRj9JlCCmSPAf9nssORrqeso44VfJ1nzpOl45/RuLF0vWjS+JtAFtd5ye
+gx2dfeSeLZT3fKpYx7gXHZEF5jwvHlwWGFopXtn5Z/5LMbIqBZ8+KrhMz4+Pu8dF
+LgXgLvXYieN9VPI2MaJNu86lzokMJTzRAhsStHOVyOENRTaApT9JNYTx6d0OlH7m
+wBLtiIL3pO7QQBy3lA08HDDY/7pLTOhuETpHIYCT6Ed0DbZKR5FhyG6w+lRVDJC5
+vAt74qmCc4X82sWb6IYpTt8ro8DJTS0F5K+xiTVHGvO3+DEaj0HXv9A1TX5DSe3K
+sFj+RbLJCufsPv02dkD3mg/RPoiFFiSHVk9Omt8dWRVYk/WhCPQoNvcrLP7Zj8wu
+8QrfGhDvGhxxysilKxEHlauXMAzmzkGc1v55W2lVb/Lw55+JAhwEEAECAAYFAlic
+0mUACgkQLGRkryqOTAJjQQ//QgPPLKfVp2opN9ZWm7nFTzUKy51LNbjFFKiiaGVV
+Svh2Gz6fcDYSNvsgYr00U6ET4h/aMBPY21YE+sndH5rEyln4Ux77Ul4yzQ62m20m
+rzy63MAcMF1dfYb1vwAJWla0M/LnDJQCMSbeYXkvHeJ3DB8//CFKpD6T3UgsGi+J
+snefB0l68rcR0/fxRnklmkfZ8M4J5Qpk94E8b6uuSBIQJg2RdNJTY2UKmSloIKw5
+Beh1xnovwGBJdfbwV9EDEupSF4qUfDwFFjxprskdtB+3+8MPjC5rOMW0dC5ojf5q
+lWgRLDvihqepIx3n3yPynmSyNCnelAXUShEV2+iE1fe1WZ72f7Q+R4WRYRk5ScEr
+mfgv46ssKXGKANq3C2bDEd2hKbS6eyO6lSY2qK1/CxYohXPkKfxjKRrr5BpoNKB+
+3eru6xTeSNmkneYrPBKKbsswJ85MwWCoKU/AVvml21f05sMX+CvJHzVFul/v4x3F
+9L77a7FLCp5MJDM7Z1gxetA8otfJJsp/d4JRQteG/V9jwicTsh+gGCPZfY1jB6Rr
+Noppn/f8igIjbg05rD4xeavk40SnTOju5J5Vtu+EiRxlW9oFIdAhh54MxQ5zMdlT
+hMRzHlp/jWhrE1yG0MBRQfgePTv3AkIG2SqRM114yebE3paUF+Eyxn51EO5nqbsO
+w5+JAhwEEAEIAAYFAljimIwACgkQnFA+4pt5PsUvshAAmFvbpj6mRRIpTbqLlPRU
+wvDg7gUfQy6GDCGFnju4iKonje/LpQoKqRr5TK7ImE8IDPLuY1sBDJCAKQRa0L9Z
+ZWcxqNx+xJrRUIQDXA3O27Pp6B988qUmkZsq3oj1pfEAcjgASKTSxZFCDAcsxecv
+B2n/GXuYKpEKM29pybudcUZm0+lAHMFJDZ95lLXpq4mYQddAMJgwKG7wcYZhY9xb
+NuMO/U3FfvVS/m1dkM6lIQbAOW1jqj12JS3e27s8haOCZmoLQ0Wr0KuiI17clzbC
+RfjYSUQkWipeOS73XcnV/9bfN1hlkCIofF1h3RvxbGyhEB4fnITcdTevEbabVbOL
+0agl8FOPcI/0gus6TT7Z+37/4LcYn9unSgmu86gUx+VVm8cR6MTBd5tAWceyZO8z
+RuwpOpql+34MwMNBvUjZ1MKwDq7Nq/Jd8KRhFP+Q6iWh6rqFmJ8vxK6eRGaXTGrY
+QWW43bngUuUOSpMIgt75efaF8IzK3WnNnJmOcA08DHFUK7273VdUmlZxhVU9feDP
+Ywfq6QSAdr+2SsSMvgTVFVelV8YP6L5zM5NCCtRBKH02HsIKo0IdaeGAmZw37Jl1
+j44CZCe6f/9fd+O+TUL4NtXgeLeH4pTwRzEKJ+ublUthGeYVrZiPiuInBMfO3Hul
+exsXrj43w6S+q55B2XU9cvqJAhwEEAEIAAYFAlkeq8AACgkQhz3qMJ0TgH/UGBAA
+iq1LhU9M9JkdFjDSXDXz6acAZksGcyVSorK98SULMzMzA++zUHgrQKVomXtK8AXR
+JlOZ/+JXw2PUZviCYlh1+0WW+kZ+/ZGGe4J3PCO2iIQhqlDECO1PoAxXg3iCLtdp
+IJKdnBggLVet0U1JF14H1tyRXpXBBDedga3w0wrMUk03mleamuO1u2fLYQHgtqUx
+qYw2WBWTBVGo8FBrat4GANNH+e0INcCNY1qOXuvxyhjrG2WntzX+b/iy+ldN90V4
+elfzs7kEh7BmnkUvBMKL4Q5r5eBf2pFoHeOf6qTGYrqBSHl3idBa+X6bCSH3XJkN
+yAmZizyiQwhLLGQbKX1lX3YX1XRZnY6zwa50gI5UkRBCy86dynVGJ0okPIKKeUVZ
+AcxxL0KOdY4ulZCOddc/NlSOwnOjO4M+7w/AN2qDnJaNlUxahmpvXK0Yx8aqbI4K
+SPZMUifmpbglxU00yvD3y+CctuVfqNHVab4O1Zb1zejY829PAdVOcR82shLM/rqb
+GmcVVnX0IH0+BysoCil4MEDBG/DE88Iw/I7Uq7gm+a7gSO6CO0l0Qlyz+ixBoSO4
+sZQ9VwRRKXubwak46DNR+pgq3kG8b1pFakqo8LlkJWAnu8DhgRt2cqeplwbbtKGb
+NEW1gi/1xX0kTy9TNdiVk/0C72lzYxZBdim1yH1spCW0JkxvaWMgRGFjaGFyeSAo
+T3VvVSkgPGxvaWNAZGFjaGFyeS5vcmc+iQI3BBMBCAAhBQJUAGKmAhsDBQsJCAcD
+BRUKCQgLBRYCAwEAAh4BAheAAAoJEJktI7OS+eTyg0sP/1gZ2XBjfhdSugEmFDWw
+4TfSTsBZAWzEVMomouZa/r2GxGvfiQKwgGGDaw1u/XvIfdjEDdwEk7L06dhyF8UB
+jM4WidHrz+DYC9JRIxrzcNDaphhKlmXQzm6hZl0PEwvJ4MjbX5acI9MytUXcshkX
+tJCeVUoQ0NnhD6UUvaVVsfJtWwDDR9RfzsHvegJMtLUt/GKfCtwrZTd2HiNrSpW/
+TPys718GDx9gpRbYpYJttueeOCd3rYjMdU3EtdR1gcWCc31Ke8iG5cLFH5VdiMn3
+bSu8CskCxSQ+bjvDHzgrpDNfq4LEK0i4QFnK1EL4A/U5/rZ+ql7hUDtad/wKNz2j
+g7qJtuuNl5dMObFv3f/uy6tWiDqHgGLipwhZ4U9rgHu/nvo1869CKDSMj8GUOEng
+Cw4qvvUEbLSFWequ6zywY9eSD3vAhWzdvN7gZRIxqJswoNiHQzQPAFROd6E6r3O3
+octyXDZ9pQM6AT8TbVSsmy5Uajd63OTW8PVvCWCyVLXcXNN5R6D08ue3ma05yFzx
+9E1RiFZa7Jt55TAaidvuny7xm97FU2Pj6BOgdBSrcklorr+iL/h4FCEM/YvDSXrc
+YeYgTGzGRMgMJBZjUyQiVaKfm2NQfcmqsIhcxsaJqnDyOOGpgHuWrVgmTG0CMbWD
+48mwSaMbtRFN6RPKMDwwzPVEiEYEEBEIAAYFAlQAZvkACgkQ8dLMyEl6F22SZgCf
+TTIJzMChBBqW83DPxgdq5I0hnJQAn00b+s5LqUEHDQml3XTs0moVwM90iQEcBBAB
+AgAGBQJUBDQsAAoJEPnq46ZYGdfoByYIAIYR0LvBD7egGMFRADqCzi1j0cRzhZId
+xjpARKWP45CsE/Jp18SXS+4YeVXPh55/oZSbIf7mdGLtDUi9pPP1Npkuxep+2IW7
+G/+u9KmGfNlviSilo63uT/53bSjgsLM4BtZACAG1jbbrozePCfUadlRMkVkg5hNJ
+8AThlhGhFgzgOGTTFbOpSjUA2aCRNic0sWIS2yyMc5C7MSkQNpX9k3U6j+tpHkIW
+9tIZC9yS51giWDjaCV1VILr6WgOS5CxSlbUcq7Cyt87HZccBeNRqyuEebkVb42GB
+uxU0/dhSo3dab5JGknTLCphMKnWizqulPGxBxzJuvxtNIqyVdU/lWGmJAhwEEAEI
+AAYFAlSUoaYACgkQuOUId2ZHWq9CCA/8DH6LW+J44g1HcKfOWTfzIYeksK+jDN/e
+wxWS0u8LJSQ55cCXn3I7a7xiUIWSFL39fg5RHZaT/me3j4L/hwDuvabdY93VrPzv
+MivXKCAG2uxYbKlNudyw/Q63atApwyvtguD3k8+BgbgcGqe9+AUmhnQTJg92FiTb
+7loXw5lmJ561RcCaDA44bz+eWiEwoeUbwIDyJQ+aX49Zk1jjiTM3oL4l+TPo+p5i
+k2TeVnJ4O9Q00OVnUgus9c54j5XZk2IRAyE9weeu6ZMHeoWrcgeCfZ/0J954t2Sl
+JMGNXTtPMICOdGiq7a7+EbB69WKKlw9Wq/+fXXfwz0/nls+FWg3kt1mlkkbDenDw
+KwjbAf30IzBEUOm9U1KPIlH7KTYkEm39Z1zFlTuNf1UIv/9yVROgHFd8cIpLQIfe
+Zjgl9xyY6fFINRX30U+mbnZwEyLOaAd8HHlnuRz71FMV2A0hagSJELTpPl16Vkw9
+5i/CIXpkUKk1M9KrQQcQU8wGcZQGtohOtSFWSSu6haDDfOVthDC+78eUBo9kRG7A
+xBqnvFRLd5xi5QB5GJcosIP97SkKGvUctX9z7YVcIBpZn82PGXQndjeK5Ud+M/lo
+dg4GY1g3cxafkIKj90hiiN+T16h4e77VTkyZ3sz5coO+4YEGrAOLdriTTyt6fz2f
+Rx2waoIgx2uJAhwEEAEIAAYFAlQEroQACgkQ6UHe2nxbtqUWoA/+PrZXuQYH+kPM
+H3XqpbWRC9yiA4t7BjoQ5d8P0v9YrqXTr5CbA1sQG7K2+jBR/bfiOl1zVpeBXwK4
+mUUSPKVGuQR6yMFw6PzmOF1n1lESszRI6/OBXP7rEO1bHSwO34u7gfQwxTP7lSiJ
+fBki4Sct7HMoaYp47y2ejzv3/79dxv+86ps4k2WhX3l/FkdGcb9KgneQRIocRkFo
+0rZNhCs60/z5mhbxcGp3DUPZ3I0/EnKztpNvbanOrtPUAJnVPWNRIhKpwYul2eAD
+Z978CqvOr73dRTWJWSWitYp6U6BNL5EszIcyYW3GCJhCTfDCn9nOO057xCvCKpTQ
+M9F8aXp+OOWY/x+gR5Ah42impIyO0gBO7hkJzILZ8Jx8eskguJWymZNIP90li1Vx
+Rv6WgHILHAbmjA18ZCqdgVyhI5t8aENPgR/MhMiwJvtM4dYuofkwDYwUKfukqjOe
+cxEAuVjMZjAj5KICicVUI5QwKss5sGKlpIkITFvYT2/vCyLk+cQ5yey2LrhASdRu
++eA4G09Ns9Kt6+fBtG1WA8lcs2BJi9Wv4QNensxWVvHz0s1mcvKrSFRoNaWiqqr5
+Yx25ad1y0SWbuy0FN6u2R1fETEFr8YVnWZFqx0o61llG+S4bcpazTDSeGZlJs+R+
+JeLdXxFE0nFR9qI7xnIWCi1dtQ8GWEyJAhwEEAECAAYFAlic0mUACgkQLGRkryqO
+TAI5BxAApWBrfTucOPzB3oEZMUZ2jnZVR1FcC/15uKr9JX5nhAwS9Ux6kAFNgz1Q
+JzYTZFmMU5s1NGEN/oIDIj18Kq6AqvyxxpFkp9Qp6/96x5fvU0KZmzsFSNhp6CON
+fjMLXBK2yaATj+8aiXnWhRjMrb+xQKcIBe8iDStoDeD1LC6uXQWZCxipH41KnoD7
+XN0+39snPTN4MDUq5g7hoUREigGkn3tgn5EzwG281J43hRZIA1NFNFg73tC6AKWz
+3r+BOi57uohpFkJT5oDpsBWrEDbaffZJnC02v6wUBixbu8vF4AQtofkws2x7UhHd
+Gf6lK6CG/0RrH3Ftl+pwhchRnN6mu87Vvd8O8cMU8yQ/lrxCPuqTr3n/b43LzWWi
+ss/EBFOOaLD7nu9CIGN8LxRbAzO5N1kGPC8NDjg9ceXGRIwse8BhDxMH6lkD8xT+
+XQ5566sqV+ovPeyPu/kY0s9aMhkC6OFIKNGzodquKSDpBaJSKR3tcEU/fXGpjPp+
+I0DCvxg6DEVlueWdd/doh3pM9+smwyJquPG0VCof8FiGIQ7/nkQ0z+ikFFjTQUxm
+OprnYnUPZYA/y6xVXvN1BnwBsb/YvCpQGyp6SmRgNdpYJzv6kakpk4hkQeQmQpbY
+8xWEesmdVxF2tv/CxGFRVfJz3tgM0aEml5oZ+5XLjw3UVB21LxKJAhwEEAEIAAYF
+AljimJsACgkQnFA+4pt5PsXzSBAAvrpN34vvWV/odVK1Ea2aap6CVCkVo2y+IWww
+K34t4wwq7JVHxSlNy4zpbIQjPjNf7YeDX0kB+2ePqnIJN//u6kjnBNtU1xFk3tQr
+zoK4bB5z6q7189F4urztGreECPar6Rrf2AKAbN6sd5KG1R1l4vXhhcG1ETrc9bug
+kD/SKccfid7HBGGcXpb/qhtTUhzPMns+IytH0SQwn5DFyarlaAP+fxrfaHFbJlmS
+4dltU9MR8Xeo0i0NGHEHGMBlCU9bldcIIU/8iR4JB6cCJjkREsF4Z1zQBg3y2uNk
+F4nwgP7rO6Aeomuf92NG/LYAIB7iwrJpuFhsxh8ZYikKdOg4WNDElXsXQwfvQqIb
+5QTCm4B/lcupXW6S5iOzzL0rBsxhR4QliHi2ibHHT/CSkOLyWCGOCIX9y4zLicwv
+A+FueBO9rxukhIoF/J9Vxe5rRahTOoXlv/8wWEgAOIlQ5FLzALoHTBEGmDQuut2y
+8swcGn7OrdSP6BxVStikxjApJWm06zSDxo716+PeLK36RtbFFX2vR0OsuZXFwbfK
+6yDZFjFNlyubnbaiH1YsvkgbKkHxhvQ0Ncfg5rf5H5iR88bHv2WCrVN8qoHeKOmK
++fVHT3NmBokIpb4XwOPgdn2+3VguiZjxf0Vd3rZ3+cojpXWFimDG7K1Ht2oWQbma
+eFDY+AyJAhwEEAEIAAYFAlkeq8UACgkQhz3qMJ0TgH9kSxAAoaDv4nMTaAe632wo
+6peEhRv7pfUCbuLCjXLcMA1w64FsH45k94wZeeOqwNN3CdeADd4gpqUsta6KoPmV
+fDphz0lYX9vz1LEbzMOK5mrUwLXyRNy6sgO50xfBLY9rjaQApzeaqc7JSGLNvUPi
+clnoheo7AAy32oQf/O+ubW4eJAmKWsPstxFRET5snwoSWSm7Xx2Q5HZo1x5FureU
+CFrX6LVyYJTF8v7C4vMRfsPNjXNM6URqURUxYuEcWpPBzhiM5ZyYlm+2PRCSi0kc
+QvUTf9ro8DqRTjZP39wIqOb/Xpt/FyO/Rwkh295477BT5tUHuCh7bKDf9B7HzE7o
+Ws/LbUPKn0B1lUfwCTQmvBZu0WGEOjw9+wXBO2ucBqGfp7UaVWlSzismy0lC8jFq
+Xpl1hGtp5/s5WRyxjMEKVa7x8TtNahJmfWUPf4bp465Kpjf42LYp8VnPL+b0sb1+
+I36DS8qfviDd2zMXyr29DDmXaDx/16nlLsNNe47NU9/jA/5B9vn52eS7IQNyqI9+
+bySI5oV7g7x34D0P+9WENY0De3V1QAsDL/oOQjtH5cK1v/thmK6TWVHm1erb4kak
+BoNz79U8/HsgiCZSbPDsvCnzdACXb8fuqRkUmkYYZd6alBGiyFBzFNFwIIaDdg0G
+FAy3qkIcSv9p4W4ViyEpkroxDr60JUxvaWMgRGFjaGFyeSAoT3VvVSkgPGxvaWNA
+ZGViaWFuLm9yZz6JAjcEEwEIACEFAlQAZkECGwMFCwkIBwMFFQoJCAsFFgIDAQAC
+HgECF4AACgkQmS0js5L55PIAKA//SL27KyLfbsNAEDYYsVpyhxEDaZbrdnbDCCK9
+9MYz5OTmfbpng01bp10PS3qzGrJFaRC71FJuYFWxkLxatP/OPSmEiEZFROFAWs5w
+TbgfaAQqQF7KLksRKJRNeglOiaULgWVDO+aHvXjfkMGm9oGehHYv4oUYNe8HrUrN
+lpKkn2DoNgy8FdvhXSm2OhZcASAtB8d0+VSoFmwNGTq1LMUkne1DAVpSHsC2ArfW
+rFlskeN4gB7jl1thiV0Ep3dfr/adImjVtLgvJgnSO+DEuqg98c/4QOZedR5BBQiA
+aaRPclvSVDcr/OQrbVwfagl99PKowfPzJT9xW6Jy6L+FHLcRmWw+2dVcQzzTk/32
+vQnHDzhGYiGLG+Vqroe7SWoTiT0pUfZ4fbf/vXNtuxEK6gwvxgnHSo/6yKX1M8dK
++v73j4kXG6obDJ5uPvuub7NcGYYHv3zRvNDW1e+CEVF/Z4O6lMF3Ng+No5CXOzIj
+/INHzyy20/sHsg75omFpBvV5fcBqdLkXxmEfTcUoQR5tZSaha/i1qpsS66DRbMuE
+TGUz8q5Nj1B1Uj9MTTn33/unTzBRMocjL1WTYcXsd4RiEmh40mIt/q6GHmkMI0TV
+9IQxFmjWeJumrYUbwT8Ib+/GHeWYIDnSuhO6JDfL5F+VD3iQTHNYEPGbyfnSpCAC
+grfHrfCIRgQQEQgABgUCVABm+QAKCRDx0szISXoXbRXpAJ9lKhoceJC20XcVNDVh
+l6NK0wtMswCeKwJLXKv7i1Ueby9fCqlCeFYCvLOJARwEEAECAAYFAlQEjU0ACgkQ
++erjplgZ1+j3JAf+NdHcvxrbH3yy7TWkHA+r+dBGRtg1vv4esL9tDF97A94PMPcY
+3KteXOTw6mR60zz0/hCU6cn8BHOSgDly8WOHF/1nhNB2iVKxQ8C02NJFQeymWRoO
+dYO6ePYGDphNubuRKLUbQvPsJFNT8VPpoh7WdlfKRqseqKWMkpFTSq+gfH5lkFNn
+xpVQ5zAqFIZIM7C6qMZ9wZXJqbwcdD/PhfGJTWjX1Bft2URdBeGHuouA5QSC7mXm
+iLS/TXzR5J7PFS5tk6Xs0QNdpYk+Z4U1FbBFl7RQyh/bKUuzMvkZreCraaH7a7Kr
+WISMm09MtbTIpiZ+SpdGQY3916bUJjExBk3cjokCHAQQAQgABgUCVJShpgAKCRC4
+5Qh3Zkdarx9YEACtFBpjvj15FqumoU2+8B6ohrXJ7rvS9K9NNzvGkcS3+n5rJtt9
+Z6oUug5EDRWHJ7vV26/yzR6f5F+yun78TQZ0H6GEQUNnQtsMQ527CgxmRPFo2m1a
+HQpJBS2C47JinF8Pt4msP+9639s1X+VIqXasF57z9uQ6ezb5rhIyg+n64P5d3Jc9
+o3Hy+obbXHgJ0IVQfACGITT1DWJNgKpfs70if3HlUV/Xznn+e3kC+o7oX6ACYR4g
+fO4Gy992FGgA/Yjk9VbnwbXjFeugV71TdWTNeLbXPhjt7wS/PrYcZqer3Tpj3K3y
+GdhJlAghXbhJENtdfsNzN4SPI2lDEzIimN0PeABf+9ldbKLs+6XU2yXNHdg2cX+k
+uPddkeK9Q2TrZIFEZKGjOnQE59vyDC29wBjZECB14EV1pG/2CrEnkBwU7poeBqd4
+N6/l0JCCCk77s9DH5cUtlJH7qDmrqY3Duv2snYNbYfl72TOsSFLM+/fN4Ze8eNnk
+TeTrsQbrkOJHU9sOjeuEN9GNeavcRIU1aRIIRofE1YpdNbzk022x4kvV8uCFAby2
+gewucmgWDFTID7KfQClGufAq4p+jjq2qfsGYI2pV+b/pl1nfbNgeYBFD4bJpdyN/
+JPcloThSjc2ui6Id2yTFWRQeEWqALuq5easvKTjXiNixHLVtyxstt9gZ8YkCHAQQ
+AQgABgUCVASuhAAKCRDpQd7afFu2pfKkD/4wsbCOka5+BtHjQCTOnk6T1Ld26i7y
+XbOq7Gnzim+t8ViCc2jNwVpiDeam93OJ4kIJFOg9dUzp9Gd4WOyfdP8hsKBpTxCk
+WjORa8sExpJHAc3F8zhIlUgFrBwOJsJ3hZz7MD6PLF1EEo+9bsbOgLi65iLT9yfk
+WNFJdYE4OB6Hzqwch7DrtYfQpcPNXPfwMnKcrvMnMvHCMRCaJmEt/qI7hrnnNwF7
+h4QwxvH0fvkzq48nryjbiCrB3GHaD2tzrOTZgIAYcR4iVLb9PfAJ79uTvZJlCOAU
+livWTo5gPHO2YjmdQnI8CPqQbl6M8fmLLdvePOsS+icO36bBGC3+ZlE0UbUw5M4M
+Hkzza6QEYL0MG2I2lkrCJKzx8BcoO5lAZy9cP+mXCBJDtIZ2StWGMmszr/egjlDI
+yE42/W4nIXE1qFPP366wa7wTO2hXHvH/AUn3oEPMtJsQRgRG9aC8C0GJ+ufepk/K
++9aoxo/TsZXgWax+EvFhRZ/TeXW7ZIeKGP4U4rt00rB9M5XWvbkxVNNUZ+2Wb7r0
+w9kAZ0jtqjpW6SHgzaJ5btzvgLw5zMyMIuBq69RjTqOHbtZ8i95w6iKAyDPKOmfd
+c/4d6R0XmCt4utSn/fjsvf9ZWKGBrIFRi6UwBcXjcSGy749bbnN4RdVtAddBAzqz
+vZFidRMUtEgsI4kCHAQQAQIABgUCWJzSZQAKCRAsZGSvKo5MAjtzEACVY7ze4P7J
+qhG4k4K+2AVTNFBECsQM36NoVIaPahUbG83RJ6yg7XmeVM6zsVT1vvbFwrRh/i9/
+nzSoDtZabWtxkeLbVIzUKEtVz5ZHdCaAhu5ER+r6Zke4S1xOdiERVWUJvhcnVeSF
+l3Kgae3WNoMOayXQNZyLiyC3UnpQZV+d+rNPmLEja9NTrdxx02QdMxOlNVORTtsn
+wcIxUTagAL134HUu2T2QmHBae1f6JsnK8pgLNvmCDIM3d0O14mdiA1Y4wJGwjDGf
+znaLl98xhTxI5Gf8ymC+hMuLcyg+b4yqoEH+8eIzBkfI5GbBsBuswcXId+7rZSit
+MlANektOpwJQ7wXhSofofwxttkYHeTJWBDpTj6TkoO4y5UWwATXGg2rapcAQy5Ek
+yQV0ZvBIO7UQPpiEZQKALABxcFLerVXeyutcXXNhtgsOZboAlB8sbDUsE4GHgcKE
+iLa5gRWAvod7LmrAb4k/78ZjAVdCp6bo8J9aXR8sJdlVOM8Czqa0XX20h3LXkiuV
+Xv4+kwb4lU6w/OwKzDZzdOv5JQdV6/AYYYsGxgZ2EtV5tAzTn5vlVPYd1xOj9OHA
+2hnZwExXkWy2huByJkF7AYzZu9nEykhGwU4DrXr9EkOAsTmy/EC/En+4Ue65jk58
+m+c01Mh5vg+9TcS2dAkLUbUC6Fob/dB1XIkCHAQQAQgABgUCWOKYmwAKCRCcUD7i
+m3k+xRQwEACqhzGR1UjUgn8Lwgu0kupBmb/gTlyDk8CK6kALMimpf6AbDnc9yaxi
+uvFXX+HgGr7nT7FdcBmM2a6ZX03xNqFGNNxdi5gLF14iLyo/QqBD2D7rVWCDLphc
+PLBvgb0+dP5/AdK7DYQkppFJ83lWW1citmKwCuWyQ3+bn8D6CqgwEg/U/TO7FalB
+oMaad87WQ49YN9sFrkeF4wILsWO1lwMSVDhya8+/m9hxNvSGMh/twAP3DB8qgySQ
+AfxDdL0b+Al17wSBH38xSZJ0ebga03vCVwCfhSgjbGHZQ7i7t0SWJhq+ROlib/lb
+s/Lc22DCK0l26lR0Fd7thMyX7y1nSsXeVNGZ4jfcmfYUwhsky0w/eB67GYIliWj5
+U+elcrCfXIQd36ClEVRV/+rR736mzx7/bIzFFshRrCThRwUgpF5/x890nRVtvRvF
+hvFDIYUjUXru6vZJBQx3BjW2cTCvN6XngoVWbrleRUU2jiLujBihUdhm6U1BGyHW
+BmAQ2O5y1id0Skpr5r9DDIXrFeBVvq91mJFMsj1xotIXELKf5D7JX4qrR0w02TVl
+pw+OUrbSWMqzb/GK/nap8XzNp8jR/drfTsKXfOS++uJnIe4cry53FRZXBUlLCDV7
+NT+TucdmqdObGsPNOCZLX7cAfl3/R01CuNT3ur7OBKRiyuKbGzt4cokCHAQQAQgA
+BgUCWR6rxQAKCRCHPeownROAf/BGD/0Va+RV+/mCK6bMGkhGiscFRmGLWgNKxsAA
+2JIT33kLiL2Tn3xdYSkdYpaFmtPxjfOs0KEvDPvWqJyZOFARKv/8BPHGW/+wPlkZ
+ON+B2wW7KzuKvN/p7xOdDrSTMZ1WXD8jbQILUNLzvOivTnr7FFCV2e0c+jlrTW9W
+nSXfcr7HedncC5aqMLb79DirqNEqgsjDUwKLw2hGzeXNs7YFXlbibcrR5KSK75Pl
+tlOiYW/bXsMiOrh1KGP81Mr4+fbn4L0yNGiuHGJKxn9skcajrYSzlgKZkURd/9yk
+32NpXpBq8d0eV7l+dZrr68HftsVWHDQbMcvP1jZxtJBkdC7mmk5S92a1VHwAeRFS
+6RSc4ge5QEOfowMJsth5R27b6xqiDuPQ0vqwdgYGyD5XqzO0oMTx3o8p0VG2m1VI
+ACgqPV0AOepasPBnntOltFZZxJVZVSkgXeMADXjXeTYOuH5iDJnJH/bNUgjo0uD9
+dIkMBJSzd3TQFMs4NcqmHmgDvtH5+wgG5UcdpPOTtGJ2L/YMhqgqy5UqiSPB4Uek
+eRVxBUhjCZz2qYWCZfgBzyQu6N5jfdbTvZdLcP+oseWcLCBPoMU5RSLILxFPMHl3
+W9+H+jGzq20u6X1u3Z3zJGxPoxaaZkAL8kIwR+E8uAWywdquljAX3mc1yzJTBPxd
+HnV9lz/TdrQhTG9pYyBEYWNoYXJ5IDxsb2ljQGZyZWVkb20ucHJlc3M+iQJOBBMB
+CAA4FiEE6Z/+g99z5y+2smTtmS0js5L55PIFAlotvG8CGwMFCwkIBwMFFQoJCAsF
+FgIDAQACHgECF4AACgkQmS0js5L55PI8gw//f6F/8QdzVDopGcceaDM1AsPQpl5f
+JUjcmJwZLhXR26/+wIZ0CcIZNc/Gys/qTALuWb3Eeqt0sDJX+W76ZxsUYWBk9hz9
+175kJ9IMQ2uz/+JDQRC2TkU00cLcnwbV7gNqy+MQWoW2hLLJP4MXdEIUs1ABNX/l
+k9FJmBfV6w68X4IaJKsVTmo5IBumJKCAyxpGLrgUSDeYZEfGkObKlYxk9Rpw5OlW
+hhGf3WyFER+0etWlGn5DjW+UkiuiOCQ0gw+50maluVG3+Rvao+YCdR8mv5tsMLPy
+KOPKrnc5ShZavDQ3zcOHvoI+GSV7pYDO6VUQyhxjUjkLU/8atLXsL3CTeCp51g3i
+eE4nHtef47AIZRo170zOqbpeH7hsgrGcsWhIIkMFnscDAOEaiIwWXt/ZGSBIhKGM
+l1HcPMIDTKLgBONUinFupGpHqv24swwt3b4M4u9d0Kn43ckuOa42MYKlFmyHctO/
++FDrRnJvtiFH5eiXYYHGhdTIzfsoPcDPeGqgDgOjjdS/XYSbufLY6oHoCHPuF7yF
+m/lecSQShQqbzY3bCtN7RMOEvbHR/BYcncXgy97vOSp/4cefK+zw1Rk3x2B/18d0
+GUn3gwpxMsog/aAKdhmftI9cTooeMlGF+NhQxYgC6s4cOQ976AFBLn05xYrd8Z0e
+hKPmFpwExoTyEq+5Ag0EVABipgEQAMnduvH1p9HE0nrtnWlddBCmxr/uZ+JcotfV
+z1XrFj9m438FiKy1qGlb6JXaMbr8vBAqqR2Pu/PfFjNl21zby9Ek7kR9yYYlZhem
+ksObehmIX5YxICUl5RvXDjfLSdm6OR9YbpCi19Wg0VXsEkQ9v/kXVS7D6XQXAQUl
+Gu+TcVqltfwdtQvTha4qQaTXnEVPwdSwqI3tHqA2kALWG/c93HZmeuQs3FX02MDT
+LN4uBR9DeYfFGGnIYWt71UeG2aXf5axyOi03XZgKDw3zpJGwukdVH7tMFtfT0W0r
+rTVDkMdtSXMVrEOHpT3b0j81Z3Ma3fbarSPKgTGr1eSwF2WQBpNrxmqQkLawyzDQ
+RPzXN9ge31k7lSkUFUfBGm6zJ/SlPnmVHxGNGAInozXN+VX+AjGKGvTOvT9qUXql
+GOTMeCCNQLgH0QexfUfI2vpk6MGtcfxNKhLGNPi/T1HCy+QdFv3K6qKtq4hDZ6mx
+FfB4eO8xRa0GYkI5/cPY46XiBATxBx8A1Eo+O/7vUrCBONAkxTCQdas3M/0mY2Kf
+lmuF3tqNp1wsRRVruVD+ghcUNU0uJucDRLfrSqm2yOCM56LRlFA7uD9fpBIUDV7h
+u1DRlkIhM9d+CaAYDdJQ5PsT9ddk3h5Ej1DRDTmCfCxZyjpWR6CNXyoVVeLX6Al2
+Hsa/EVS3ABEBAAGJAh8EGAEIAAkFAlQAYqYCGwwACgkQmS0js5L55PJDDhAAkEzd
+5X/791Yazg40tVQvg9LsM3Ak2lyOzxS76OTCqIbtdhh74uFrTSrPVIKIf1gFxv6J
+KzjRk8KRmzzKqlLGy/Fmd7j6C28bsbZNVqryRyJ+zFTsaC72x5F6wX1RTvwLMIO7
+RVXwFkKKegIrjMr9n+mpCL8xs2JVPEM7Z7U+BrOQUER6QLWtpFnnMtPUI/k03Fav
+K4XDI2AI9+FH2KZ6Fb0vjKT7Y5ToeH1LJz4FS44sZfQlA14BU6JvsxsuT71iCgRG
+B0ChxujLBNTqp5Qvi/xNv8bg1Rr1Wrtv9wohQ5JbDtC29khmbMeEg3YRU9aO2/NY
+o1oaJOaVm7mFD4QyD3Ek4mgOH/PaN9K/V9ZRjR1vacybBv9aF8ngRaf4glmBSdz/
+yK121bErWfqsQsdH3WxAhrKgF+lojaoaIZO5Sj4+y34jEbafxd2v9j594bXPSqvn
+jRqI9WG50Fll98PNqzq5hqjLq/fOhREin/QrYc2LZ+P2+VTSsnRU6qaokuqo+T+p
+4Glvij6RGKNZ+A/Ogv2QV8in2BHvQ57fAlSn9VLdzUnQ2jxmhVNKmwM+ITIjxkGS
+Ya0qJXNVRHKMfHFoUWa5Zyck+IGBKgLUhJ4gs8SKj85zRbLvrQydt5QUVrO9319Y
+DJU+9WUOomFaLd0GpnVJJhXofslisnu+0AFLLlo=
+=WumQ
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/admin/tests/files/ossec.pub b/admin/tests/files/ossec.pub
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/ossec.pub
@@ -0,0 +1,52 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v2.0.19 (GNU/Linux)
+
+mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t
+wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU
++8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM
+KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB
+0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg
+u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2
+ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B
+5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf
+CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7
+hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf
+yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB
+tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP
+RFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS
+m8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r
+FyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/
+JzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r
+q4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn
+5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg
+b1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi
+AsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW
+iNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3
+cm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai
+R78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj
+EqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj
+LnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6
++SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC
+vfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq
+95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf
+T9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls
+9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC
+KDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p
+N4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21
+OcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h
+h4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls
+Vr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f
+FwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA
+6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3
+inyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA
+KmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn
+IW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f
+4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK
+n1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ
+BSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF
+z2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI
+k8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS
+iphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ
+Vu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8
+=XVz8
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/admin/tests/files/site-specific b/admin/tests/files/site-specific
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/site-specific
@@ -0,0 +1,19 @@
+app_hostname: app
+app_ip: 10.20.2.2
+dns_server: 8.8.8.8
+monitor_hostname: mon
+monitor_ip: 10.20.3.2
+ossec_alert_email: [email protected]
+ossec_alert_gpg_public_key: key.asc
+ossec_gpg_fpr: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2
+sasl_domain: gnu.com
+sasl_password: passowrdok
+sasl_username: usernameok
+securedrop_app_gpg_fingerprint: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2
+securedrop_app_gpg_public_key: key.asc
+securedrop_app_https_on_source_interface: false
+securedrop_supported_locales:
+- en
+smtp_relay: smtp.gmail.com
+smtp_relay_port: 587
+ssh_users: sd
diff --git a/admin/tests/files/site-specific-missing-entries b/admin/tests/files/site-specific-missing-entries
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/site-specific-missing-entries
@@ -0,0 +1,14 @@
+app_hostname: app
+app_ip: 10.20.2.2
+dns_server: 8.8.8.8
+monitor_hostname: mon
+monitor_ip: 10.20.3.2
+ossec_alert_email: [email protected]
+ossec_alert_gpg_public_key: key.asc
+ossec_gpg_fpr: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2
+sasl_domain: gnu.com
+sasl_password: passowrdok
+sasl_username: usernameok
+securedrop_app_gpg_fingerprint: E99FFE83DF73E72FB6B264ED992D23B392F9E4F2
+securedrop_app_gpg_public_key: key.asc
+smtp_relay: smtp.gmail.com
diff --git a/admin/tests/files/test_journalist_key.pub b/admin/tests/files/test_journalist_key.pub
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/test_journalist_key.pub
@@ -0,0 +1,52 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v2.0.19 (GNU/Linux)
+
+mQINBFJZi2ABEACZJJA53+pEAdkZyD99nxB995ZVTBw60SQ/6E/gws4kInv+YS7t
+wSMXGa5bR4SD9voWxzLgyulqbM93jUFKn5GcsSh2O/lxAvEDKsPmXCRP1eBg3pjU
++8DRLm0TEFiywC+w6HF4PsOh+JlBWafUfL3vwrGKTXvrlKBsosvDmoogLjkMWomM
+KBF/97OKyQiMQf1BDJqZ88nScJEqwo0xz0PfcB04GAtfR7N6Qa8HpFc0VDQcILFB
+0aJx5+p7nw1LyR37LLoK8JbEY6QZd277Y0/U+O4v6WfH/2H5kQ8sC+P8hPwr3rSg
+u3SVbNRasB4ZHFpJZR9Kv21zmQb9U3rrCk2yg3Wm0qtZ0S5CECAAwG2LQkKouRw2
+ak+Y8aolHDt6a785eF0AaAtgbPX4THMum/CNMksHO0PBBqxR+C9z7WSHXFHvv+8B
+5nRccS4m4klyYTbZOOJ45DuC3xDjTRwzzpkYhqf4pLAhwF3spKZsAczAFPmDyxFf
+CyIBiMZSK/j8PMJT1X5tgpL1NXImNdVIPV2Fy+W7PkNfG2FL/FQIUnK6ntukLW/7
+hV6VHcx52mMn1pVUc6v80LEb4BMDz41vlj9R8YVv8hycPtnN0QL5gIME1n7jbKJf
+yfWxkvBXMINDgHK/RysRMP6FXA6Mw65BGNIuO0Il0FTy12HuKI/coEsG2QARAQAB
+tDZTZWN1cmVEcm9wIFRlc3QvRGV2ZWxvcG1lbnQgKERPIE5PVCBVU0UgSU4gUFJP
+RFVDVElPTimJAjsEEwECACUCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJS
+m8UzAhkBAAoJEMxA7xIoJxRB1hAP/jVoFRi1R3i4P3EhmaYg9VQUo5SRyfMDoE6r
+FyzOv2x3vRqPM1Bm4ihLQePfwKsJLDo7UVgjmTNEY4bpSYmKus/uo6Kx6yrxm6d/
+JzY0BER+LJi0iA0iyLTqYk3eXyxQmHmy6my8zVyag5k/f/DejSUQgckJZ9pAhr7r
+q4aTCWYapo/6fDM0XAo1T5Upt/iSqHet6NZR15JCDHIvGJYGAxVemccSNKFb1tsn
+5aIMuGDbNivCUIFav+eo2JIEy60BokcZCy68qWwtlO5nIao79MoNMNz2EFSOomOg
+b1sNadEj2vAkLfU4+dOVbYsFGUzOaV0mUHcaTNPYwnK+PgyOi5M05BX55a9FSBgi
+AsEwEnDK1lvzLfWEQxVQvsw9A9vnCbSX8PwC4/uUtokkKxVN9ICl8AfaT38+OUHW
+iNl4NCgd26iRgTLhfMXpTjRyOb2RvFdzLByDEWIbvu5kCh247UFYSL0llk+suNh3
+cm0mOUdL1nZuEo4EyEF1dq+1opMfDMF98q0660wZdwvwUQIXBt/yK3FH0BGA66ai
+R78Z4pH1JqtYvzfDJx+XP8O2N9GYGd7kpak/5C2BTJzLVyzagB1yi8SmiYna5yQj
+EqW5Txeq0GGd2H4KtUETUevU4x0Rw3luHToaDd9d5sioF48o87PlGwk+OCofPfLj
+LnwFPNZcuQINBFJZi2ABEADzfv+9Ogb4KEWFom9zMF+xg8bcd/Ct72/sWLQW6Pz6
++SkmLEHuklTO+k7xiQ6jdzXzj1rTfy317L7G51naBSb6Ekfv8mu2ogOwrvtgYnGC
+vfCpooUSxcfi+aEJzIJL29TAi1RCLZm15KRbkvEl8wS93BSLiag5w4/8eP1vXebq
+95GrCZwiNZdhdQs3qn4j3VRvTW/SZHIAdJY+mMfUMPjq4c4sA82os6kVrEnWeLGf
+T9d+knfm9J/2Rumy90bLAY6SFmRZ9/DxwKwbIsVy8CRvU3RVFSX8HCBQepRCQkls
+9r7KVBqYE2Wh+0a+9wHHHNI7VBxKGXPflrirxY1AB5vjLcX1hmXbCoyf4ytgdHyC
+KDz9Oc+xkgJeyVW6XwSqc5EhuNFXp3+C7BF7eQZ1REJLbL6CtEkeF0jHBaTeKM/p
+N4fVhjPiU/FsNmZGKxxLyxDnnDI5pY8bhphVxwBRZ5GtVNqiVNDw+rRACQalpT21
+OcAgLP+Rz+qf3TPyEZN6WPEx8/76ILuSHb8mpOH7W/514f5NuFaAlgmUnO3cT10h
+h4IwOQ+kvj0qMww8fASI9DJExXUYb3xDSCmOkJPhu1/Drr3gdFBha4/jAz7jBWls
+Vr2RLJzilf8Mi9j8WpHIfP+WXtwWz3+iYPS0SPoB7g9DA0+Ei760pJJf73AEjD+f
+FwARAQABiQIfBBgBAgAJBQJSWYtgAhsMAAoJEMxA7xIoJxRBp/cP/3lJx9z5yzZA
+6UvLQR6pK+V1iy2hvZ+S+EwYRCiTgYTXekHzLXWwjWGfUYDTHMeaS9O9BMRMGOU3
+inyb47GZSoQ0N0bRVTzrY6/0ifhUSJ00MemOodI1bz4pAMk3uR8iWyhlaGn7JAIA
+KmCm+K0qkeJd61S9iyrx7s9QmaNPnupm5pc+bpOAkbKyq7sEFpWM5Qx82n1tVMtn
+IW2OoRPbz80JkkQB2pl6SjskXqZ89jcFWGI6IChYENKc65xafDt4uFuHU+5j4j2f
+4ySYSwfoWC97MOgJLqA/WimxeeNCYFhykUDWrL5mKBTgMXgH/sYk3GDo7fssaYbK
+n1xbbX4GXQl3+ru4zT6/F7CxZErjLb+evShyf4itM+5AdbKRiRzoraqKblBa4TfJ
+BSqHisdcxdZeBe19+jyY6a8ZMcGhrQeksiKxTRh7ylAk7CLVgLEIHLxXzHoZ0oAF
+z2ulG+zH9KS9Pe8MQxHCrlyfoQElQuJoYbrYBOu28itvGPgz6+5xgvZROvPoqIkI
+k8DYt9lJqUFBeZuFJd5W1TuHKLxueVYvSKeG+e3TjOYdJFvDZInM4cNWr8N92mYS
+iphljiHAKVTQeIf1ma07QUH/ul3YC+g07F+BLonIIXA6uQVebv5iLxTgOzIQwHTJ
+Vu4MPiQNn1h4dk1RonfV/aJ+de1+qjA8
+=XVz8
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/securedrop/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin-setup.py
similarity index 74%
rename from securedrop/tests/test_securedrop-admin.py
rename to admin/tests/test_securedrop-admin-setup.py
--- a/securedrop/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin-setup.py
@@ -18,39 +18,36 @@
#
import argparse
-import imp
-from os.path import abspath, dirname, join, realpath
import pytest
import subprocess
-here = abspath(join(dirname(realpath(__file__))))
-securedrop_admin = imp.load_source('sa', here + '/../securedrop-admin')
+import bootstrap
class TestSecureDropAdmin(object):
def test_verbose(self, capsys):
- securedrop_admin.setup_logger(verbose=True)
- securedrop_admin.sdlog.debug('VISIBLE')
+ bootstrap.setup_logger(verbose=True)
+ bootstrap.sdlog.debug('VISIBLE')
out, err = capsys.readouterr()
assert 'VISIBLE' in out
def test_not_verbose(self, capsys):
- securedrop_admin.setup_logger(verbose=False)
- securedrop_admin.sdlog.debug('HIDDEN')
- securedrop_admin.sdlog.info('VISIBLE')
+ bootstrap.setup_logger(verbose=False)
+ bootstrap.sdlog.debug('HIDDEN')
+ bootstrap.sdlog.info('VISIBLE')
out, err = capsys.readouterr()
assert 'HIDDEN' not in out
assert 'VISIBLE' in out
def test_run_command(self):
- for output_line in securedrop_admin.run_command(
+ for output_line in bootstrap.run_command(
['/bin/echo', 'something']):
assert output_line.strip() == 'something'
lines = []
with pytest.raises(subprocess.CalledProcessError):
- for output_line in securedrop_admin.run_command(
+ for output_line in bootstrap.run_command(
['sh', '-c',
'echo in stdout ; echo in stderr >&2 ; false']):
lines.append(output_line.strip())
@@ -59,19 +56,19 @@ def test_run_command(self):
def test_install_pip_dependencies_up_to_date(self, caplog):
args = argparse.Namespace()
- securedrop_admin.install_pip_dependencies(args, ['/bin/echo'])
+ bootstrap.install_pip_dependencies(args, ['/bin/echo'])
assert 'securedrop-admin are up-to-date' in caplog.text
def test_install_pip_dependencies_upgraded(self, caplog):
args = argparse.Namespace()
- securedrop_admin.install_pip_dependencies(
+ bootstrap.install_pip_dependencies(
args, ['/bin/echo', 'Successfully installed'])
assert 'securedrop-admin upgraded' in caplog.text
def test_install_pip_dependencies_fail(self, caplog):
args = argparse.Namespace()
- with pytest.raises(SystemExit):
- securedrop_admin.install_pip_dependencies(
+ with pytest.raises(subprocess.CalledProcessError):
+ bootstrap.install_pip_dependencies(
args, ['/bin/sh', '-c',
'echo in stdout ; echo in stderr >&2 ; false'])
assert 'Failed to install' in caplog.text
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
new file mode 100644
--- /dev/null
+++ b/admin/tests/test_securedrop-admin.py
@@ -0,0 +1,459 @@
+# -*- coding: utf-8 -*-
+#
+# SecureDrop whistleblower submission system
+# Copyright (C) 2017 Loic Dachary <[email protected]>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import argparse
+from os.path import dirname, join, basename
+import mock
+from prompt_toolkit.validation import ValidationError
+import pytest
+import string
+import subprocess
+import textwrap
+import yaml
+
+import securedrop_admin
+
+
+class Document(object):
+ def __init__(self, text):
+ self.text = text
+
+
+class TestSecureDropAdmin(object):
+
+ def test_verbose(self, capsys):
+ securedrop_admin.setup_logger(verbose=True)
+ securedrop_admin.sdlog.debug('VISIBLE')
+ out, err = capsys.readouterr()
+ assert 'VISIBLE' in out
+
+ def test_not_verbose(self, capsys):
+ securedrop_admin.setup_logger(verbose=False)
+ securedrop_admin.sdlog.debug('HIDDEN')
+ securedrop_admin.sdlog.info('VISIBLE')
+ out, err = capsys.readouterr()
+ assert 'HIDDEN' not in out
+ assert 'VISIBLE' in out
+
+
+class TestSiteConfig(object):
+
+ def test_exists(self):
+ args = argparse.Namespace(site_config='DOES_NOT_EXIST',
+ ansible_path='.',
+ app_path=dirname(__file__))
+ assert not securedrop_admin.SiteConfig(args).exists()
+ args = argparse.Namespace(site_config=__file__,
+ ansible_path='.',
+ app_path=dirname(__file__))
+ assert securedrop_admin.SiteConfig(args).exists()
+
+ def test_validate_not_empty(self):
+ validator = securedrop_admin.SiteConfig.ValidateNotEmpty()
+
+ assert validator.validate(Document('something'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document(''))
+
+ def test_validate_ossec_username(self):
+ validator = securedrop_admin.SiteConfig.ValidateOSSECUsername()
+
+ assert validator.validate(Document('username'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('bad@user'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('test'))
+
+ def test_validate_ossec_password(self):
+ validator = securedrop_admin.SiteConfig.ValidateOSSECPassword()
+
+ assert validator.validate(Document('goodpassword'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('password123'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document(''))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('short'))
+
+ def test_validate_ossec_email(self):
+ validator = securedrop_admin.SiteConfig.ValidateOSSECEmail()
+
+ assert validator.validate(Document('[email protected]'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('badmail'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document(''))
+ with pytest.raises(ValidationError) as e:
+ validator.validate(Document('[email protected]'))
+ assert 'something other than [email protected]' in e.value.message
+
+ def test_is_tails(self):
+ validator = securedrop_admin.SiteConfig.ValidateDNS()
+ with mock.patch('subprocess.check_output', return_value='Tails'):
+ assert validator.is_tails()
+ with mock.patch('subprocess.check_output', return_value='Debian'):
+ assert validator.is_tails() is False
+ with mock.patch('subprocess.check_output',
+ side_effect=subprocess.CalledProcessError(
+ 1, 'cmd', 'BANG')):
+ assert validator.is_tails() is False
+
+ def test_lookup_dns(self, caplog):
+ validator = securedrop_admin.SiteConfig.ValidateDNS()
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.is_tails',
+ return_value=True):
+ with mock.patch('subprocess.check_output',
+ return_value='has address') as check_output:
+ assert validator.lookup_fqdn('gnu.org', '8.8.8.8')
+ assert check_output.call_args[0][0].startswith('torify')
+ assert check_output.call_args[0][0].endswith('8.8.8.8')
+
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.is_tails',
+ return_value=False):
+ with mock.patch('subprocess.check_output',
+ return_value='failed') as check_output:
+ assert validator.lookup_fqdn('gnu.org') is False
+ assert not check_output.call_args[0][0].startswith('torify')
+ assert 'failed' in caplog.text
+
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.is_tails',
+ return_value=False):
+ with mock.patch('subprocess.check_output',
+ side_effect=subprocess.CalledProcessError(
+ 1, 'cmd', 'BANG')):
+ assert validator.lookup_fqdn('gnu.org') is False
+ assert 'BANG' in caplog.text
+
+ def test_validate_dns_server(self, caplog):
+ validator = securedrop_admin.SiteConfig.ValidateDNSServer()
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn',
+ return_value=True):
+ assert validator.validate(Document('8.8.8.8'))
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn',
+ return_value=False):
+ with pytest.raises(ValidationError):
+ validator.validate(Document('8.8.8.8'))
+
+ def test_lookup_fqdn(self, caplog):
+ validator = securedrop_admin.SiteConfig.ValidateFQDN()
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn',
+ return_value=True):
+ assert validator.validate(Document('gnu.org'))
+ with mock.patch('securedrop_admin.SiteConfig.ValidateDNS.lookup_fqdn',
+ return_value=False):
+ with pytest.raises(ValidationError):
+ assert validator.validate(Document('gnu.org'))
+
+ def test_validate_user(self):
+ validator = securedrop_admin.SiteConfig.ValidateUser()
+ with pytest.raises(ValidationError):
+ validator.validate(Document("amnesia"))
+ with pytest.raises(ValidationError):
+ validator.validate(Document("root"))
+ with pytest.raises(ValidationError):
+ validator.validate(Document(""))
+ assert validator.validate(Document("gooduser"))
+
+ def test_validate_ip(self):
+ validator = securedrop_admin.SiteConfig.ValidateIP()
+ with pytest.raises(ValidationError):
+ validator.validate(Document("599.20"))
+ assert validator.validate(Document("192.168.1.1"))
+
+ def test_validate_path(self):
+ mydir = dirname(__file__)
+ myfile = basename(__file__)
+ validator = securedrop_admin.SiteConfig.ValidatePath(mydir)
+ assert validator.validate(Document(myfile))
+ with pytest.raises(ValidationError):
+ validator.validate(Document("NONEXIST"))
+ with pytest.raises(ValidationError):
+ validator.validate(Document(""))
+
+ def test_validate_yes_no(self):
+ validator = securedrop_admin.SiteConfig.ValidateYesNo()
+ with pytest.raises(ValidationError):
+ validator.validate(Document("something"))
+ assert validator.validate(Document("yes"))
+ assert validator.validate(Document("YES"))
+ assert validator.validate(Document("no"))
+ assert validator.validate(Document("NO"))
+
+ def test_validate_fingerprint(self):
+ validator = securedrop_admin.SiteConfig.ValidateFingerprint()
+ assert validator.validate(Document(
+ "012345678901234567890123456789ABCDEFABCD"))
+ assert validator.validate(Document(
+ "01234 5678901234567890123456789ABCDE FABCD"))
+
+ with pytest.raises(ValidationError) as e:
+ validator.validate(Document(
+ "65A1B5FF195B56353CC63DFFCC40EF1228271441"))
+ assert 'TEST journalist' in e.value.message
+
+ with pytest.raises(ValidationError) as e:
+ validator.validate(Document(
+ "600BC6D5142C68F35DDBCEA87B597104EDDDC102"))
+ assert 'TEST admin' in e.value.message
+
+ with pytest.raises(ValidationError) as e:
+ validator.validate(Document(
+ "0000"))
+ assert '40 hexadecimal' in e.value.message
+
+ with pytest.raises(ValidationError) as e:
+ validator.validate(Document(
+ "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"))
+ assert '40 hexadecimal' in e.value.message
+
+ def test_sanitize_fingerprint(self):
+ args = argparse.Namespace(site_config='DOES_NOT_EXIST',
+ ansible_path='.',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ assert "ABC" == site_config.sanitize_fingerprint(" A bc")
+
+ def test_validate_int(self):
+ validator = securedrop_admin.SiteConfig.ValidateInt()
+ with pytest.raises(ValidationError):
+ validator.validate(Document("123X"))
+ assert validator.validate(Document("192"))
+
+ def test_locales(self):
+ locales = securedrop_admin.SiteConfig.Locales(dirname(__file__))
+ translations = locales.get_translations()
+ assert 'en_US' in translations
+ assert 'fr_FR' in translations
+
+ def test_validate_locales(self):
+ validator = securedrop_admin.SiteConfig.ValidateLocales(
+ dirname(__file__))
+ assert validator.validate(Document('en_US fr_FR '))
+ with pytest.raises(ValidationError) as e:
+ validator.validate(Document('BAD'))
+ assert 'BAD' in e.value.message
+
+ def test_save(self, tmpdir):
+ site_config_path = join(str(tmpdir), 'site_config')
+ args = argparse.Namespace(site_config=site_config_path,
+ ansible_path='.',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ site_config.config = {'var1': u'val1', 'var2': u'val2'}
+ site_config.save()
+ expected = textwrap.dedent("""\
+ var1: val1
+ var2: val2
+ """)
+ assert expected == open(site_config_path).read()
+
+ def test_validate_gpg_key(self, caplog):
+ args = argparse.Namespace(site_config='INVALID',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ site_config.config = {
+ 'securedrop_app_gpg_public_key':
+ 'test_journalist_key.pub',
+
+ 'securedrop_app_gpg_fingerprint':
+ '65A1B5FF195B56353CC63DFFCC40EF1228271441',
+
+ 'ossec_alert_gpg_public_key':
+ 'test_journalist_key.pub',
+
+ 'ossec_gpg_fpr':
+ '65A1B5FF195B56353CC63DFFCC40EF1228271441',
+ }
+ assert site_config.validate_gpg_keys()
+ site_config.config['ossec_gpg_fpr'] = 'FAIL'
+ with pytest.raises(securedrop_admin.FingerprintException) as e:
+ site_config.validate_gpg_keys()
+ assert 'FAIL does not match' in e.value.message
+
+ @mock.patch('securedrop_admin.SiteConfig.validated_input',
+ side_effect=lambda p, d, v, t: d)
+ @mock.patch('securedrop_admin.SiteConfig.save')
+ def test_update_config(self, mock_save, mock_validate_input):
+ args = argparse.Namespace(site_config='tests/files/site-specific',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+
+ assert site_config.load_and_update_config()
+ mock_save.assert_called_once()
+ mock_validate_input.assert_called()
+
+ def test_load_and_update_config(self):
+ args = argparse.Namespace(site_config='tests/files/site-specific',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ with mock.patch('securedrop_admin.SiteConfig.update_config'):
+ site_config.load_and_update_config()
+ assert site_config.config is not None
+
+ args = argparse.Namespace(
+ site_config='tests/files/site-specific-missing-entries',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ with mock.patch('securedrop_admin.SiteConfig.update_config'):
+ site_config.load_and_update_config()
+ assert site_config.config is not None
+
+ args = argparse.Namespace(site_config='UNKNOWN',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ with mock.patch('securedrop_admin.SiteConfig.update_config'):
+ site_config.load_and_update_config()
+ assert site_config.config is None
+
+ def get_desc(self, site_config, var):
+ for desc in site_config.desc:
+ if desc[0] == var:
+ return desc
+
+ def verify_desc_consistency(self, site_config, desc):
+ (var, default, etype, prompt, validator, transform) = desc
+ # verify the default passes validation
+ assert site_config.user_prompt_config_one(desc, None) == default
+ assert type(default) == etype
+ with pytest.raises(ValidationError):
+ site_config.user_prompt_config_one(desc, '')
+
+ verify_prompt_ssh_users = verify_desc_consistency
+ verify_prompt_app_ip = verify_desc_consistency
+ verify_prompt_monitor_ip = verify_desc_consistency
+ verify_prompt_app_hostname = verify_desc_consistency
+ verify_prompt_monitor_hostname = verify_desc_consistency
+ verify_prompt_dns_server = verify_desc_consistency
+
+ def verify_prompt_securedrop_app_https_on_source_interface(
+ self, site_config, desc):
+ self.verify_desc_consistency(site_config, desc)
+ (var, default, etype, prompt, validator, transform) = desc
+ assert site_config.user_prompt_config_one(desc, True) is True
+ assert site_config.user_prompt_config_one(desc, False) is False
+ assert site_config.user_prompt_config_one(desc, 'YES') is True
+ assert site_config.user_prompt_config_one(desc, 'NO') is False
+
+ verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency
+
+ def verify_prompt_not_empty(self, site_config, desc):
+ with pytest.raises(ValidationError):
+ site_config.user_prompt_config_one(desc, '')
+
+ def verify_prompt_fingerprint(self, site_config, desc):
+ self.verify_prompt_not_empty(site_config, desc)
+ fpr = "0123456 789012 34567890123456789ABCDEFABCD"
+ clean_fpr = site_config.sanitize_fingerprint(fpr)
+ assert site_config.user_prompt_config_one(desc, fpr) == clean_fpr
+
+ verify_prompt_securedrop_app_gpg_fingerprint = verify_prompt_fingerprint
+ verify_prompt_ossec_alert_gpg_public_key = verify_desc_consistency
+ verify_prompt_ossec_gpg_fpr = verify_prompt_fingerprint
+ verify_prompt_ossec_alert_email = verify_prompt_not_empty
+ verify_prompt_smtp_relay = verify_prompt_not_empty
+ verify_prompt_smtp_relay_port = verify_desc_consistency
+ verify_prompt_sasl_domain = verify_desc_consistency
+ verify_prompt_sasl_username = verify_prompt_not_empty
+ verify_prompt_sasl_password = verify_prompt_not_empty
+
+ def verify_prompt_securedrop_supported_locales(self, site_config, desc):
+ (var, default, etype, prompt, validator, transform) = desc
+ # verify the default passes validation
+ assert site_config.user_prompt_config_one(desc, None) == default
+ assert type(default) == etype
+ assert site_config.user_prompt_config_one(
+ desc, 'en en_US') == ['en', 'en_US']
+ assert site_config.user_prompt_config_one(
+ desc, ['en', 'en_US']) == ['en', 'en_US']
+ assert site_config.user_prompt_config_one(desc, '') == []
+ with pytest.raises(ValidationError):
+ site_config.user_prompt_config_one(desc, 'wrong')
+
+ def test_user_prompt_config_one(self):
+ args = argparse.Namespace(site_config='UNKNOWN',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+
+ def auto_prompt(prompt, default, **kwargs):
+ if 'validator' in kwargs:
+ assert kwargs['validator'].validate(Document(default))
+ return default
+
+ with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt):
+ for desc in site_config.desc:
+ (var, default, etype, prompt, validator, transform) = desc
+ method = 'verify_prompt_' + var
+ print("checking " + method)
+ getattr(self, method)(site_config, desc)
+
+ def test_validated_input(self):
+ args = argparse.Namespace(site_config='UNKNOWN',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+
+ def auto_prompt(prompt, default, **kwargs):
+ return default
+
+ with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt):
+ value = 'VALUE'
+ assert value == site_config.validated_input(
+ '', value, lambda: True, None)
+ assert value.lower() == site_config.validated_input(
+ '', value, lambda: True, string.lower)
+ assert 'yes' == site_config.validated_input(
+ '', True, lambda: True, None)
+ assert 'no' == site_config.validated_input(
+ '', False, lambda: True, None)
+ assert '1234' == site_config.validated_input(
+ '', 1234, lambda: True, None)
+ assert "a b" == site_config.validated_input(
+ '', ['a', 'b'], lambda: True, None)
+ assert "{}" == site_config.validated_input(
+ '', {}, lambda: True, None)
+
+ def test_load(self, caplog):
+ args = argparse.Namespace(site_config='tests/files/site-specific',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ assert 'app_hostname' in site_config.load()
+
+ args = argparse.Namespace(site_config='UNKNOWN',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ with pytest.raises(IOError) as e:
+ site_config.load()
+ assert 'No such file' in e.value.strerror
+ assert 'Config file missing' in caplog.text
+
+ args = argparse.Namespace(site_config='tests/files/corrupted',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ with pytest.raises(yaml.YAMLError) as e:
+ site_config.load()
+ assert 'issue processing' in caplog.text
diff --git a/admin/tests/translations/fr_FR b/admin/tests/translations/fr_FR
new file mode 100644
diff --git a/securedrop/bin/test b/securedrop/bin/test
--- a/securedrop/bin/test
+++ b/securedrop/bin/test
@@ -20,6 +20,7 @@ pytest \
--page-layout \
--durations 10 \
--junitxml=/tmp/test-results/junit.xml \
+ --cov-report term-missing \
--cov-report html:/tmp/test-results/cov_html \
--cov-report xml:/tmp/test-results/cov.xml \
--cov-report annotate:/tmp/test-results/cov_annotate \
| refactor securedrop-admin sdconfig in python
# Feature request
## Description
securedrop-admin sdconfig is implemented in python via install_files/ansible-base/securedrop-configure.yml
ansible is contorted and not user friendly in this context. What we want is to display a prompt to the user with a default or the previous value read from site_specific. And have immediate validation instead of post-submission validation.
The [click](http://click.pocoo.org/5/) library is already a SecureDrop dependency and could be used for that purpose.
See also https://github.com/freedomofpress/securedrop/issues/2266 and https://github.com/freedomofpress/securedrop/issues/2095
## User Stories
As an admin I want to get immediate validation when `securedrop-admin sdconfig` prompts me for a valid when configuring securedrop for the first time.
As an admin I want `securedrop-admin sdconfig` to display the values from `install_files/ansible-base/group_vars/all/site-specific` when running it the second time so that I can modify them instead of editing the file manually.
| Quoting @redshiftzero
> i think replacing everything before "Validate site-specific information." in python makes sense and would lead to a much nicer user experience (one could still do some additional validation in python for some immediate user feedback but replacing the entire validate role seems like a big task)
Quoting @msheiny
> i'm totally on-board with you swapping that out -- i also had reservations about doing that code logic in ansible. ansible is a terrible replacement for doing that in python with proper argument parsing validation | 2017-12-19T18:56:06Z | [] | [] |
freedomofpress/securedrop | 2,784 | freedomofpress__securedrop-2784 | [
"2781"
] | 37ea1775371e226c819dd6aa7a80c809af149cfa | diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -217,7 +217,7 @@ def login():
return redirect(url_for('.lookup', from_login='1'))
else:
current_app.logger.info(
- "Login failed for invalid codename".format(codename))
+ "Login failed for invalid codename")
flash(gettext("Sorry, that is not a recognized codename."),
"error")
return render_template('login.html', form=form)
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -6,7 +6,7 @@
import crypto_util
import tempfile
import gzip
-from werkzeug import secure_filename
+from werkzeug.utils import secure_filename
from secure_tempfile import SecureTemporaryFile
| diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -12,7 +12,7 @@
from step_helpers import screenshots
-class JournalistNavigationSteps():
+class JournalistNavigationStepsMixin():
@screenshots
def _get_submission_content(self, file_url, raw_content):
@@ -334,9 +334,9 @@ def can_edit_user():
'button[type=submit]')
update_user_btn.click()
- def can_edit_user():
+ def can_edit_user2():
assert ('"{}"'.format(new_username) in self.driver.page_source)
- self.wait_for(can_edit_user)
+ self.wait_for(can_edit_user2)
# Update self.new_user with the new username for the future tests
self.new_user['username'] = new_username
diff --git a/securedrop/tests/functional/make_account_changes.py b/securedrop/tests/functional/make_account_changes.py
--- a/securedrop/tests/functional/make_account_changes.py
+++ b/securedrop/tests/functional/make_account_changes.py
@@ -2,12 +2,12 @@
from unittest import TestCase
from functional_test import FunctionalTest
-from journalist_navigation_steps import JournalistNavigationSteps
+from journalist_navigation_steps import JournalistNavigationStepsMixin
from step_helpers import screenshots
-class MakeAccountChanges(FunctionalTest, JournalistNavigationSteps, TestCase):
-
+class MakeAccountChanges(FunctionalTest, JournalistNavigationStepsMixin,
+ TestCase):
@screenshots
def test_admin_edit_account_html_template_rendering(self):
"""The edit_account.html template is used both when an admin is editing
diff --git a/securedrop/tests/functional/source_navigation_steps.py b/securedrop/tests/functional/source_navigation_steps.py
--- a/securedrop/tests/functional/source_navigation_steps.py
+++ b/securedrop/tests/functional/source_navigation_steps.py
@@ -5,7 +5,7 @@
from step_helpers import screenshots
-class SourceNavigationSteps():
+class SourceNavigationStepsMixin():
@screenshots
def _source_visits_source_homepage(self):
diff --git a/securedrop/tests/functional/test_admin_interface.py b/securedrop/tests/functional/test_admin_interface.py
--- a/securedrop/tests/functional/test_admin_interface.py
+++ b/securedrop/tests/functional/test_admin_interface.py
@@ -5,7 +5,7 @@
class TestAdminInterface(
functional_test.FunctionalTest,
- journalist_navigation_steps.JournalistNavigationSteps):
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
@screenshots
def test_admin_interface(self):
diff --git a/securedrop/tests/functional/test_journalist.py b/securedrop/tests/functional/test_journalist.py
--- a/securedrop/tests/functional/test_journalist.py
+++ b/securedrop/tests/functional/test_journalist.py
@@ -22,8 +22,8 @@
class TestJournalist(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps,
- journalist_navigation_steps.JournalistNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin,
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
def test_journalist_verifies_deletion_of_one_submission_javascript(self):
self._source_visits_source_homepage()
diff --git a/securedrop/tests/functional/test_source.py b/securedrop/tests/functional/test_source.py
--- a/securedrop/tests/functional/test_source.py
+++ b/securedrop/tests/functional/test_source.py
@@ -4,7 +4,7 @@
class TestSourceInterface(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin):
def test_lookup_codename_hint(self):
self._source_visits_source_homepage()
diff --git a/securedrop/tests/functional/test_source_notfound.py b/securedrop/tests/functional/test_source_notfound.py
--- a/securedrop/tests/functional/test_source_notfound.py
+++ b/securedrop/tests/functional/test_source_notfound.py
@@ -4,7 +4,7 @@
class TestSourceInterfaceNotFound(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin):
def test_not_found(self):
self._source_not_found()
diff --git a/securedrop/tests/functional/test_source_session_timeout.py b/securedrop/tests/functional/test_source_session_timeout.py
--- a/securedrop/tests/functional/test_source_session_timeout.py
+++ b/securedrop/tests/functional/test_source_session_timeout.py
@@ -4,7 +4,7 @@
class TestSourceSessions(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin):
def setup(self):
# The session expiration here cannot be set to -1
diff --git a/securedrop/tests/functional/test_source_warnings.py b/securedrop/tests/functional/test_source_warnings.py
--- a/securedrop/tests/functional/test_source_warnings.py
+++ b/securedrop/tests/functional/test_source_warnings.py
@@ -4,7 +4,7 @@
class TestSourceInterfaceBannerWarnings(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin):
def test_warning_appears_if_tor_browser_not_in_use(self):
self.driver.get(self.source_location)
diff --git a/securedrop/tests/functional/test_submission_not_in_memory.py b/securedrop/tests/functional/test_submission_not_in_memory.py
--- a/securedrop/tests/functional/test_submission_not_in_memory.py
+++ b/securedrop/tests/functional/test_submission_not_in_memory.py
@@ -1,6 +1,6 @@
from functional_test import FunctionalTest
import subprocess
-from source_navigation_steps import SourceNavigationSteps
+from source_navigation_steps import SourceNavigationStepsMixin
import os
import pytest
import getpass
@@ -8,7 +8,7 @@
class TestSubmissionNotInMemory(FunctionalTest,
- SourceNavigationSteps):
+ SourceNavigationStepsMixin):
def setup(self):
self.devnull = open('/dev/null', 'r')
diff --git a/securedrop/tests/functional/test_submit_and_retrieve_file.py b/securedrop/tests/functional/test_submit_and_retrieve_file.py
--- a/securedrop/tests/functional/test_submit_and_retrieve_file.py
+++ b/securedrop/tests/functional/test_submit_and_retrieve_file.py
@@ -6,8 +6,8 @@
class TestSubmitAndRetrieveFile(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps,
- journalist_navigation_steps.JournalistNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin,
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
@screenshots
def test_submit_and_retrieve_happy_path(self):
diff --git a/securedrop/tests/functional/test_submit_and_retrieve_message.py b/securedrop/tests/functional/test_submit_and_retrieve_message.py
--- a/securedrop/tests/functional/test_submit_and_retrieve_message.py
+++ b/securedrop/tests/functional/test_submit_and_retrieve_message.py
@@ -6,8 +6,8 @@
class TestSubmitAndRetrieveMessage(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps,
- journalist_navigation_steps.JournalistNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin,
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
@screenshots
def test_submit_and_retrieve_happy_path(self):
diff --git a/securedrop/tests/i18n/code.py b/securedrop/tests/i18n/code.py
--- a/securedrop/tests/i18n/code.py
+++ b/securedrop/tests/i18n/code.py
@@ -1,2 +1,4 @@
# -*- coding: utf-8 -*-
+from flask_babel import gettext
+
print(gettext('code hello i18n'))
diff --git a/securedrop/tests/pages-layout/test_journalist.py b/securedrop/tests/pages-layout/test_journalist.py
--- a/securedrop/tests/pages-layout/test_journalist.py
+++ b/securedrop/tests/pages-layout/test_journalist.py
@@ -36,8 +36,8 @@ def finalizer():
@pytest.mark.pagelayout
class TestJournalistLayout(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps,
- journalist_navigation_steps.JournalistNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin,
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
def test_account_edit_hotp_secret(self):
self._journalist_logs_in()
diff --git a/securedrop/tests/pages-layout/test_source.py b/securedrop/tests/pages-layout/test_source.py
--- a/securedrop/tests/pages-layout/test_source.py
+++ b/securedrop/tests/pages-layout/test_source.py
@@ -24,8 +24,8 @@
@pytest.mark.pagelayout
class TestSourceLayout(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps,
- journalist_navigation_steps.JournalistNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin,
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
def test_index(self):
self._source_visits_source_homepage()
@@ -141,8 +141,8 @@ def test_why_journalist_key(self):
@pytest.mark.pagelayout
class TestSourceSessionLayout(
functional_test.FunctionalTest,
- source_navigation_steps.SourceNavigationSteps,
- journalist_navigation_steps.JournalistNavigationSteps):
+ source_navigation_steps.SourceNavigationStepsMixin,
+ journalist_navigation_steps.JournalistNavigationStepsMixin):
def setup(self):
self.session_length_minutes = 0.03
| pylint: pylint rules should be refreshed and enforced in CI
| 2017-12-31T04:48:50Z | [] | [] |
|
freedomofpress/securedrop | 2,802 | freedomofpress__securedrop-2802 | [
"1562"
] | 133c9961456930397c129edeac627e3429c150cb | diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -208,7 +208,13 @@ def set_password(user_id):
@admin_required
def delete_user(user_id):
user = Journalist.query.get(user_id)
- if user:
+ if user_id == g.user.id:
+ # Do not flash because the interface already has safe guards.
+ # It can only happen by manually crafting a POST request
+ current_app.logger.error(
+ "Admin {} tried to delete itself".format(g.user.username))
+ abort(403)
+ elif user:
db_session.delete(user)
db_session.commit()
flash(gettext("Deleted user '{user}'").format(
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -270,6 +270,34 @@ def test_admin_delete_user(self):
# Verify journalist is no longer in the database
self.assertEqual(Journalist.query.get(self.user.id), None)
+ def test_admin_cannot_delete_self(self):
+ # Verify journalist is in the database
+ self.assertNotEqual(Journalist.query.get(self.user.id), None)
+
+ self._login_admin()
+ resp = self.client.post(url_for('admin.delete_user',
+ user_id=self.admin.id),
+ follow_redirects=True)
+
+ # Assert correct interface behavior
+ self.assert403(resp)
+
+ resp = self.client.get(url_for('admin.index'))
+ self.assert200(resp)
+ self.assertIn("Admin Interface", resp.data)
+ # The user can be edited and deleted
+ self.assertIn(escape("Edit user {}".format(self.user.username)),
+ resp.data)
+ self.assertIn(
+ escape("Delete user {}".format(self.user.username)),
+ resp.data)
+ # The admin can be edited but cannot deleted
+ self.assertIn(escape("Edit user {}".format(self.admin.username)),
+ resp.data)
+ self.assertNotIn(
+ escape("Delete user {}".format(self.admin.username)),
+ resp.data)
+
def test_admin_deletes_invalid_user_404(self):
self._login_admin()
invalid_user_pk = max([user.id for user in Journalist.query.all()]) + 1
| Prevent admin journalists from deleting themselves
To prevent lockouts and other forms of usability degradation, I propose one of two things:
1) a full ban on any admin from deleting themselves
2) an admin may not delete themselves if they are the last remaining admin
| There's still SSH though? Just not convinced this is really a problem that needs solving.
Agreed with @fowlslegs. I've actually used the GUI to delete the last remaining Admin intentionally during on-sites, due to staffing changes, then recreated via the CLI.
Fair enough. It just seemed like a silly thing to be able to do.
Closing as per Conor and I's comments. | 2018-01-07T13:38:59Z | [] | [] |
freedomofpress/securedrop | 2,803 | freedomofpress__securedrop-2803 | [
"1195"
] | d272e85b79848ff83ad45bbcdf43a1df472308b7 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -43,6 +43,10 @@ class FingerprintException(Exception):
pass
+class JournalistAlertEmailException(Exception):
+ pass
+
+
class SiteConfig(object):
class ValidateNotEmpty(Validator):
@@ -122,6 +126,13 @@ def validate(self, document):
raise ValidationError(
message=path + ' file does not exist')
+ class ValidateOptionalPath(ValidatePath):
+ def validate(self, document):
+ if document.text == '':
+ return True
+ return super(SiteConfig.ValidateOptionalPath, self).validate(
+ document)
+
class ValidateYesNo(Validator):
def validate(self, document):
text = document.text.lower()
@@ -143,6 +154,13 @@ def validate(self, document):
message='fingerprints must be 40 hexadecimal characters')
return True
+ class ValidateOptionalFingerprint(ValidateFingerprint):
+ def validate(self, document):
+ if document.text == '':
+ return True
+ return super(SiteConfig.ValidateOptionalFingerprint,
+ self).validate(document)
+
class ValidateInt(Validator):
def validate(self, document):
if re.match('\d+$', document.text):
@@ -192,14 +210,33 @@ def validate(self, document):
raise ValidationError(
message="Password for OSSEC email account must be strong")
- class ValidateOSSECEmail(Validator):
+ class ValidateEmail(Validator):
def validate(self, document):
text = document.text
- if text and '@' in text and '[email protected]' != text:
+ if text == '':
+ raise ValidationError(
+ message=("Must not be empty"))
+ if '@' not in text:
+ raise ValidationError(
+ message=("Must contain a @"))
+ return True
+
+ class ValidateOSSECEmail(ValidateEmail):
+ def validate(self, document):
+ super(SiteConfig.ValidateOSSECEmail, self).validate(document)
+ text = document.text
+ if '[email protected]' != text:
return True
raise ValidationError(
- message=("Must contain a @ and be set to "
- "something other than [email protected]"))
+ message=("Must be set to something other than "
+ "[email protected]"))
+
+ class ValidateOptionalEmail(ValidateEmail):
+ def validate(self, document):
+ if document.text == '':
+ return True
+ return super(SiteConfig.ValidateOptionalEmail, self).validate(
+ document)
def __init__(self, args):
self.args = args
@@ -257,6 +294,19 @@ def __init__(self, args):
u'Admin email address for receiving OSSEC alerts',
SiteConfig.ValidateOSSECEmail(),
None],
+ ['journalist_alert_gpg_public_key', '', str,
+ u'Local filepath to journalist alerts GPG public key (optional)',
+ SiteConfig.ValidateOptionalPath(self.args.ansible_path),
+ None],
+ ['journalist_gpg_fpr', '', str,
+ u'Full fingerprint for the journalist alerts '
+ u'GPG public key (optional)',
+ SiteConfig.ValidateOptionalFingerprint(),
+ self.sanitize_fingerprint],
+ ['journalist_alert_email', '', str,
+ u'Email address for receiving journalist alerts (optional)',
+ SiteConfig.ValidateOptionalEmail(),
+ None],
['smtp_relay', "smtp.gmail.com", str,
u'SMTP relay for sending OSSEC alerts',
SiteConfig.ValidateNotEmpty(),
@@ -295,6 +345,7 @@ def update_config(self):
self.config.update(self.user_prompt_config())
self.save()
self.validate_gpg_keys()
+ self.validate_journalist_alert_email()
return True
def user_prompt_config(self):
@@ -340,11 +391,17 @@ def validate_gpg_keys(self):
'securedrop_app_gpg_fingerprint'),
('ossec_alert_gpg_public_key',
- 'ossec_gpg_fpr'))
+ 'ossec_gpg_fpr'),
+
+ ('journalist_alert_gpg_public_key',
+ 'journalist_gpg_fpr'))
+ validate = os.path.join(
+ os.path.dirname(__file__), '..', 'bin',
+ 'validate-gpg-key.sh')
for (public_key, fingerprint) in keys:
- validate = os.path.join(
- os.path.dirname(__file__), '..', 'bin',
- 'validate-gpg-key.sh')
+ if (self.config[public_key] == '' and
+ self.config[fingerprint] == ''):
+ continue
public_key = os.path.join(self.args.ansible_path,
self.config[public_key])
fingerprint = self.config[fingerprint]
@@ -360,6 +417,23 @@ def validate_gpg_keys(self):
"the public key {}".format(public_key))
return True
+ def validate_journalist_alert_email(self):
+ if (self.config['journalist_alert_gpg_public_key'] == '' and
+ self.config['journalist_gpg_fpr'] == ''):
+ return True
+
+ class Document(object):
+ def __init__(self, text):
+ self.text = text
+
+ try:
+ SiteConfig.ValidateEmail().validate(Document(
+ self.config['journalist_alert_email']))
+ except ValidationError as e:
+ raise JournalistAlertEmailException(
+ "journalist alerts email: " + e.message)
+ return True
+
def exists(self):
return os.path.exists(self.args.site_config)
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -3,6 +3,7 @@
import argparse
import codecs
+import datetime
import logging
import os
import pwd
@@ -14,15 +15,16 @@
import traceback
from flask import current_app
-from sqlalchemy import text
+from sqlalchemy import text, create_engine
from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import sessionmaker
os.environ['SECUREDROP_ENV'] = 'dev' # noqa
from sdconfig import config
import journalist_app
from db import db
-from models import Journalist, PasswordError, InvalidUsernameException
+from models import Source, Journalist, PasswordError, InvalidUsernameException
from management.run import run
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
@@ -51,15 +53,15 @@ def reset(args):
# Clear submission/reply storage
try:
- os.stat(config.STORE_DIR)
+ os.stat(args.store_dir)
except OSError:
pass
else:
- for source_dir in os.listdir(config.STORE_DIR):
+ for source_dir in os.listdir(args.store_dir):
try:
# Each entry in STORE_DIR is a directory corresponding
# to a source
- shutil.rmtree(os.path.join(config.STORE_DIR, source_dir))
+ shutil.rmtree(os.path.join(args.store_dir, source_dir))
except OSError:
pass
return 0
@@ -250,10 +252,38 @@ def init_db(args):
os.chown('/var/lib/securedrop/db.sqlite', user.pw_uid, user.pw_gid)
+def were_there_submissions_today(args):
+ if config.DATABASE_ENGINE == "sqlite":
+ db_uri = (config.DATABASE_ENGINE + ":///" +
+ config.DATABASE_FILE)
+ else:
+ db_uri = (
+ config.DATABASE_ENGINE + '://' +
+ config.DATABASE_USERNAME + ':' +
+ config.DATABASE_PASSWORD + '@' +
+ config.DATABASE_HOST + '/' +
+ config.DATABASE_NAME
+ )
+ session = sessionmaker(bind=create_engine(db_uri))()
+ something = session.query(Source).filter(
+ Source.last_updated >
+ datetime.datetime.utcnow() - datetime.timedelta(hours=24)
+ ).count() > 0
+ count_file = os.path.join(args.data_root, 'submissions_today.txt')
+ open(count_file, 'w').write(something and '1' or '0')
+
+
def get_args():
parser = argparse.ArgumentParser(prog=__file__, description='Management '
'and testing utility for SecureDrop.')
parser.add_argument('-v', '--verbose', action='store_true')
+ parser.add_argument('--data-root',
+ default=config.SECUREDROP_DATA_ROOT,
+ help=('directory in which the securedrop '
+ 'data is stored'))
+ parser.add_argument('--store-dir',
+ default=config.STORE_DIR,
+ help=('directory in which the documents are stored'))
subps = parser.add_subparsers()
# Run WSGI app
run_subp = subps.add_parser('run', help='Run the Werkzeug source & '
@@ -286,6 +316,8 @@ def get_args():
set_clean_tmp_parser(subps, 'clean-tmp')
set_clean_tmp_parser(subps, 'clean_tmp')
+ set_were_there_submissions_today(subps)
+
init_db_subp = subps.add_parser('init-db', help='initialize the DB')
init_db_subp.add_argument('-u', '--user',
help='Unix user for the DB',
@@ -295,6 +327,14 @@ def get_args():
return parser
+def set_were_there_submissions_today(subps):
+ parser = subps.add_parser(
+ 'were-there-submissions-today',
+ help=('Update the file indicating '
+ 'whether submissions were received in the past 24h'))
+ parser.set_defaults(func=were_there_submissions_today)
+
+
def set_clean_tmp_parser(subps, name):
parser = subps.add_parser(name, help='Cleanup the '
'SecureDrop temp directory.')
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -178,18 +178,29 @@ def test_validate_ossec_password(self):
with pytest.raises(ValidationError):
validator.validate(Document('short'))
- def test_validate_ossec_email(self):
- validator = securedrop_admin.SiteConfig.ValidateOSSECEmail()
+ def test_validate_email(self):
+ validator = securedrop_admin.SiteConfig.ValidateEmail()
assert validator.validate(Document('[email protected]'))
with pytest.raises(ValidationError):
validator.validate(Document('badmail'))
with pytest.raises(ValidationError):
validator.validate(Document(''))
+
+ def test_validate_ossec_email(self):
+ validator = securedrop_admin.SiteConfig.ValidateOSSECEmail()
+
+ assert validator.validate(Document('[email protected]'))
with pytest.raises(ValidationError) as e:
validator.validate(Document('[email protected]'))
assert 'something other than [email protected]' in e.value.message
+ def test_validate_optional_email(self):
+ validator = securedrop_admin.SiteConfig.ValidateOptionalEmail()
+
+ assert validator.validate(Document('[email protected]'))
+ assert validator.validate(Document(''))
+
def test_is_tails(self):
validator = securedrop_admin.SiteConfig.ValidateDNS()
with mock.patch('subprocess.check_output', return_value='Tails'):
@@ -273,6 +284,13 @@ def test_validate_path(self):
with pytest.raises(ValidationError):
validator.validate(Document(""))
+ def test_validate_optional_path(self):
+ mydir = dirname(__file__)
+ myfile = basename(__file__)
+ validator = securedrop_admin.SiteConfig.ValidateOptionalPath(mydir)
+ assert validator.validate(Document(myfile))
+ assert validator.validate(Document(""))
+
def test_validate_yes_no(self):
validator = securedrop_admin.SiteConfig.ValidateYesNo()
with pytest.raises(ValidationError):
@@ -309,6 +327,12 @@ def test_validate_fingerprint(self):
"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"))
assert '40 hexadecimal' in e.value.message
+ def test_validate_optional_fingerprint(self):
+ validator = securedrop_admin.SiteConfig.ValidateOptionalFingerprint()
+ assert validator.validate(Document(
+ "012345678901234567890123456789ABCDEFABCD"))
+ assert validator.validate(Document(""))
+
def test_sanitize_fingerprint(self):
args = argparse.Namespace(site_config='DOES_NOT_EXIST',
ansible_path='.',
@@ -354,8 +378,7 @@ def test_validate_gpg_key(self, caplog):
args = argparse.Namespace(site_config='INVALID',
ansible_path='tests/files',
app_path=dirname(__file__))
- site_config = securedrop_admin.SiteConfig(args)
- site_config.config = {
+ good_config = {
'securedrop_app_gpg_public_key':
'test_journalist_key.pub',
@@ -367,12 +390,61 @@ def test_validate_gpg_key(self, caplog):
'ossec_gpg_fpr':
'65A1B5FF195B56353CC63DFFCC40EF1228271441',
+
+ 'journalist_alert_gpg_public_key':
+ 'test_journalist_key.pub',
+
+ 'journalist_gpg_fpr':
+ '65A1B5FF195B56353CC63DFFCC40EF1228271441',
}
+ site_config = securedrop_admin.SiteConfig(args)
+ site_config.config = good_config
assert site_config.validate_gpg_keys()
- site_config.config['ossec_gpg_fpr'] = 'FAIL'
- with pytest.raises(securedrop_admin.FingerprintException) as e:
- site_config.validate_gpg_keys()
- assert 'FAIL does not match' in e.value.message
+
+ for key in ('securedrop_app_gpg_fingerprint',
+ 'ossec_gpg_fpr',
+ 'journalist_gpg_fpr'):
+ bad_config = good_config.copy()
+ bad_config[key] = 'FAIL'
+ site_config.config = bad_config
+ with pytest.raises(securedrop_admin.FingerprintException) as e:
+ site_config.validate_gpg_keys()
+ assert 'FAIL does not match' in e.value.message
+
+ def test_journalist_alert_email(self):
+ args = argparse.Namespace(site_config='INVALID',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ site_config.config = {
+ 'journalist_alert_gpg_public_key':
+ '',
+
+ 'journalist_gpg_fpr':
+ '',
+ }
+ assert site_config.validate_journalist_alert_email()
+ site_config.config = {
+ 'journalist_alert_gpg_public_key':
+ 'test_journalist_key.pub',
+
+ 'journalist_gpg_fpr':
+ '65A1B5FF195B56353CC63DFFCC40EF1228271441',
+ }
+ site_config.config['journalist_alert_email'] = ''
+ with pytest.raises(
+ securedrop_admin.JournalistAlertEmailException) as e:
+ site_config.validate_journalist_alert_email()
+ assert 'not be empty' in e.value.message
+
+ site_config.config['journalist_alert_email'] = 'bademail'
+ with pytest.raises(
+ securedrop_admin.JournalistAlertEmailException) as e:
+ site_config.validate_journalist_alert_email()
+ assert 'Must contain a @' in e.value.message
+
+ site_config.config['journalist_alert_email'] = '[email protected]'
+ assert site_config.validate_journalist_alert_email()
@mock.patch('securedrop_admin.SiteConfig.validated_input',
side_effect=lambda p, d, v, t: d)
@@ -437,11 +509,15 @@ def get_desc(self, site_config, var):
if desc[0] == var:
return desc
- def verify_desc_consistency(self, site_config, desc):
+ def verify_desc_consistency_optional(self, site_config, desc):
(var, default, etype, prompt, validator, transform) = desc
# verify the default passes validation
assert site_config.user_prompt_config_one(desc, None) == default
assert type(default) == etype
+
+ def verify_desc_consistency(self, site_config, desc):
+ self.verify_desc_consistency_optional(site_config, desc)
+ (var, default, etype, prompt, validator, transform) = desc
with pytest.raises(ValidationError):
site_config.user_prompt_config_one(desc, '')
@@ -467,8 +543,7 @@ def verify_prompt_not_empty(self, site_config, desc):
with pytest.raises(ValidationError):
site_config.user_prompt_config_one(desc, '')
- def verify_prompt_fingerprint(self, site_config, desc):
- self.verify_prompt_not_empty(site_config, desc)
+ def verify_prompt_fingerprint_optional(self, site_config, desc):
fpr = "0123456 789012 34567890123456789ABCDEFABCD"
clean_fpr = site_config.sanitize_fingerprint(fpr)
assert site_config.user_prompt_config_one(desc, fpr) == clean_fpr
@@ -479,10 +554,18 @@ def verify_desc_consistency_allow_empty(self, site_config, desc):
assert site_config.user_prompt_config_one(desc, None) == default
assert type(default) == etype
+ def verify_prompt_fingerprint(self, site_config, desc):
+ self.verify_prompt_not_empty(site_config, desc)
+ self.verify_prompt_fingerprint_optional(site_config, desc)
+
verify_prompt_securedrop_app_gpg_fingerprint = verify_prompt_fingerprint
verify_prompt_ossec_alert_gpg_public_key = verify_desc_consistency
verify_prompt_ossec_gpg_fpr = verify_prompt_fingerprint
verify_prompt_ossec_alert_email = verify_prompt_not_empty
+ verify_prompt_journalist_alert_gpg_public_key = (
+ verify_desc_consistency_optional)
+ verify_prompt_journalist_gpg_fpr = verify_prompt_fingerprint_optional
+ verify_prompt_journalist_alert_email = verify_desc_consistency_optional
verify_prompt_smtp_relay = verify_prompt_not_empty
verify_prompt_smtp_relay_port = verify_desc_consistency
verify_prompt_sasl_domain = verify_desc_consistency_allow_empty
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -2,6 +2,7 @@
import argparse
import io
+import datetime
import logging
import os
import manage
@@ -13,7 +14,8 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-from models import Journalist
+from models import Journalist, db
+from utils import db_helper
YUBIKEY_HOTP = ['cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc',
@@ -170,7 +172,8 @@ def test_reset(journalist_app, test_journo, config):
# We need to override the config to point at the per-test DB
manage.config = config
- return_value = manage.reset(args=None)
+ args = argparse.Namespace(store_dir=config.STORE_DIR)
+ return_value = manage.reset(args=args)
assert return_value == 0
assert os.path.exists(config.DATABASE_FILE)
assert os.path.exists(config.STORE_DIR)
@@ -221,3 +224,28 @@ def test_clean_tmp_removed(config, caplog):
manage.setup_verbosity(args)
manage.clean_tmp(args)
assert 'FILE removed' in caplog.text
+
+
+def test_were_there_submissions_today(source_app, config):
+ original_config = manage.config
+ try:
+ # We need to override the config to point at the per-test DB
+ manage.config = config
+ data_root = config.SECUREDROP_DATA_ROOT
+ args = argparse.Namespace(data_root=data_root,
+ verbose=logging.DEBUG)
+
+ with source_app.app_context():
+ count_file = os.path.join(data_root, 'submissions_today.txt')
+ source, codename = db_helper.init_source_without_keypair()
+ source.last_updated = (datetime.datetime.utcnow() -
+ datetime.timedelta(hours=24*2))
+ db.session.commit()
+ manage.were_there_submissions_today(args)
+ assert io.open(count_file).read() == "0"
+ source.last_updated = datetime.datetime.utcnow()
+ db.session.commit()
+ manage.were_there_submissions_today(args)
+ assert io.open(count_file).read() == "1"
+ finally:
+ manage.config = original_config
diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py
--- a/testinfra/mon/test_ossec.py
+++ b/testinfra/mon/test_ossec.py
@@ -5,21 +5,6 @@
securedrop_test_vars = pytest.securedrop_test_vars
[email protected]('package', [
- 'mailutils',
- 'ossec-server',
- 'postfix',
- 'procmail',
- 'securedrop-ossec-server',
-])
-def test_ossec_package(Package, package):
- """
- Ensure required packages for OSSEC are installed.
- Includes mail utilities and the FPF-maintained metapackage.
- """
- assert Package(package).is_installed
-
-
def test_ossec_connectivity(Command, Sudo):
"""
Ensure ossec-server machine has active connection to the ossec-agent.
@@ -34,44 +19,6 @@ def test_ossec_connectivity(Command, Sudo):
assert c == desired_output
-def test_ossec_gnupg_homedir(File, Sudo):
- """ ensure ossec gpg homedir exists """
- with Sudo():
- f = File("/var/ossec/.gnupg")
- assert f.is_directory
- assert f.user == "ossec"
- assert oct(f.mode) == "0700"
-
-
-# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
-def test_ossec_gnupg(File, Sudo):
- """
- Ensures the test Admin GPG public key is present as file.
- Does not check that it's added to the keyring for the ossec user;
- that's handled by a separate test.
- """
- with Sudo():
- f = File("/var/ossec/test_admin_key.pub")
- assert f.is_file
- assert oct(f.mode) == "0644"
-
-
-def test_ossec_pubkey_in_keyring(Command, Sudo):
- """
- Ensure the test Admin GPG public key exists in the keyring
- within the ossec home directory.
- """
- ossec_gpg_pubkey_info = """pub 2048R/B5E53711 2018-01-25
-uid SecureDrop admin key for tests (do not use in production)
-sub 2048R/EC1DF5D0 2018-01-25""" # noqa
- with Sudo("ossec"):
- c = Command.check_output(
- "gpg --homedir /var/ossec/.gnupg "
- "--list-keys 53E1113AC1F25027BA5D475B1141E2BBB5E53711")
- assert c == ossec_gpg_pubkey_info
-
-
# Permissions don't match between Ansible and OSSEC deb packages postinst.
@pytest.mark.xfail
@pytest.mark.parametrize('keyfile', [
@@ -96,40 +43,6 @@ def test_ossec_keyfiles(File, Sudo, keyfile):
assert f.group == "ossec"
[email protected]('setting', [
- 'VERBOSE=yes',
- 'MAILDIR=/var/mail/',
- 'DEFAULT=$MAILDIR',
- 'LOGFILE=/var/log/procmail.log',
- 'SUBJECT=`formail -xSubject:`',
- ':0 c',
- '*^To:.*root.*',
- '|/var/ossec/send_encrypted_alarm.sh',
-])
-def test_procmail_settings(File, Sudo, setting):
- """
- Ensure procmail settings are correct. These config lines determine
- how the OSSEC email alerts are encrypted and then passed off for sending.
- """
- # Sudo is required to traverse the /var/ossec directory.
- with Sudo():
- f = File("/var/ossec/.procmailrc")
- assert f.contains('^{}$'.format(setting))
-
-
-# Permissions don't match between Ansible and OSSEC deb packages postinst.
[email protected]
-def test_procmail_attrs(File, Sudo):
- """
- Ensure procmail file attributes are specified correctly.
- """
- with Sudo():
- f = File("/var/ossec/.procmailrc")
- assert f.is_file
- assert f.user == "ossec"
- assert oct(f.mode) == "0440"
-
-
# Permissions don't match between Ansible and OSSEC deb packages postinst.
@pytest.mark.xfail
def test_procmail_log(File, Sudo):
diff --git a/testinfra/mon/test_postfix.py b/testinfra/mon/test_postfix.py
--- a/testinfra/mon/test_postfix.py
+++ b/testinfra/mon/test_postfix.py
@@ -25,48 +25,6 @@ def test_postfix_headers(File, header):
assert re.search(regex, f.content, re.M)
[email protected]('setting', [
- 'relayhost = [smtp.gmail.com]:587',
- 'smtp_sasl_auth_enable = yes',
- 'smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd',
- 'smtp_sasl_security_options = noanonymous',
- 'smtp_use_tls = yes',
- 'smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache',
- 'smtp_tls_security_level = secure',
- 'smtp_tls_CApath = /etc/ssl/certs',
- 'smtp_tls_ciphers = high',
- 'smtp_tls_protocols = TLSv1.2 TLSv1.1 TLSv1 !SSLv3 !SSLv2',
- 'myhostname = ossec.server',
- 'myorigin = $myhostname',
- 'smtpd_banner = $myhostname ESMTP $mail_name (Ubuntu)',
- 'biff = no',
- 'append_dot_mydomain = no',
- 'readme_directory = no',
- 'smtp_header_checks = regexp:/etc/postfix/header_checks',
- 'mailbox_command = /usr/bin/procmail',
- 'inet_interfaces = loopback-only',
- 'alias_maps = hash:/etc/aliases',
- 'alias_database = hash:/etc/aliases',
- 'mydestination = $myhostname, localhost.localdomain , localhost',
- 'mynetworks = 127.0.0.0/8 [::ffff:127.0.0.0]/104 [::1]/128',
- 'mailbox_size_limit = 0',
- 'recipient_delimiter = +',
-])
-def test_postfix_settings(File, setting):
- """
- Check all postfix configuration lines. There are technically multiple
- configuration paths regarding the TLS settings, particularly the
- fingerprint verification logic, but only the base default config is tested
- currently.
- """
- f = File("/etc/postfix/main.cf")
- assert f.is_file
- assert f.user == 'root'
- assert oct(f.mode) == "0644"
- regex = '^{}$'.format(re.escape(setting))
- assert re.search(regex, f.content, re.M)
-
-
def test_postfix_generic_maps(File):
"""
Regression test to check that generic Postfix maps are not configured
diff --git a/testinfra/ossec/test_journalist_mail.py b/testinfra/ossec/test_journalist_mail.py
new file mode 100644
--- /dev/null
+++ b/testinfra/ossec/test_journalist_mail.py
@@ -0,0 +1,212 @@
+import pytest
+import testinfra
+import time
+
+
+class TestBase(object):
+
+ @pytest.fixture(autouse=True)
+ def only_mon_staging_sudo(self, host):
+ if host.backend.host != 'mon-staging':
+ pytest.skip()
+
+ with host.sudo():
+ yield
+
+ def ansible(self, host, module, parameters):
+ r = host.ansible(module, parameters, check=False)
+ assert 'exception' not in r
+
+ def run(self, host, cmd):
+ print(host.backend.host + " running: " + cmd)
+ r = host.run(cmd)
+ print(r.stdout)
+ print(r.stderr)
+ return r.rc == 0
+
+ def wait_for(self, fun):
+ success = False
+ for d in (1, 2, 4, 8, 16, 32, 64):
+ if fun():
+ success = True
+ break
+ time.sleep(d)
+ return success
+
+ def wait_for_command(self, host, cmd):
+ return self.wait_for(lambda: self.run(host, cmd))
+
+ #
+ # implementation note: we do not use host.ansible("service", ...
+ # because it only works for services in /etc/init and not those
+ # legacy only found in /etc/init.d such as postfix
+ #
+ def service_started(self, host, name):
+ assert self.run(host, "service {name} start".format(name=name))
+ assert self.wait_for_command(
+ host,
+ "service {name} status | grep -q 'is running'".format(name=name))
+
+ def service_restarted(self, host, name):
+ assert self.run(host, "service {name} restart".format(name=name))
+ assert self.wait_for_command(
+ host,
+ "service {name} status | grep -q 'is running'".format(name=name))
+
+ def service_stopped(self, host, name):
+ assert self.run(host, "service {name} stop".format(name=name))
+ assert self.wait_for_command(
+ host,
+ "service {name} status | grep -q 'not running'".format(name=name))
+
+
+class TestJournalistMail(TestBase):
+
+ def test_procmail(self, host):
+ self.service_started(host, "postfix")
+ today_payload = (
+ 'ossec: output: head -1 /var/lib/securedrop/submissions_today.txt'
+ '\n1234')
+ for (destination, payload) in (
+ ('journalist', today_payload),
+ ('ossec', 'MYGREATPAYLOAD')):
+ assert self.run(host, "postsuper -d ALL")
+ assert self.run(
+ host,
+ "echo -e '{payload}' | "
+ "mail -s 'abc' root@localhost".format(payload=payload))
+ assert self.wait_for_command(
+ host,
+ "mailq | grep -q {destination}@ossec.test".format(
+ destination=destination))
+ self.service_stopped(host, "postfix")
+
+ def test_process_submissions_today(self, host):
+ self.run(host, "/var/ossec/process_submissions_today.sh test_main")
+
+ def test_send_encrypted_alert(self, host):
+ self.service_started(host, "postfix")
+ src = "install_files/ansible-base/roles/ossec/files/test_admin_key.sec"
+ self.ansible(host, "copy",
+ "dest=/tmp/test_admin_key.sec src={src}".format(src=src))
+
+ self.run(host, "gpg --homedir /var/ossec/.gnupg"
+ " --import /tmp/test_admin_key.sec")
+
+ def trigger(who, payload):
+ assert self.run(
+ host, "! mailq | grep -q {who}@ossec.test".format(who=who))
+ assert self.run(
+ host,
+ """
+ ( echo 'Subject: TEST' ; echo ; echo -e '{payload}' ) | \
+ /var/ossec/send_encrypted_alarm.sh {who}
+ """.format(who=who, payload=payload))
+ assert self.wait_for_command(
+ host, "mailq | grep -q {who}@ossec.test".format(who=who))
+
+ #
+ # encrypted mail to journalist or ossec contact
+ #
+ for (who, payload, expected) in (
+ ('journalist', 'ossec: output\n1', '1'),
+ ('ossec', 'MYGREATPAYLOAD', 'MYGREATPAYLOAD')):
+ assert self.run(host, "postsuper -d ALL")
+ trigger(who, payload)
+ assert self.run(
+ host,
+ """
+ job=$(mailq | sed -n -e '2p' | cut -f1 -d ' ')
+ postcat -q $job | tee /dev/stderr | \
+ gpg --homedir /var/ossec/.gnupg --decrypt 2>&1 | \
+ grep -q {expected}
+ """.format(expected=expected))
+ #
+ # failure to encrypt must trigger an emergency mail to ossec contact
+ #
+ try:
+ assert self.run(host, "postsuper -d ALL")
+ assert self.run(host, "mv /usr/bin/gpg /usr/bin/gpg.save")
+ trigger(who, 'MYGREATPAYLOAD')
+ assert self.run(
+ host,
+ """
+ job=$(mailq | sed -n -e '2p' | cut -f1 -d ' ')
+ postcat -q $job | grep -q 'Failed to encrypt OSSEC alert'
+ """)
+ finally:
+ assert self.run(host, "mv /usr/bin/gpg.save /usr/bin/gpg")
+ self.service_stopped(host, "postfix")
+
+ def test_missing_journalist_alert(self, host):
+ #
+ # missing journalist mail does nothing
+ #
+ assert self.run(
+ host,
+ """
+ JOURNALIST_EMAIL= \
+ bash -x /var/ossec/send_encrypted_alarm.sh journalist | \
+ tee /dev/stderr | \
+ grep -q 'no notification sent'
+ """)
+
+ # https://ossec-docs.readthedocs.io/en/latest/manual/rules-decoders/testing.html
+ def test_ossec_rule_journalist(self, host):
+ assert self.run(host, """
+ set -ex
+ l="ossec: output: 'head -1 /var/lib/securedrop/submissions_today.txt"
+ echo "$l" | /var/ossec/bin/ossec-logtest
+ echo "$l" | /var/ossec/bin/ossec-logtest -U '400600:1:ossec'
+ """)
+
+ def test_journalist_mail_notification(self, host):
+ mon = host
+ app = testinfra.host.Host.get_host(
+ 'ansible://app-staging',
+ ansible_inventory=host.backend.ansible_inventory)
+ #
+ # run ossec & postfix on mon
+ #
+ self.service_started(mon, "postfix")
+ self.service_started(mon, "ossec")
+
+ #
+ # ensure the submission_today.txt file exists
+ #
+ with app.sudo():
+ assert self.run(app, """
+ cd /var/www/securedrop
+ ./manage.py were-there-submissions-today
+ test -f /var/lib/securedrop/submissions_today.txt
+ """)
+
+ #
+ # empty the mailq on mon in case there were leftovers
+ #
+ assert self.run(mon, "postsuper -d ALL")
+
+ #
+ # the command fires every time ossec starts,
+ # regardless of the frequency
+ #
+ with app.sudo():
+ self.service_restarted(app, "ossec")
+
+ #
+ # wait until at exactly one notification is sent
+ #
+ assert self.wait_for_command(
+ mon,
+ "mailq | grep -q [email protected]")
+ assert self.run(
+ mon,
+ "test 1 = $(mailq | grep [email protected] | wc -l)")
+
+ #
+ # teardown the ossec and postfix on mon and app
+ #
+ self.service_stopped(mon, "postfix")
+ self.service_stopped(mon, "ossec")
+ with app.sudo():
+ self.service_stopped(app, "ossec")
diff --git a/testinfra/test.py b/testinfra/test.py
--- a/testinfra/test.py
+++ b/testinfra/test.py
@@ -24,6 +24,7 @@ def get_target_roles(target_host):
'testinfra/app-code',
'testinfra/common',
'testinfra/development/test_xvfb.py'],
+ "staging": ['testinfra/ossec'],
"mon-staging": ['testinfra/mon',
'testinfra/common'],
"mon-prod": ['testinfra/mon']}
@@ -92,6 +93,37 @@ def run_testinfra(target_host, verbose=True):
{target_roles}
""".lstrip().rstrip()
+ elif target_host == 'staging':
+ if "CI_SSH_CONFIG" in os.environ:
+ ssh_config_path = '--ssh-config ' + os.environ["CI_SSH_CONFIG"]
+ inventory = ""
+ junit = """
+ --junit-xml=./{target_host}-results.xml \
+ --junit-prefix={target_host} \
+ """
+ else:
+ ssh_config_path = ""
+ inventory = """
+ --ansible-inventory \
+ .vagrant/provisioners/ansible/inventory/vagrant_ansible_inventory \
+ """
+ junit = ""
+ testinfra_command_template = """
+testinfra \
+ -vv \
+ --connection ansible \
+ {testinfra_args} \
+ {ssh_config_path} \
+ {inventory} \
+ {junit} \
+ --hosts app-staging,mon-staging \
+ {target_roles}
+""".format(ssh_config_path=ssh_config_path,
+ inventory=inventory,
+ target_roles=" ".join(target_roles),
+ junit=junit,
+ testinfra_args=os.environ.get('TESTINFRA_ARGS', ''))
+
else:
ssh_config_path = ""
testinfra_command_template = """
| Submission email notifications
Checking SecureDrop every day is a huge hassle. Instead, it should tell me about new submissions, but without leaking any information.
Every day at 6am, SecureDrop sends me an encrypted email noting the number of unread submissions and the number of submissions received in the last 24 hours, even if these numbers are zero. If submission categories are used, each category's βownerβ is sent the same info about their category.
| Just a thought, but it may be unfounded: Wouldn't just stating the number of recent submissions leak information about the time in which they were submitted? Especially since they are some what ordered. This would be most drastic in the case where an outlet has a small number of submissions. Let's say that an outlet has a single source that submits a trove of documents of the course of a few weeks. That source only works two days of the week and is equally likely to submit documents on either day. Doesn't the fact that emails stating when those documents were submitted (even though it is only a 24 hour window) greatly narrow down who could leak the documents since the emails would only show new documents within 24 hours after the source submitted them?
Strong encryption and a constant stream of emails may be enough to mitigate these concerns, but guaranteeing the source's anonymity should be of greater concern than journalist convenience. These are just my thoughts, so don't put too much weight into them.
I don't see how an encrypted email that gets sent out every day whether or not there's a submission reveals any information whatsoever.
@pwplus I don't understand your critique.
The goal here is to avoid leaking information about source behavior to an adversary who is capable of performing traffic analysis on the status emails. The easiest way to do that, while still being convenient for journalists, is to do what @flamsmark suggested in his initial comment, plus a little more. You need to:
1. Send the email on a regular schedule, regardless of the activity on the server. This interval could be configurable by the journalist (every day, 2 days, week, etc.), it simply must be unrelated to any source activity.
2. Encrypt the email end-to-end (obviously)
At this point, the only remaining signal to an adversary is the size of the encrypted email. Each notification email should be padded to ensure that all emails have the same size, regardless of their contents. This requires a unified and well-defined format for all possible notification emails (depending on categories, roles, etc.)
The only subtle point is, once padded, we need to avoid compressing the emails before encryption. Otherwise, an adversary could perform a traffic analysis attack on the size of the emails, similar to the [CRIME](https://en.wikipedia.org/wiki/CRIME) attack on HTTPS. Similar to CRIME, allowing compression in this context would enable a chosen plaintext attack because:
1. The email format will be well-defined (open source)
2. An adversary can perform actions on the Source Interface that will deterministically influence the contents of the emails.
Once encrypted, any further compression (e.g. applied by the mail server or transport) does not lead to a potential attack. I don't think they'll ever be so large that compression makes a meaningful impact on their storage or transmission.
Note that GPG compresses plaintext by default (this is actually a terrible idea, but it's GPG so not a big surprise there). We'd need to pass `-z 0` to gpg to avoid compressing the plaintext in this context.
If the notification is really simple, the size shouldn't indicate much. βThere are XX unread submissions of which XX where submitted in the last 24 hours.β doesn't leave a lot of room for adventure.
Or pad notifications with poetry and/or gifs.
Agree that this would be a nice feature and @flamsmark's proposal would be a good way to implement it to mitigate leaking sensitive info. However, before going forward we should keep in mind that this potentially could diminish one of the features of SecureDrop which we talk about when describing the benefits to others: newsrooms often have compromised networks, and that right now, nothing about SD submissions ever touches that network. Right on our front page, we say SecureDrop is designed to be used in high-risk environments like that.
The current workflow requires the journalist log onto their regular workstation using Tails. If we implement this feature, the email would come to their normal workstation, and even if the alert is sent using PGP, if their computer or network is compromised, it is possible for the attacker to either get the PGP key off that computer or wait for the journalist to decrypt it and read it then.
Of course one could say the same if their firmware is compromised and an attacker can somehow see what they're doing on Tails. However, that is at least a much more sophisticated attack.
In @flamsmark's proposal, the info would be less specific because it only sends it once per day and does not specify the source. Personally I do think the chances are low that this information would be useful to an attacker, and we might decide that the trade off is worth it, but we should at least be conscious of it.
@trevortimm Good point, thanks for bringing it up. It depends on the content of the notification emails. If they are, as @flamsmark has suggested, simply a summary of meta-activity, e.g.
```
7 new sources
15 new submissions
```
then it's not a concern. If the emails are more detailed (which would make them more useful/helpful), e.g.
```
Since you last logged in, there are 7 new sources.
You have replies from:
* source name 1
* source name 2
* ...
You have new submissions from:
* source name 1
* source name 2
* ...
```
I think the 2nd form would be much more useful, and the source names are designed to be random pseudonyms so it _might_ be acceptable to expose them to a less trusted security domain (the journalist's computer). It does leak some information (metadata) to an adversary who can compromise the journalist's computer.
What I was trying to say is that if the emails were somehow compromised (court order or a hacked email account, etc.), having a 24-hour creation time could greatly narrow down the possible list of sources if a company or agency were able to get those emails. My example was just to show that if there were only a single source, then a company with those emails would have a pretty short list of who to look at as possible sources. I can see how that is not so clear from my earlier comment.
Like I said, encryption and now the things Garrett mentioned are possibly enough to protect from this.
Right now, the biggest hurdle is just getting people to check SecureDrop β they're busy and (especially during pre-launch testing) the probability of seeing an empty document interface day after day is demoralizing. Adding more info might be more useful someday I guess, but what I really want is a push notification that says whether or not to do the work of logging in to check for submissions. Minimal information is fine. Even if it just said βcheck SecureDrop" or βdon't check SecureDrop", that'd be great.
@flamsmark Thanks, that's helpful feedback! Perfect is the enemy of the good, and all that :grin:
I seem to have duplicated Issue #544 .
Assuming you have some journos that will check in daily regardless, and others that do so more randomly due to their other workload, another possible way of addressing this would be if SD only sent notifications if a journo had not checked in for, say, x days after a submission (let's say, for example's sake, 2 days).
I think it preferable for notification behavior to be completely undetermined by either submissions or journalists logging in.
Going to work on a very simple implementation of this for the hackathon.
Per conversation with @fowlslegs and @redshiftzero, I'm going to implement this as a daily cronjob that runs a python script that parses through the OSSEC logs, looks for changes related to new submissions, and sends an encrypted email either saying "There are no new submissions" and "There are new submissions" .
@fowlslegs @redshiftzero This may not actually work as we originally thought.
From: http://ossec-docs.readthedocs.io/en/latest/faq/ossec.html
```
OSSEC does not store the logs sent to it by default. If a log does
not trigger an alert it is discarded, and logs that do trigger alerts
are stored with the alerts in /var/ossec/logs/alerts.
```
This means that for this to work from the monitoring server, we would need to configure the changes to trigger alerts. At that point, you would lose the ability to hide when the notification would come in + you would have the problem discussed this morning of mixing in application-level functionality with ops-specific functionality.
@ajvb OSSEC does not seem the right tool for this job. What about a POC Python script that:
1. checks the sqlite db for any "unread" submissions
2. outputs that number as an integer, even if it's zero
Bonus points if it returns a non-zero exit code for zero unread submissions, and a zero exit code if unread submissions were found. Then we can rather trivially hook up an alerting mechanism, such as piping to encrypted email to the instance Admin.
To be clear, we have no method of contacting registered Journalists outside of the Document Interface. These alerts at present would apply only to the Admin, who already receives OSSEC alerts via encrypted email.
@conorsch
So if this could use `db.py`, then:
``` python
#!/usr/bin/python
from sys import exit
from db import Submission
if __name__ == "__main__":
n = Submission.query.filter_by(downloaded=False).count()
print n
if n > 0:
exit(0)
else:
exit(1)
```
> Then we can rather trivially hook up an alerting mechanism, such as piping to encrypted email to the instance Admin.
Would you mind explaining how this could be done within the current infrastructure? Wouldn't this require sending the email from the application server or am I missing something?
We'd want all DI users to receive these notifications, not just admins. How does the admin manage access to email notifications in a way that doesn't threaten the separation of the mon and app servers? It'd be nice if when a journalist was deleted from the DI, they stopped receiving emails, but we can't just allow the journalist app running on the app server to set values on the mon server. We also shouldn't install a MTA on the app server.
> We'd want all DI users to receive these notifications, not just admins.
Agreed, but may be harder. Might be easier to do just admins for the MVP?
> It'd be nice if when a journalist was deleted from the DI, they stopped receiving emails, but we can't just allow the journalist app running on the app server to set values on the mon server. We also shouldn't install a MTA on the app server.
Not sure how we can get away with both of these:
- App server can't set values on the mon server
- shouldn't install a MTA on the app server
If OSSEC is not the right tool for this (which I agree that it is not), then there needs to be someway for the monitoring server to pull data from the app server.
What is the safest way that the monitoring server can get the boolean response from the application server on whether or not there is new data?
Or, what is the safest way to send emails from the app server?
I feel like the safer route currently is to have the monitoring server get the data from the application server as it already is getting data from it. Can we mimic the safety measures of ossec within the application/API we provide for getting this data?
I say this often, but this is yet another problem that would be much easier to solve via the reading room client. The problem is that we want to provide a unified management interface to the admin, while storing state on two backends that must remain compartmentalized. We can't use code provided by one of the backends to do this (i.e., the DI) because this would break the compartmentalization, so we must provide a native application via a signed package. Anyway, back to addressing this for the current SD landscape....
How about we build a little tool for the admin workstation that lets the admin add and remove users from the email notifications list, `manage-notifications`? We could of course make a desktop icon to launch it. The script downloads a plaintext configuration file from the monitor server over SSH, and opens it up in some friendly, simple GUI editor (maybe even Zenity). The format is very simple:
```
email1 fingerprint1
email2 fingerprint2
```
After modification, the contents are copied back to the monitor server. We could do some sanity checking to make sure each field at least appears to be a valid email address or PGP fingerprint.
When journalists are added or deleted via the admin interface, we should change the message flashed to inform the admin they might want to modify this list.
As to the email notifications themselves. I think the app server should create a new, simple log that gets rotated daily, and provides a boolean response to whether there are unread messages. I think we should use OSSEC to parse this log, and a new command (via a Python script) to send email notifications.
> How about we build a little tool for the admin workstation that lets the admin add and remove users from the email notifications list, manage-notifications? We could of course make a desktop icon to launch it. The script downloads a plaintext configuration file from the monitor server over SSH, and opens it up in some friendly, simple GUI editor (maybe even Zenity).
I like this, but it seems frail and prone to weird bugs. Is it not possible to have the MVP require you to SSH into a the machine and edit the file using `vim` or similar?
> As to the email notifications themselves. I think the app server should create a new, simple log that gets rotated daily, and provides a boolean response to whether there are unread messages.
π
> I think we should use OSSEC to parse this log
I don't believe OSSEC is the right tool for this, per my former comment: https://github.com/freedomofpress/securedrop/issues/1195#issuecomment-258708226
This probably breaks the compartmentalization mentioned in your previous comment, but would it be possible have the mon server simply `scp`/download the file, read it, and then delete it?
> a new command (via a Python script) to send email notifications.
π
> I like this, but it seems frail and prone to weird bugs. Is it not possible to have the MVP require you to SSH into a the machine and edit the file using vim or similar?
Yeah, I think that's probably the way to do this for now, lest we invest too much time into it, when it's important we begin to focus our energy on more long-term solutions.
I think we can have OSSEC call a [customized active response](https://ossec-docs.readthedocs.io/en/latest/manual/ar/ar-custom.html), that is not really active. What I mean by this is that it calls a Python script that perhaps maintains a simple binary state (e.g., a text file that should only contain a 0 or 1 at any time). Then we create a second script that sends one of two email messages, based on this binary state, to an admin-maintained list of users. This second script is run as a cron job at a set time. This could be maintained/ provisioned by Ansible--the list of users to email, the time to email them, and whether email notifications are wanted at all--by adding new vars.
I still think this is a hacky solution, but at least it adds minimal code and doesn't introduce new communication processes between the app and mon servers. I need to get back to this when I have more time to play with OSSEC + dig through the documentation. There's gotta be a way to do this safe and easy.
| 2018-01-07T13:46:29Z | [] | [] |
freedomofpress/securedrop | 2,878 | freedomofpress__securedrop-2878 | [
"2876"
] | 958b6bf7343a1510d4bd5c4cdc9f26888e27c394 | diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -34,9 +34,12 @@ def manage_config():
static_filepath = os.path.join(config.SECUREDROP_ROOT,
"static/i/logo.png")
f.save(static_filepath)
- flash(gettext("Image updated."), "notification")
+ flash(gettext("Image updated."), "logo-success")
return redirect(url_for("admin.manage_config"))
else:
+ for field, errors in form.errors.items():
+ for error in errors:
+ flash(error, "logo-error")
return render_template("config.html", form=form)
@view.route('/add', methods=('GET', 'POST'))
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -746,7 +746,7 @@ def test_logo_upload_with_valid_image_succeeds(self):
data=form.data,
follow_redirects=True)
- self.assertMessageFlashed("Image updated.", "notification")
+ self.assertMessageFlashed("Image updated.", "logo-success")
finally:
# Restore original image to logo location for subsequent tests
with open(logo_image_location, 'w') as logo_file:
@@ -761,7 +761,7 @@ def test_logo_upload_with_invalid_filetype_fails(self):
resp = self.client.post(url_for('admin.manage_config'),
data=form.data,
follow_redirects=True)
-
+ self.assertMessageFlashed("Upload images only.", "logo-error")
self.assertIn('Upload images only.', resp.data)
def test_logo_upload_with_empty_input_field_fails(self):
@@ -774,6 +774,7 @@ def test_logo_upload_with_empty_input_field_fails(self):
data=form.data,
follow_redirects=True)
+ self.assertMessageFlashed("File required.", "logo-error")
self.assertIn('File required.', resp.data)
@patch('journalist.app.logger.error')
| Admin interface: Improve UX on successful logo upload
## Description
As pointed out in #2844, the error message and success messages should ideally be consistent in terms of where they show up on the page. We have error messages on many of our forms displayed below the form (intended to be as close as possible to http://uipatterns.io/input-validation/form-validation):
![screen shot 2018-01-12 at 12 32 48 pm](https://user-images.githubusercontent.com/7832803/34894160-15a6eae2-f795-11e7-9f0b-cb70da9e63af.png)
but the successful logo upload user feedback occurs in a flashed message at the top of the screen. It would be more intuitive for the user to get user feedback next to the "UPDATE LOGO" button itself, e.g. a green checkmark with "Logo updated" or similar.
## User Stories
As a SecureDrop administrator, I want a successful action to produce user feedback near the area of the webpage I am on.
| 2018-01-16T21:09:17Z | [] | [] |
|
freedomofpress/securedrop | 2,895 | freedomofpress__securedrop-2895 | [
"2866"
] | b413bd0d0fef6b86bdf779b0b3ac2b7e180424fa | diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -1,475 +1,5 @@
-import os
-import datetime
-import base64
-import binascii
+# -*- coding: utf-8 -*-
-# Find the best implementation available on this platform
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
+from flask_sqlalchemy import SQLAlchemy
-from sqlalchemy import create_engine, ForeignKey
-from sqlalchemy.orm import scoped_session, sessionmaker, relationship, backref
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import Column, Integer, String, Boolean, DateTime, Binary
-from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
-from jinja2 import Markup
-
-import scrypt
-import pyotp
-
-import qrcode
-# Using svg because it doesn't require additional dependencies
-import qrcode.image.svg
-
-import config
-import store
-
-
-LOGIN_HARDENING = True
-# Unfortunately, the login hardening measures mess with the tests in
-# non-deterministic ways. TODO rewrite the tests so we can more
-# precisely control which code paths are exercised.
-if os.environ.get('SECUREDROP_ENV') == 'test':
- LOGIN_HARDENING = False
-
-# http://flask.pocoo.org/docs/patterns/sqlalchemy/
-
-if config.DATABASE_ENGINE == "sqlite":
- engine = create_engine(
- config.DATABASE_ENGINE + ":///" +
- config.DATABASE_FILE
- )
-
- engine.execute('PRAGMA secure_delete = ON')
- engine.execute('PRAGMA auto_vacuum = FULL')
-else: # pragma: no cover
- engine = create_engine(
- config.DATABASE_ENGINE + '://' +
- config.DATABASE_USERNAME + ':' +
- config.DATABASE_PASSWORD + '@' +
- config.DATABASE_HOST + '/' +
- config.DATABASE_NAME, echo=False
- )
-
-db_session = scoped_session(sessionmaker(autocommit=False,
- autoflush=False,
- bind=engine))
-Base = declarative_base()
-Base.query = db_session.query_property()
-
-
-def get_one_or_else(query, logger, failure_method):
- try:
- return query.one()
- except MultipleResultsFound as e:
- logger.error(
- "Found multiple while executing %s when one was expected: %s" %
- (query, e, ))
- failure_method(500)
- except NoResultFound as e:
- logger.error("Found none when one was expected: %s" % (e,))
- failure_method(404)
-
-
-class Source(Base):
- __tablename__ = 'sources'
- id = Column(Integer, primary_key=True)
- filesystem_id = Column(String(96), unique=True)
- journalist_designation = Column(String(255), nullable=False)
- flagged = Column(Boolean, default=False)
- last_updated = Column(DateTime, default=datetime.datetime.utcnow)
- star = relationship("SourceStar", uselist=False, backref="source")
-
- # sources are "pending" and don't get displayed to journalists until they
- # submit something
- pending = Column(Boolean, default=True)
-
- # keep track of how many interactions have happened, for filenames
- interaction_count = Column(Integer, default=0, nullable=False)
-
- # Don't create or bother checking excessively long codenames to prevent DoS
- NUM_WORDS = 7
- MAX_CODENAME_LEN = 128
-
- def __init__(self, filesystem_id=None, journalist_designation=None):
- self.filesystem_id = filesystem_id
- self.journalist_designation = journalist_designation
-
- def __repr__(self):
- return '<Source %r>' % (self.journalist_designation)
-
- @property
- def journalist_filename(self):
- valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in self.journalist_designation.lower().replace(
- ' ', '_') if c in valid_chars])
-
- def documents_messages_count(self):
- try:
- return self.docs_msgs_count
- except AttributeError:
- self.docs_msgs_count = {'messages': 0, 'documents': 0}
- for submission in self.submissions:
- if submission.filename.endswith('msg.gpg'):
- self.docs_msgs_count['messages'] += 1
- elif (submission.filename.endswith('doc.gz.gpg') or
- submission.filename.endswith('doc.zip.gpg')):
- self.docs_msgs_count['documents'] += 1
- return self.docs_msgs_count
-
- @property
- def collection(self):
- """Return the list of submissions and replies for this source, sorted
- in ascending order by the filename/interaction count."""
- collection = []
- collection.extend(self.submissions)
- collection.extend(self.replies)
- collection.sort(key=lambda x: int(x.filename.split('-')[0]))
- return collection
-
-
-class Submission(Base):
- __tablename__ = 'submissions'
- id = Column(Integer, primary_key=True)
- source_id = Column(Integer, ForeignKey('sources.id'))
- source = relationship(
- "Source",
- backref=backref("submissions", order_by=id, cascade="delete")
- )
-
- filename = Column(String(255), nullable=False)
- size = Column(Integer, nullable=False)
- downloaded = Column(Boolean, default=False)
-
- def __init__(self, source, filename):
- self.source_id = source.id
- self.filename = filename
- self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
-
- def __repr__(self):
- return '<Submission %r>' % (self.filename)
-
-
-class Reply(Base):
- __tablename__ = "replies"
- id = Column(Integer, primary_key=True)
-
- journalist_id = Column(Integer, ForeignKey('journalists.id'))
- journalist = relationship(
- "Journalist",
- backref=backref(
- 'replies',
- order_by=id))
-
- source_id = Column(Integer, ForeignKey('sources.id'))
- source = relationship(
- "Source",
- backref=backref("replies", order_by=id, cascade="delete")
- )
-
- filename = Column(String(255), nullable=False)
- size = Column(Integer, nullable=False)
-
- def __init__(self, journalist, source, filename):
- self.journalist_id = journalist.id
- self.source_id = source.id
- self.filename = filename
- self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
-
- def __repr__(self):
- return '<Reply %r>' % (self.filename)
-
-
-class SourceStar(Base):
- __tablename__ = 'source_stars'
- id = Column("id", Integer, primary_key=True)
- source_id = Column("source_id", Integer, ForeignKey('sources.id'))
- starred = Column("starred", Boolean, default=True)
-
- def __eq__(self, other):
- if isinstance(other, SourceStar):
- return (self.source_id == other.source_id and
- self.id == other.id and self.starred == other.starred)
- return NotImplemented
-
- def __init__(self, source, starred=True):
- self.source_id = source.id
- self.starred = starred
-
-
-class InvalidUsernameException(Exception):
-
- """Raised when a user logs in with an invalid username"""
-
-
-class LoginThrottledException(Exception):
-
- """Raised when a user attempts to log in
- too many times in a given time period"""
-
-
-class WrongPasswordException(Exception):
-
- """Raised when a user logs in with an incorrect password"""
-
-
-class BadTokenException(Exception):
-
- """Raised when a user logins in with an incorrect TOTP token"""
-
-
-class PasswordError(Exception):
-
- """Generic error for passwords that are invalid.
- """
-
-
-class InvalidPasswordLength(PasswordError):
- """Raised when attempting to create a Journalist or log in with an invalid
- password length.
- """
-
- def __init__(self, password):
- self.pw_len = len(password)
-
- def __str__(self):
- if self.pw_len > Journalist.MAX_PASSWORD_LEN:
- return "Password too long (len={})".format(self.pw_len)
- if self.pw_len < Journalist.MIN_PASSWORD_LEN:
- return "Password needs to be at least {} characters".format(
- Journalist.MIN_PASSWORD_LEN
- )
-
-
-class NonDicewarePassword(PasswordError):
-
- """Raised when attempting to validate a password that is not diceware-like
- """
-
-
-class Journalist(Base):
- __tablename__ = "journalists"
- id = Column(Integer, primary_key=True)
- username = Column(String(255), nullable=False, unique=True)
- pw_salt = Column(Binary(32))
- pw_hash = Column(Binary(256))
- is_admin = Column(Boolean)
-
- otp_secret = Column(String(16), default=pyotp.random_base32)
- is_totp = Column(Boolean, default=True)
- hotp_counter = Column(Integer, default=0)
- last_token = Column(String(6))
-
- created_on = Column(DateTime, default=datetime.datetime.utcnow)
- last_access = Column(DateTime)
- login_attempts = relationship(
- "JournalistLoginAttempt",
- backref="journalist")
-
- MIN_USERNAME_LEN = 3
-
- def __init__(self, username, password, is_admin=False, otp_secret=None):
- self.check_username_acceptable(username)
- self.username = username
- self.set_password(password)
- self.is_admin = is_admin
-
- if otp_secret:
- self.set_hotp_secret(otp_secret)
-
- def __repr__(self):
- return "<Journalist {0}{1}>".format(
- self.username,
- " [admin]" if self.is_admin else "")
-
- def _gen_salt(self, salt_bytes=32):
- return os.urandom(salt_bytes)
-
- _SCRYPT_PARAMS = dict(N=2**14, r=8, p=1)
-
- def _scrypt_hash(self, password, salt, params=None):
- if not params:
- params = self._SCRYPT_PARAMS
- return scrypt.hash(str(password), salt, **params)
-
- MAX_PASSWORD_LEN = 128
- MIN_PASSWORD_LEN = 14
-
- def set_password(self, password):
- self.check_password_acceptable(password)
-
- # Don't do anything if user's password hasn't changed.
- if self.pw_hash and self.valid_password(password):
- return
-
- self.pw_salt = self._gen_salt()
- self.pw_hash = self._scrypt_hash(password, self.pw_salt)
-
- @classmethod
- def check_username_acceptable(cls, username):
- if len(username) < cls.MIN_USERNAME_LEN:
- raise InvalidUsernameException(
- 'Username "{}" must be at least {} characters long.'
- .format(username, cls.MIN_USERNAME_LEN))
-
- @classmethod
- def check_password_acceptable(cls, password):
- # Enforce a reasonable maximum length for passwords to avoid DoS
- if len(password) > cls.MAX_PASSWORD_LEN:
- raise InvalidPasswordLength(password)
-
- # Enforce a reasonable minimum length for new passwords
- if len(password) < cls.MIN_PASSWORD_LEN:
- raise InvalidPasswordLength(password)
-
- # Ensure all passwords are "diceware-like"
- if len(password.split()) < 7:
- raise NonDicewarePassword()
-
- def valid_password(self, password):
- # Avoid hashing passwords that are over the maximum length
- if len(password) > self.MAX_PASSWORD_LEN:
- raise InvalidPasswordLength(password)
- # No check on minimum password length here because some passwords
- # may have been set prior to setting the minimum password length.
- return pyotp.utils.compare_digest(
- self._scrypt_hash(password, self.pw_salt),
- self.pw_hash)
-
- def regenerate_totp_shared_secret(self):
- self.otp_secret = pyotp.random_base32()
-
- def set_hotp_secret(self, otp_secret):
- self.is_totp = False
- self.otp_secret = base64.b32encode(
- binascii.unhexlify(
- otp_secret.replace(
- " ",
- "")))
- self.hotp_counter = 0
-
- @property
- def totp(self):
- return pyotp.TOTP(self.otp_secret)
-
- @property
- def hotp(self):
- return pyotp.HOTP(self.otp_secret)
-
- @property
- def shared_secret_qrcode(self):
- uri = self.totp.provisioning_uri(
- self.username,
- issuer_name="SecureDrop")
-
- qr = qrcode.QRCode(
- box_size=15,
- image_factory=qrcode.image.svg.SvgPathImage
- )
- qr.add_data(uri)
- img = qr.make_image()
-
- svg_out = StringIO()
- img.save(svg_out)
- return Markup(svg_out.getvalue())
-
- @property
- def formatted_otp_secret(self):
- """The OTP secret is easier to read and manually enter if it is all
- lowercase and split into four groups of four characters. The secret is
- base32-encoded, so it is case insensitive."""
- sec = self.otp_secret
- chunks = [sec[i:i + 4] for i in range(0, len(sec), 4)]
- return ' '.join(chunks).lower()
-
- def _format_token(self, token):
- """Strips from authentication tokens the whitespace
- that many clients add for readability"""
- return ''.join(token.split())
-
- def verify_token(self, token):
- token = self._format_token(token)
-
- # Store latest token to prevent OTP token reuse
- self.last_token = token
- db_session.commit()
-
- if self.is_totp:
- # Also check the given token against the previous and next
- # valid tokens, to compensate for potential time skew
- # between the client and the server. The total valid
- # window is 1:30s.
- return self.totp.verify(token, valid_window=1)
- else:
- for counter_val in range(
- self.hotp_counter,
- self.hotp_counter + 20):
- if self.hotp.verify(token, counter_val):
- self.hotp_counter = counter_val + 1
- db_session.commit()
- return True
- return False
-
- _LOGIN_ATTEMPT_PERIOD = 60 # seconds
- _MAX_LOGIN_ATTEMPTS_PER_PERIOD = 5
-
- @classmethod
- def throttle_login(cls, user):
- # Record the login attempt...
- login_attempt = JournalistLoginAttempt(user)
- db_session.add(login_attempt)
- db_session.commit()
-
- # ...and reject it if they have exceeded the threshold
- login_attempt_period = datetime.datetime.utcnow() - \
- datetime.timedelta(seconds=cls._LOGIN_ATTEMPT_PERIOD)
- attempts_within_period = JournalistLoginAttempt.query.filter(
- JournalistLoginAttempt.timestamp > login_attempt_period).all()
- if len(attempts_within_period) > cls._MAX_LOGIN_ATTEMPTS_PER_PERIOD:
- raise LoginThrottledException(
- "throttled ({} attempts in last {} seconds)".format(
- len(attempts_within_period),
- cls._LOGIN_ATTEMPT_PERIOD))
-
- @classmethod
- def login(cls, username, password, token):
- try:
- user = Journalist.query.filter_by(username=username).one()
- except NoResultFound:
- raise InvalidUsernameException(
- "invalid username '{}'".format(username))
-
- if LOGIN_HARDENING:
- cls.throttle_login(user)
-
- # Prevent TOTP token reuse
- if user.last_token is not None:
- if pyotp.utils.compare_digest(token, user.last_token):
- raise BadTokenException("previously used token "
- "{}".format(token))
- if not user.verify_token(token):
- raise BadTokenException("invalid token")
- if not user.valid_password(password):
- raise WrongPasswordException("invalid password")
- return user
-
-
-class JournalistLoginAttempt(Base):
-
- """This model keeps track of journalist's login attempts so we can
- rate limit them in order to prevent attackers from brute forcing
- passwords or two-factor tokens."""
- __tablename__ = "journalist_login_attempt"
- id = Column(Integer, primary_key=True)
- timestamp = Column(DateTime, default=datetime.datetime.utcnow)
- journalist_id = Column(Integer, ForeignKey('journalists.id'))
-
- def __init__(self, journalist):
- self.journalist_id = journalist.id
-
-
-# Declare (or import) models before init_db
-def init_db():
- Base.metadata.create_all(bind=engine)
+db = SQLAlchemy()
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -11,7 +11,8 @@
import template_filters
import version
-from db import db_session, Journalist
+from db import db
+from models import Journalist
from journalist_app import account, admin, main, col
from journalist_app.utils import get_source, logged_in
@@ -28,6 +29,21 @@ def create_app(config):
CSRFProtect(app)
Environment(app)
+ if config.DATABASE_ENGINE == "sqlite":
+ db_uri = (config.DATABASE_ENGINE + ":///" +
+ config.DATABASE_FILE)
+ else:
+ db_uri = (
+ config.DATABASE_ENGINE + '://' +
+ config.DATABASE_USERNAME + ':' +
+ config.DATABASE_PASSWORD + '@' +
+ config.DATABASE_HOST + '/' +
+ config.DATABASE_NAME
+ )
+ app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+ app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
+ db.init_app(app)
+
@app.errorhandler(CSRFError)
def handle_csrf_error(e):
# render the message first to ensure it's localized.
@@ -63,12 +79,6 @@ def autoversion_filter(filename):
versioned_filename = "{0}?v={1}".format(filename, timestamp)
return versioned_filename
- @app.teardown_appcontext
- def shutdown_session(exception=None):
- """Automatically remove database sessions at the end of the request, or
- when the application shuts down"""
- db_session.remove()
-
@app.before_request
def setup_g():
"""Store commonly used values in Flask's special g object"""
diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py
--- a/securedrop/journalist_app/account.py
+++ b/securedrop/journalist_app/account.py
@@ -4,7 +4,7 @@
flash, session)
from flask_babel import gettext
-from db import db_session
+from db import db
from journalist_app.utils import (make_password, set_diceware_password,
validate_user)
@@ -53,7 +53,7 @@ def new_two_factor():
def reset_two_factor_totp():
g.user.is_totp = True
g.user.regenerate_totp_shared_secret()
- db_session.commit()
+ db.session.commit()
return redirect(url_for('account.new_two_factor'))
@view.route('/reset-2fa-hotp', methods=['POST'])
@@ -61,7 +61,7 @@ def reset_two_factor_hotp():
otp_secret = request.form.get('otp_secret', None)
if otp_secret:
g.user.set_hotp_secret(otp_secret)
- db_session.commit()
+ db.session.commit()
return redirect(url_for('account.new_two_factor'))
else:
return render_template('account_edit_hotp_secret.html')
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -8,8 +8,8 @@
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
-from db import (db_session, Journalist, InvalidUsernameException,
- PasswordError)
+from db import db
+from models import Journalist, InvalidUsernameException, PasswordError
from journalist_app.decorators import admin_required
from journalist_app.utils import (make_password, commit_account_changes,
set_diceware_password)
@@ -60,8 +60,8 @@ def add_user():
password=password,
is_admin=is_admin,
otp_secret=otp_secret)
- db_session.add(new_user)
- db_session.commit()
+ db.session.add(new_user)
+ db.session.commit()
except PasswordError:
flash(gettext(
'There was an error with the autogenerated password. '
@@ -71,7 +71,7 @@ def add_user():
form_valid = False
flash('Invalid username: ' + str(e), "error")
except IntegrityError as e:
- db_session.rollback()
+ db.session.rollback()
form_valid = False
if "UNIQUE constraint failed: journalists.username" in str(e):
flash(gettext("That username is already in use"),
@@ -121,7 +121,7 @@ def reset_two_factor_totp():
user = Journalist.query.get(uid)
user.is_totp = True
user.regenerate_totp_shared_secret()
- db_session.commit()
+ db.session.commit()
return redirect(url_for('admin.new_user_two_factor', uid=uid))
@view.route('/reset-2fa-hotp', methods=['POST'])
@@ -153,7 +153,7 @@ def reset_two_factor_hotp():
otp_secret, uid, e))
return render_template('admin_edit_hotp_secret.html', uid=uid)
else:
- db_session.commit()
+ db.session.commit()
return redirect(url_for('admin.new_user_two_factor', uid=uid))
else:
return render_template('admin_edit_hotp_secret.html', uid=uid)
@@ -218,8 +218,8 @@ def delete_user(user_id):
"Admin {} tried to delete itself".format(g.user.username))
abort(403)
elif user:
- db_session.delete(user)
- db_session.commit()
+ db.session.delete(user)
+ db.session.commit()
flash(gettext("Deleted user '{user}'").format(
user=user.username), "notification")
else:
diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py
--- a/securedrop/journalist_app/col.py
+++ b/securedrop/journalist_app/col.py
@@ -8,7 +8,8 @@
import crypto_util
import store
-from db import db_session, Submission
+from db import db
+from models import Submission
from journalist_app.forms import ReplyForm
from journalist_app.utils import (make_star_true, make_star_false, get_source,
delete_collection, col_download_unread,
@@ -22,13 +23,13 @@ def make_blueprint(config):
@view.route('/add_star/<filesystem_id>', methods=('POST',))
def add_star(filesystem_id):
make_star_true(filesystem_id)
- db_session.commit()
+ db.session.commit()
return redirect(url_for('main.index'))
@view.route("/remove_star/<filesystem_id>", methods=('POST',))
def remove_star(filesystem_id):
make_star_false(filesystem_id)
- db_session.commit()
+ db.session.commit()
return redirect(url_for('main.index'))
@view.route('/<filesystem_id>')
@@ -77,7 +78,7 @@ def download_single_submission(filesystem_id, fn):
try:
Submission.query.filter(
Submission.filename == fn).one().downloaded = True
- db_session.commit()
+ db.session.commit()
except NoResultFound as e:
current_app.logger.error(
"Could not mark " + fn + " as downloaded: %s" % (e,))
diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py
--- a/securedrop/journalist_app/forms.py
+++ b/securedrop/journalist_app/forms.py
@@ -7,7 +7,7 @@
ValidationError)
from wtforms.validators import InputRequired, Optional
-from db import Journalist
+from models import Journalist
def otp_secret_validation(form, field):
diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py
--- a/securedrop/journalist_app/main.py
+++ b/securedrop/journalist_app/main.py
@@ -9,7 +9,8 @@
import crypto_util
import store
-from db import db_session, Source, SourceStar, Submission, Reply
+from db import db
+from models import Source, SourceStar, Submission, Reply
from journalist_app.forms import ReplyForm
from journalist_app.utils import (validate_user, bulk_delete, download,
confirm_bulk_delete, get_source)
@@ -31,8 +32,8 @@ def login():
# Update access metadata
user.last_access = datetime.utcnow()
- db_session.add(user)
- db_session.commit()
+ db.session.add(user)
+ db.session.commit()
session['uid'] = user.id
return redirect(url_for('main.index'))
@@ -101,8 +102,8 @@ def reply():
reply = Reply(g.user, g.source, filename)
try:
- db_session.add(reply)
- db_session.commit()
+ db.session.add(reply)
+ db.session.commit()
except Exception as exc:
flash(gettext(
"An unexpected error occurred! Please "
@@ -123,7 +124,7 @@ def reply():
@view.route('/flag', methods=('POST',))
def flag():
g.source.flagged = True
- db_session.commit()
+ db.session.commit()
return render_template('flag.html', filesystem_id=g.filesystem_id,
codename=g.source.journalist_designation)
@@ -163,7 +164,7 @@ def regenerate_code():
g.filesystem_id,
item.filename,
g.source.journalist_filename)
- db_session.commit()
+ db.session.commit()
flash(gettext(
"The source '{original_name}' has been renamed to '{new_name}'")
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -11,10 +11,11 @@
import store
import worker
-from db import (db_session, get_one_or_else, Source, Journalist,
- InvalidUsernameException, WrongPasswordException,
- LoginThrottledException, BadTokenException, SourceStar,
- PasswordError, Submission)
+from db import db
+from models import (get_one_or_else, Source, Journalist,
+ InvalidUsernameException, WrongPasswordException,
+ LoginThrottledException, BadTokenException, SourceStar,
+ PasswordError, Submission)
from rm import srm
@@ -30,17 +31,17 @@ def logged_in():
def commit_account_changes(user):
- if db_session.is_modified(user):
+ if db.session.is_modified(user):
try:
- db_session.add(user)
- db_session.commit()
+ db.session.add(user)
+ db.session.commit()
except Exception as e:
flash(gettext(
"An unexpected error occurred! Please "
"inform your administrator."), "error")
current_app.logger.error("Account changes for '{}' failed: {}"
.format(user, e))
- db_session.rollback()
+ db.session.rollback()
else:
flash(gettext("Account updated."), "success")
@@ -110,7 +111,7 @@ def download(zip_basename, submissions):
:param str zip_basename: The basename of the ZIP-file download.
- :param list submissions: A list of :class:`db.Submission`s to
+ :param list submissions: A list of :class:`models.Submission`s to
include in the ZIP-file.
"""
zf = store.get_bulk_archive(submissions,
@@ -121,7 +122,7 @@ def download(zip_basename, submissions):
# Mark the submissions that have been downloaded as such
for submission in submissions:
submission.downloaded = True
- db_session.commit()
+ db.session.commit()
return send_file(zf.name, mimetype="application/zip",
attachment_filename=attachment_filename,
@@ -132,8 +133,8 @@ def bulk_delete(filesystem_id, items_selected):
for item in items_selected:
item_path = store.path(filesystem_id, item.filename)
worker.enqueue(srm, item_path)
- db_session.delete(item)
- db_session.commit()
+ db.session.delete(item)
+ db.session.commit()
flash(ngettext("Submission deleted.",
"{num} submissions deleted.".format(
@@ -155,15 +156,15 @@ def make_star_true(filesystem_id):
source.star.starred = True
else:
source_star = SourceStar(source)
- db_session.add(source_star)
+ db.session.add(source_star)
def make_star_false(filesystem_id):
source = get_source(filesystem_id)
if not source.star:
source_star = SourceStar(source)
- db_session.add(source_star)
- db_session.commit()
+ db.session.add(source_star)
+ db.session.commit()
source.star.starred = False
@@ -171,7 +172,7 @@ def col_star(cols_selected):
for filesystem_id in cols_selected:
make_star_true(filesystem_id)
- db_session.commit()
+ db.session.commit()
return redirect(url_for('main.index'))
@@ -179,7 +180,7 @@ def col_un_star(cols_selected):
for filesystem_id in cols_selected:
make_star_false(filesystem_id)
- db_session.commit()
+ db.session.commit()
return redirect(url_for('main.index'))
@@ -217,8 +218,8 @@ def delete_collection(filesystem_id):
# Delete their entry in the db
source = get_source(filesystem_id)
- db_session.delete(source)
- db_session.commit()
+ db.session.delete(source)
+ db.session.commit()
return job
@@ -231,7 +232,7 @@ def set_diceware_password(user, password):
return
try:
- db_session.commit()
+ db.session.commit()
except Exception:
flash(gettext(
'There was an error, and the new password might not have been '
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -6,6 +6,7 @@
import logging
import os
from os.path import dirname, join, realpath
+import pwd
import shutil
import signal
import subprocess
@@ -15,13 +16,15 @@
import version
import qrcode
+from sqlalchemy import text
from sqlalchemy.orm.exc import NoResultFound
os.environ['SECUREDROP_ENV'] = 'dev' # noqa
import config
import crypto_util
-from db import (db_session, init_db, Journalist, PasswordError,
- InvalidUsernameException)
+import journalist_app
+from db import db
+from models import Journalist, PasswordError, InvalidUsernameException
from management.run import run
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
@@ -87,7 +90,8 @@ def reset(args):
pass
# Regenerate the database
- init_db()
+ with app_context():
+ db.create_all()
# Clear submission/reply storage
try:
@@ -148,6 +152,8 @@ def _make_password():
def _add_user(is_admin=False):
+ context = app_context()
+ context.push()
username = _get_username()
print("Note: Passwords are now autogenerated.")
@@ -175,10 +181,10 @@ def _add_user(is_admin=False):
password=password,
is_admin=is_admin,
otp_secret=otp_secret)
- db_session.add(user)
- db_session.commit()
+ db.session.add(user)
+ db.session.commit()
except Exception as exc:
- db_session.rollback()
+ db.session.rollback()
if "UNIQUE constraint failed: journalists.username" in str(exc):
print('ERROR: That username is already taken!')
else:
@@ -223,6 +229,8 @@ def _get_delete_confirmation(user):
def delete_user(args):
"""Deletes a journalist or administrator from the application."""
+ context = app_context()
+ context.push()
username = _get_username_to_delete()
try:
selected_user = Journalist.query.filter_by(username=username).one()
@@ -236,8 +244,8 @@ def delete_user(args):
# Try to delete user from the database
try:
- db_session.delete(selected_user)
- db_session.commit()
+ db.session.delete(selected_user)
+ db.session.commit()
except Exception as e:
# If the user was deleted between the user selection and confirmation,
# (e.g., through the web app), we don't report any errors. If the user
@@ -379,6 +387,17 @@ def translate_desktop(args):
sources=" ".join(args.source)))
+def init_db(args):
+ with journalist_app.create_app(config).app_context():
+ db.create_all()
+ db.session.execute(text('PRAGMA secure_delete = ON'))
+ db.session.execute(text('PRAGMA auto_vacuum = FULL'))
+ db.session.commit()
+
+ user = pwd.getpwnam(args.user)
+ os.chown('/var/lib/securedrop/db.sqlite', user.pw_uid, user.pw_gid)
+
+
def get_args():
parser = argparse.ArgumentParser(prog=__file__, description='Management '
'and testing utility for SecureDrop.')
@@ -418,6 +437,12 @@ def get_args():
set_translate_messages_parser(subps)
set_translate_desktop_parser(subps)
+ init_db_subp = subps.add_parser('init-db', help='initialize the DB')
+ init_db_subp.add_argument('-u', '--user',
+ help='Unix user for the DB',
+ required=True)
+ init_db_subp.set_defaults(func=init_db)
+
return parser
@@ -504,6 +529,10 @@ def setup_verbosity(args):
logging.getLogger(__name__).setLevel(logging.INFO)
+def app_context():
+ return journalist_app.create_app(config).app_context()
+
+
def _run_from_commandline(): # pragma: no cover
try:
args = get_args().parse_args()
diff --git a/securedrop/models.py b/securedrop/models.py
new file mode 100644
--- /dev/null
+++ b/securedrop/models.py
@@ -0,0 +1,444 @@
+import os
+import datetime
+import base64
+import binascii
+
+# Find the best implementation available on this platform
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy import Column, Integer, String, Boolean, DateTime, Binary
+from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
+from jinja2 import Markup
+
+import scrypt
+import pyotp
+
+import qrcode
+# Using svg because it doesn't require additional dependencies
+import qrcode.image.svg
+
+from db import db
+import store
+
+
+LOGIN_HARDENING = True
+# Unfortunately, the login hardening measures mess with the tests in
+# non-deterministic ways. TODO rewrite the tests so we can more
+# precisely control which code paths are exercised.
+if os.environ.get('SECUREDROP_ENV') == 'test':
+ LOGIN_HARDENING = False
+
+
+def get_one_or_else(query, logger, failure_method):
+ try:
+ return query.one()
+ except MultipleResultsFound as e:
+ logger.error(
+ "Found multiple while executing %s when one was expected: %s" %
+ (query, e, ))
+ failure_method(500)
+ except NoResultFound as e:
+ logger.error("Found none when one was expected: %s" % (e,))
+ failure_method(404)
+
+
+class Source(db.Model):
+ __tablename__ = 'sources'
+ id = Column(Integer, primary_key=True)
+ filesystem_id = Column(String(96), unique=True)
+ journalist_designation = Column(String(255), nullable=False)
+ flagged = Column(Boolean, default=False)
+ last_updated = Column(DateTime, default=datetime.datetime.utcnow)
+ star = relationship("SourceStar", uselist=False, backref="source")
+
+ # sources are "pending" and don't get displayed to journalists until they
+ # submit something
+ pending = Column(Boolean, default=True)
+
+ # keep track of how many interactions have happened, for filenames
+ interaction_count = Column(Integer, default=0, nullable=False)
+
+ # Don't create or bother checking excessively long codenames to prevent DoS
+ NUM_WORDS = 7
+ MAX_CODENAME_LEN = 128
+
+ def __init__(self, filesystem_id=None, journalist_designation=None):
+ self.filesystem_id = filesystem_id
+ self.journalist_designation = journalist_designation
+
+ def __repr__(self):
+ return '<Source %r>' % (self.journalist_designation)
+
+ @property
+ def journalist_filename(self):
+ valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
+ return ''.join([c for c in self.journalist_designation.lower().replace(
+ ' ', '_') if c in valid_chars])
+
+ def documents_messages_count(self):
+ try:
+ return self.docs_msgs_count
+ except AttributeError:
+ self.docs_msgs_count = {'messages': 0, 'documents': 0}
+ for submission in self.submissions:
+ if submission.filename.endswith('msg.gpg'):
+ self.docs_msgs_count['messages'] += 1
+ elif (submission.filename.endswith('doc.gz.gpg') or
+ submission.filename.endswith('doc.zip.gpg')):
+ self.docs_msgs_count['documents'] += 1
+ return self.docs_msgs_count
+
+ @property
+ def collection(self):
+ """Return the list of submissions and replies for this source, sorted
+ in ascending order by the filename/interaction count."""
+ collection = []
+ collection.extend(self.submissions)
+ collection.extend(self.replies)
+ collection.sort(key=lambda x: int(x.filename.split('-')[0]))
+ return collection
+
+
+class Submission(db.Model):
+ __tablename__ = 'submissions'
+ id = Column(Integer, primary_key=True)
+ source_id = Column(Integer, ForeignKey('sources.id'))
+ source = relationship(
+ "Source",
+ backref=backref("submissions", order_by=id, cascade="delete")
+ )
+
+ filename = Column(String(255), nullable=False)
+ size = Column(Integer, nullable=False)
+ downloaded = Column(Boolean, default=False)
+
+ def __init__(self, source, filename):
+ self.source_id = source.id
+ self.filename = filename
+ self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
+
+ def __repr__(self):
+ return '<Submission %r>' % (self.filename)
+
+
+class Reply(db.Model):
+ __tablename__ = "replies"
+ id = Column(Integer, primary_key=True)
+
+ journalist_id = Column(Integer, ForeignKey('journalists.id'))
+ journalist = relationship(
+ "Journalist",
+ backref=backref(
+ 'replies',
+ order_by=id))
+
+ source_id = Column(Integer, ForeignKey('sources.id'))
+ source = relationship(
+ "Source",
+ backref=backref("replies", order_by=id, cascade="delete")
+ )
+
+ filename = Column(String(255), nullable=False)
+ size = Column(Integer, nullable=False)
+
+ def __init__(self, journalist, source, filename):
+ self.journalist_id = journalist.id
+ self.source_id = source.id
+ self.filename = filename
+ self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
+
+ def __repr__(self):
+ return '<Reply %r>' % (self.filename)
+
+
+class SourceStar(db.Model):
+ __tablename__ = 'source_stars'
+ id = Column("id", Integer, primary_key=True)
+ source_id = Column("source_id", Integer, ForeignKey('sources.id'))
+ starred = Column("starred", Boolean, default=True)
+
+ def __eq__(self, other):
+ if isinstance(other, SourceStar):
+ return (self.source_id == other.source_id and
+ self.id == other.id and self.starred == other.starred)
+ return NotImplemented
+
+ def __init__(self, source, starred=True):
+ self.source_id = source.id
+ self.starred = starred
+
+
+class InvalidUsernameException(Exception):
+
+ """Raised when a user logs in with an invalid username"""
+
+
+class LoginThrottledException(Exception):
+
+ """Raised when a user attempts to log in
+ too many times in a given time period"""
+
+
+class WrongPasswordException(Exception):
+
+ """Raised when a user logs in with an incorrect password"""
+
+
+class BadTokenException(Exception):
+
+    """Raised when a user logs in with an incorrect TOTP token"""
+
+
+class PasswordError(Exception):
+
+ """Generic error for passwords that are invalid.
+ """
+
+
+class InvalidPasswordLength(PasswordError):
+ """Raised when attempting to create a Journalist or log in with an invalid
+ password length.
+ """
+
+ def __init__(self, password):
+ self.pw_len = len(password)
+
+ def __str__(self):
+ if self.pw_len > Journalist.MAX_PASSWORD_LEN:
+ return "Password too long (len={})".format(self.pw_len)
+ if self.pw_len < Journalist.MIN_PASSWORD_LEN:
+ return "Password needs to be at least {} characters".format(
+ Journalist.MIN_PASSWORD_LEN
+ )
+
+
+class NonDicewarePassword(PasswordError):
+
+ """Raised when attempting to validate a password that is not diceware-like
+ """
+
+
+class Journalist(db.Model):
+ __tablename__ = "journalists"
+ id = Column(Integer, primary_key=True)
+ username = Column(String(255), nullable=False, unique=True)
+ pw_salt = Column(Binary(32))
+ pw_hash = Column(Binary(256))
+ is_admin = Column(Boolean)
+
+ otp_secret = Column(String(16), default=pyotp.random_base32)
+ is_totp = Column(Boolean, default=True)
+ hotp_counter = Column(Integer, default=0)
+ last_token = Column(String(6))
+
+ created_on = Column(DateTime, default=datetime.datetime.utcnow)
+ last_access = Column(DateTime)
+ login_attempts = relationship(
+ "JournalistLoginAttempt",
+ backref="journalist")
+
+ MIN_USERNAME_LEN = 3
+
+ def __init__(self, username, password, is_admin=False, otp_secret=None):
+ self.check_username_acceptable(username)
+ self.username = username
+ self.set_password(password)
+ self.is_admin = is_admin
+
+ if otp_secret:
+ self.set_hotp_secret(otp_secret)
+
+ def __repr__(self):
+ return "<Journalist {0}{1}>".format(
+ self.username,
+ " [admin]" if self.is_admin else "")
+
+ def _gen_salt(self, salt_bytes=32):
+ return os.urandom(salt_bytes)
+
+ _SCRYPT_PARAMS = dict(N=2**14, r=8, p=1)
+
+ def _scrypt_hash(self, password, salt, params=None):
+ if not params:
+ params = self._SCRYPT_PARAMS
+ return scrypt.hash(str(password), salt, **params)
+
+ MAX_PASSWORD_LEN = 128
+ MIN_PASSWORD_LEN = 14
+
+ def set_password(self, password):
+ self.check_password_acceptable(password)
+
+ # Don't do anything if user's password hasn't changed.
+ if self.pw_hash and self.valid_password(password):
+ return
+
+ self.pw_salt = self._gen_salt()
+ self.pw_hash = self._scrypt_hash(password, self.pw_salt)
+
+ @classmethod
+ def check_username_acceptable(cls, username):
+ if len(username) < cls.MIN_USERNAME_LEN:
+ raise InvalidUsernameException(
+ 'Username "{}" must be at least {} characters long.'
+ .format(username, cls.MIN_USERNAME_LEN))
+
+ @classmethod
+ def check_password_acceptable(cls, password):
+ # Enforce a reasonable maximum length for passwords to avoid DoS
+ if len(password) > cls.MAX_PASSWORD_LEN:
+ raise InvalidPasswordLength(password)
+
+ # Enforce a reasonable minimum length for new passwords
+ if len(password) < cls.MIN_PASSWORD_LEN:
+ raise InvalidPasswordLength(password)
+
+ # Ensure all passwords are "diceware-like"
+ if len(password.split()) < 7:
+ raise NonDicewarePassword()
+
+ def valid_password(self, password):
+ # Avoid hashing passwords that are over the maximum length
+ if len(password) > self.MAX_PASSWORD_LEN:
+ raise InvalidPasswordLength(password)
+ # No check on minimum password length here because some passwords
+ # may have been set prior to setting the minimum password length.
+ return pyotp.utils.compare_digest(
+ self._scrypt_hash(password, self.pw_salt),
+ self.pw_hash)
+
+ def regenerate_totp_shared_secret(self):
+ self.otp_secret = pyotp.random_base32()
+
+ def set_hotp_secret(self, otp_secret):
+ self.is_totp = False
+ self.otp_secret = base64.b32encode(
+ binascii.unhexlify(
+ otp_secret.replace(
+ " ",
+ "")))
+ self.hotp_counter = 0
+
+ @property
+ def totp(self):
+ return pyotp.TOTP(self.otp_secret)
+
+ @property
+ def hotp(self):
+ return pyotp.HOTP(self.otp_secret)
+
+ @property
+ def shared_secret_qrcode(self):
+ uri = self.totp.provisioning_uri(
+ self.username,
+ issuer_name="SecureDrop")
+
+ qr = qrcode.QRCode(
+ box_size=15,
+ image_factory=qrcode.image.svg.SvgPathImage
+ )
+ qr.add_data(uri)
+ img = qr.make_image()
+
+ svg_out = StringIO()
+ img.save(svg_out)
+ return Markup(svg_out.getvalue())
+
+ @property
+ def formatted_otp_secret(self):
+ """The OTP secret is easier to read and manually enter if it is all
+ lowercase and split into four groups of four characters. The secret is
+ base32-encoded, so it is case insensitive."""
+ sec = self.otp_secret
+ chunks = [sec[i:i + 4] for i in range(0, len(sec), 4)]
+ return ' '.join(chunks).lower()
+
+ def _format_token(self, token):
+ """Strips from authentication tokens the whitespace
+ that many clients add for readability"""
+ return ''.join(token.split())
+
+ def verify_token(self, token):
+ token = self._format_token(token)
+
+ # Store latest token to prevent OTP token reuse
+ self.last_token = token
+ db.session.commit()
+
+ if self.is_totp:
+ # Also check the given token against the previous and next
+ # valid tokens, to compensate for potential time skew
+ # between the client and the server. The total valid
+ # window is 1:30s.
+ return self.totp.verify(token, valid_window=1)
+ else:
+ for counter_val in range(
+ self.hotp_counter,
+ self.hotp_counter + 20):
+ if self.hotp.verify(token, counter_val):
+ self.hotp_counter = counter_val + 1
+ db.session.commit()
+ return True
+ return False
+
+ _LOGIN_ATTEMPT_PERIOD = 60 # seconds
+ _MAX_LOGIN_ATTEMPTS_PER_PERIOD = 5
+
+ @classmethod
+ def throttle_login(cls, user):
+ # Record the login attempt...
+ login_attempt = JournalistLoginAttempt(user)
+ db.session.add(login_attempt)
+ db.session.commit()
+
+ # ...and reject it if they have exceeded the threshold
+ login_attempt_period = datetime.datetime.utcnow() - \
+ datetime.timedelta(seconds=cls._LOGIN_ATTEMPT_PERIOD)
+ attempts_within_period = JournalistLoginAttempt.query.filter(
+ JournalistLoginAttempt.timestamp > login_attempt_period).all()
+ if len(attempts_within_period) > cls._MAX_LOGIN_ATTEMPTS_PER_PERIOD:
+ raise LoginThrottledException(
+ "throttled ({} attempts in last {} seconds)".format(
+ len(attempts_within_period),
+ cls._LOGIN_ATTEMPT_PERIOD))
+
+ @classmethod
+ def login(cls, username, password, token):
+ try:
+ user = Journalist.query.filter_by(username=username).one()
+ except NoResultFound:
+ raise InvalidUsernameException(
+ "invalid username '{}'".format(username))
+
+ if LOGIN_HARDENING:
+ cls.throttle_login(user)
+
+ # Prevent TOTP token reuse
+ if user.last_token is not None:
+ if pyotp.utils.compare_digest(token, user.last_token):
+ raise BadTokenException("previously used token "
+ "{}".format(token))
+ if not user.verify_token(token):
+ raise BadTokenException("invalid token")
+ if not user.valid_password(password):
+ raise WrongPasswordException("invalid password")
+ return user
+
+
+class JournalistLoginAttempt(db.Model):
+
+    """This model keeps track of a journalist's login attempts so we can
+ rate limit them in order to prevent attackers from brute forcing
+ passwords or two-factor tokens."""
+ __tablename__ = "journalist_login_attempt"
+ id = Column(Integer, primary_key=True)
+ timestamp = Column(DateTime, default=datetime.datetime.utcnow)
+ journalist_id = Column(Integer, ForeignKey('journalists.id'))
+
+ def __init__(self, journalist):
+ self.journalist_id = journalist.id
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -14,7 +14,8 @@
import template_filters
import version
-from db import Source, db_session
+from db import db
+from models import Source
from request_that_secures_file_uploads import RequestThatSecuresFileUploads
from source_app import main, info, api
from source_app.decorators import ignore_static
@@ -31,9 +32,35 @@ def create_app(config):
# The default CSRF token expiration is 1 hour. Since large uploads can
# take longer than an hour over Tor, we increase the valid window to 24h.
app.config['WTF_CSRF_TIME_LIMIT'] = 60 * 60 * 24
-
CSRFProtect(app)
+ if config.DATABASE_ENGINE == "sqlite":
+ db_uri = (config.DATABASE_ENGINE + ":///" +
+ config.DATABASE_FILE)
+ else:
+ db_uri = (
+ config.DATABASE_ENGINE + '://' +
+ config.DATABASE_USERNAME + ':' +
+ config.DATABASE_PASSWORD + '@' +
+ config.DATABASE_HOST + '/' +
+ config.DATABASE_NAME
+ )
+ app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+ app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
+ db.init_app(app)
+
+ if config.DATABASE_ENGINE == "sqlite":
+ db_uri = (config.DATABASE_ENGINE + ":///" +
+ config.DATABASE_FILE)
+ else: # pragma: no cover
+ db_uri = (
+ config.DATABASE_ENGINE + '://' +
+ config.DATABASE_USERNAME + ':' +
+ config.DATABASE_PASSWORD + '@' +
+ config.DATABASE_HOST + '/' +
+ config.DATABASE_NAME
+ )
+
@app.errorhandler(CSRFError)
def handle_csrf_error(e):
msg = render_template('session_timeout.html')
@@ -121,12 +148,6 @@ def setup_g():
return redirect(url_for('main.index'))
g.loc = store.path(g.filesystem_id)
- @app.teardown_appcontext
- def shutdown_session(exception=None):
- """Automatically remove database sessions at the end of the request, or
- when the application shuts down"""
- db_session.remove()
-
@app.errorhandler(404)
def page_not_found(error):
return render_template('notfound.html'), 404
diff --git a/securedrop/source_app/forms.py b/securedrop/source_app/forms.py
--- a/securedrop/source_app/forms.py
+++ b/securedrop/source_app/forms.py
@@ -3,7 +3,7 @@
from wtforms import PasswordField
from wtforms.validators import InputRequired, Regexp, Length
-from db import Source
+from models import Source
class LoginForm(FlaskForm):
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -10,7 +10,8 @@
import crypto_util
import store
-from db import Source, db_session, Submission, Reply, get_one_or_else
+from db import db
+from models import Source, Submission, Reply, get_one_or_else
from rm import srm
from source_app.decorators import login_required
from source_app.utils import (logged_in, generate_unique_codename,
@@ -46,11 +47,11 @@ def create():
filesystem_id = crypto_util.hash_codename(session['codename'])
source = Source(filesystem_id, crypto_util.display_id())
- db_session.add(source)
+ db.session.add(source)
try:
- db_session.commit()
+ db.session.commit()
except IntegrityError as e:
- db_session.rollback()
+ db.session.rollback()
current_app.logger.error(
"Attempt to create a source with duplicate codename: %s" %
(e,))
@@ -154,7 +155,7 @@ def submit():
for fname in fnames:
submission = Submission(g.source, fname)
- db_session.add(submission)
+ db.session.add(submission)
if g.source.pending:
g.source.pending = False
@@ -172,7 +173,7 @@ def submit():
entropy_avail))
g.source.last_updated = datetime.utcnow()
- db_session.commit()
+ db.session.commit()
normalize_timestamps(g.filesystem_id)
return redirect(url_for('main.lookup'))
@@ -184,8 +185,8 @@ def delete():
Reply.filename == request.form['reply_filename'])
reply = get_one_or_else(query, current_app.logger, abort)
srm(store.path(g.filesystem_id, reply.filename))
- db_session.delete(reply)
- db_session.commit()
+ db.session.delete(reply)
+ db.session.commit()
flash(gettext("Reply deleted"), "notification")
return redirect(url_for('.lookup'))
@@ -201,8 +202,8 @@ def batch_delete():
for reply in replies:
srm(store.path(g.filesystem_id, reply.filename))
- db_session.delete(reply)
- db_session.commit()
+ db.session.delete(reply)
+ db.session.commit()
flash(gettext("All replies have been deleted"), "notification")
return redirect(url_for('.lookup'))
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
--- a/securedrop/source_app/utils.py
+++ b/securedrop/source_app/utils.py
@@ -9,7 +9,8 @@
import i18n
import store
-from db import Source, db_session
+from db import db
+from models import Source
def logged_in():
@@ -77,7 +78,7 @@ def async_genkey(filesystem_id, codename):
source = Source.query.filter(Source.filesystem_id == filesystem_id) \
.one()
source.last_updated = datetime.utcnow()
- db_session.commit()
+ db.session.commit()
except Exception as e:
logging.getLogger(__name__).error(
"async_genkey for source (filesystem_id={}): {}"
| diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -22,9 +22,9 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-import db
-import journalist
-from source_app import create_app
+import journalist_app
+import source_app
+from db import db
import crypto_util
import tests.utils.env as env
@@ -83,8 +83,10 @@ def _prepare_webdriver(self):
return firefox_binary.FirefoxBinary(log_file=log_file)
def setup(self, session_expiration=30):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
# Patch the two-factor verification to avoid intermittent errors
- self.patcher = mock.patch('db.Journalist.verify_token')
+ self.patcher = mock.patch('models.Journalist.verify_token')
self.mock_journalist_verify_token = self.patcher.start()
self.mock_journalist_verify_token.return_value = True
@@ -96,7 +98,7 @@ def setup(self, session_expiration=30):
env.create_directories()
self.gpg = env.init_gpg()
- db.init_db()
+ db.create_all()
source_port = self._unused_port()
journalist_port = self._unused_port()
@@ -118,9 +120,7 @@ def start_source_server():
config.SESSION_EXPIRATION_MINUTES = self.session_expiration
- source_app = create_app(config)
-
- source_app.run(
+ source_app.create_app(config).run(
port=source_port,
debug=True,
use_reloader=False,
@@ -128,7 +128,7 @@ def start_source_server():
def start_journalist_server():
Random.atfork()
- journalist.app.run(
+ journalist_app.create_app(config).run(
port=journalist_port,
debug=True,
use_reloader=False,
@@ -190,6 +190,7 @@ def teardown(self):
self.driver.quit()
self.source_process.terminate()
self.journalist_process.terminate()
+ self.__context.pop()
def wait_for(self, function_with_assertion, timeout=5):
"""Polling wait for an arbitrary assertion."""
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -9,7 +9,7 @@
import tests.utils.db_helper as db_helper
import crypto_util
-from db import Journalist
+from models import Journalist
from step_helpers import screenshots
import config
diff --git a/securedrop/tests/pages-layout/test_journalist.py b/securedrop/tests/pages-layout/test_journalist.py
--- a/securedrop/tests/pages-layout/test_journalist.py
+++ b/securedrop/tests/pages-layout/test_journalist.py
@@ -20,17 +20,17 @@
import functional_test
import pytest
-import db
+import models
@pytest.fixture
def hardening(request):
- hardening = db.LOGIN_HARDENING
+ hardening = models.LOGIN_HARDENING
def finalizer():
- db.LOGIN_HARDENING = hardening
+ models.LOGIN_HARDENING = hardening
request.addfinalizer(finalizer)
- db.LOGIN_HARDENING = True
+ models.LOGIN_HARDENING = True
return None
diff --git a/securedrop/tests/test_2fa.py b/securedrop/tests/test_2fa.py
--- a/securedrop/tests/test_2fa.py
+++ b/securedrop/tests/test_2fa.py
@@ -5,7 +5,7 @@
import flask_testing
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-from db import Journalist, BadTokenException
+from models import Journalist, BadTokenException
import journalist
import utils
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
--- a/securedrop/tests/test_crypto_util.py
+++ b/securedrop/tests/test_crypto_util.py
@@ -5,20 +5,26 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
-import db
+import journalist_app
+import models
import store
import utils
+from db import db
+
class TestCryptoUtil(unittest.TestCase):
"""The set of tests for crypto_util.py."""
def setUp(self):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
utils.env.setup()
def tearDown(self):
utils.env.teardown()
+ self.__context.pop()
def test_word_list_does_not_contain_empty_strings(self):
self.assertNotIn('', (crypto_util._get_wordlist('en')
@@ -175,9 +181,9 @@ def test_genkeypair(self):
codename = crypto_util.genrandomid()
filesystem_id = crypto_util.hash_codename(codename)
journalist_filename = crypto_util.display_id()
- source = db.Source(filesystem_id, journalist_filename)
- db.db_session.add(source)
- db.db_session.commit()
+ source = models.Source(filesystem_id, journalist_filename)
+ db.session.add(source)
+ db.session.commit()
crypto_util.genkeypair(source.filesystem_id, codename)
self.assertIsNotNone(crypto_util.getkey(filesystem_id))
diff --git a/securedrop/tests/test_db.py b/securedrop/tests/test_db.py
--- a/securedrop/tests/test_db.py
+++ b/securedrop/tests/test_db.py
@@ -4,8 +4,8 @@
import journalist
from utils import db_helper, env
-from db import (Journalist, Submission, Reply, get_one_or_else,
- LoginThrottledException)
+from models import (Journalist, Submission, Reply, get_one_or_else,
+ LoginThrottledException)
class TestDatabase(TestCase):
diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
--- a/securedrop/tests/test_i18n.py
+++ b/securedrop/tests/test_i18n.py
@@ -20,6 +20,7 @@
import logging
import os
import re
+import unittest
from flask import request, session, render_template_string, render_template
from flask_babel import gettext
@@ -36,11 +37,23 @@
import utils
-class TestI18N(object):
+class TestI18N(unittest.TestCase):
- @classmethod
- def setup_class(cls):
+ def setUp(self):
+ self.__context = journalist_app.create_app(config).app_context()
+
+ # Note: We only need the context for the setup/teardown; it interferes
+ # with the rest of the test cases.
+ self.__context.push()
utils.env.setup()
+ self.__context.pop()
+
+ def tearDown(self):
+ # Note: We only need the context for the setup/teardown; it interferes
+ # with the rest of the test cases.
+ self.__context.push()
+ utils.env.teardown()
+ self.__context.pop()
def get_fake_config(self):
class Config:
diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
+
from cStringIO import StringIO
import gzip
import mock
import os
+import random
import re
import shutil
import tempfile
@@ -17,133 +19,144 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
-from db import db_session, Journalist
-import journalist
-import source
+from db import db
+from models import Journalist
+import journalist_app
+import source_app
import store
import utils
+# Seed the RNG for deterministic testing
+random.seed('ΰ² _ΰ² ')
+
class TestIntegration(unittest.TestCase):
- def _login_user(self):
- self.journalist_app.post('/login', data=dict(
- username=self.user.username,
+ def _login_user(self, app):
+ app.post('/login', data=dict(
+ username=self.username,
password=self.user_pw,
token='mocked'),
follow_redirects=True)
def setUp(self):
- utils.env.setup()
+ self.source_app = source_app.create_app(config)
+ self.journalist_app = journalist_app.create_app(config)
- self.source_app = source.app.test_client()
- self.journalist_app = journalist.app.test_client()
+ self.__context = self.journalist_app.app_context()
+ self.__context.push()
+ utils.env.setup()
self.gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR)
# Patch the two-factor verification to avoid intermittent errors
- patcher = mock.patch('db.Journalist.verify_token')
+ patcher = mock.patch('models.Journalist.verify_token')
self.addCleanup(patcher.stop)
self.mock_journalist_verify_token = patcher.start()
self.mock_journalist_verify_token.return_value = True
- # Add a test user to the journalist interface and log them in
- # print Journalist.query.all()
+ # Add a test user to the journalist interface
self.user_pw = "corret horse battery staple haha cultural reference"
- self.user = Journalist(username="some-username",
- password=self.user_pw)
- db_session.add(self.user)
- db_session.commit()
- self._login_user()
+ self.username = crypto_util.genrandomid()
+ user = Journalist(username=self.username, password=self.user_pw)
+ db.session.add(user)
+ db.session.commit()
+ self.__context.pop()
def tearDown(self):
+ self.__context.push()
utils.env.teardown()
+ self.__context.pop()
def test_submit_message(self):
"""When a source creates an account, test that a new entry appears
in the journalist interface"""
test_msg = "This is a test message."
- with self.source_app as source_app:
- resp = source_app.get('/generate')
- resp = source_app.post('/create', follow_redirects=True)
+ with self.source_app.test_client() as app:
+ resp = app.get('/generate')
+ resp = app.post('/create', follow_redirects=True)
filesystem_id = g.filesystem_id
# redirected to submission form
- resp = self.source_app.post('/submit', data=dict(
+ resp = app.post('/submit', data=dict(
msg=test_msg,
fh=(StringIO(''), ''),
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- source_app.get('/logout')
+ app.get('/logout')
# Request the Journalist Interface index
- rv = self.journalist_app.get('/')
- self.assertEqual(rv.status_code, 200)
- self.assertIn("Sources", rv.data)
- soup = BeautifulSoup(rv.data, 'html.parser')
-
- # The source should have a "download unread" link that says "1 unread"
- col = soup.select('ul#cols > li')[0]
- unread_span = col.select('span.unread a')[0]
- self.assertIn("1 unread", unread_span.get_text())
-
- col_url = soup.select('ul#cols > li a')[0]['href']
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- submission_url = soup.select('ul#submissions li a')[0]['href']
- self.assertIn("-msg", submission_url)
- span = soup.select('ul#submissions li span.info span')[0]
- self.assertRegexpMatches(span['title'], "\d+ bytes")
-
- resp = self.journalist_app.get(submission_url)
- self.assertEqual(resp.status_code, 200)
- decrypted_data = self.gpg.decrypt(resp.data)
- self.assertTrue(decrypted_data.ok)
- self.assertEqual(decrypted_data.data, test_msg)
-
- # delete submission
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- doc_name = soup.select(
- 'ul > li > input[name="doc_names_selected"]')[0]['value']
- resp = self.journalist_app.post('/bulk', data=dict(
- action='confirm_delete',
- filesystem_id=filesystem_id,
- doc_names_selected=doc_name
- ))
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ rv = app.get('/')
+ self.assertEqual(rv.status_code, 200)
+ self.assertIn("Sources", rv.data)
+ soup = BeautifulSoup(rv.data, 'html.parser')
+
+ # The source should have a "download unread" link that
+ # says "1 unread"
+ col = soup.select('ul#cols > li')[0]
+ unread_span = col.select('span.unread a')[0]
+ self.assertIn("1 unread", unread_span.get_text())
+
+ col_url = soup.select('ul#cols > li a')[0]['href']
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ submission_url = soup.select('ul#submissions li a')[0]['href']
+ self.assertIn("-msg", submission_url)
+ span = soup.select('ul#submissions li span.info span')[0]
+ self.assertRegexpMatches(span['title'], "\d+ bytes")
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- self.assertIn("The following file has been selected for", resp.data)
+ resp = app.get(submission_url)
+ self.assertEqual(resp.status_code, 200)
+ decrypted_data = self.gpg.decrypt(resp.data)
+ self.assertTrue(decrypted_data.ok)
+ self.assertEqual(decrypted_data.data, test_msg)
- # confirm delete submission
- doc_name = soup.select
- doc_name = soup.select(
- 'ul > li > input[name="doc_names_selected"]')[0]['value']
- resp = self.journalist_app.post('/bulk', data=dict(
- action='delete',
- filesystem_id=filesystem_id,
- doc_names_selected=doc_name,
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- self.assertIn("Submission deleted.", resp.data)
+ # delete submission
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ doc_name = soup.select(
+ 'ul > li > input[name="doc_names_selected"]')[0]['value']
+ resp = app.post('/bulk', data=dict(
+ action='confirm_delete',
+ filesystem_id=filesystem_id,
+ doc_names_selected=doc_name
+ ))
- # confirm that submission deleted and absent in list of submissions
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
- self.assertIn("No documents to display.", resp.data)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("The following file has been selected for",
+ resp.data)
+
+ # confirm delete submission
+ doc_name = soup.select
+ doc_name = soup.select(
+ 'ul > li > input[name="doc_names_selected"]')[0]['value']
+ resp = app.post('/bulk', data=dict(
+ action='delete',
+ filesystem_id=filesystem_id,
+ doc_names_selected=doc_name,
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("Submission deleted.", resp.data)
- # the file should be deleted from the filesystem
- # since file deletion is handled by a polling worker, this test needs
- # to wait for the worker to get the job and execute it
- utils.async.wait_for_assertion(
- lambda: self.assertFalse(
- os.path.exists(store.path(filesystem_id, doc_name))
+ # confirm that submission deleted and absent in list of submissions
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("No documents to display.", resp.data)
+
+ # the file should be deleted from the filesystem
+ # since file deletion is handled by a polling worker, this test
+ # needs to wait for the worker to get the job and execute it
+ utils.async.wait_for_assertion(
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id, doc_name))
+ )
)
- )
def test_submit_file(self):
"""When a source creates an account, test that a new entry appears
@@ -151,89 +164,93 @@ def test_submit_file(self):
test_file_contents = "This is a test file."
test_filename = "test.txt"
- with self.source_app as source_app:
- resp = source_app.get('/generate')
- resp = source_app.post('/create', follow_redirects=True)
+ with self.source_app.test_client() as app:
+ resp = app.get('/generate')
+ resp = app.post('/create', follow_redirects=True)
filesystem_id = g.filesystem_id
# redirected to submission form
- resp = self.source_app.post('/submit', data=dict(
+ resp = app.post('/submit', data=dict(
msg="",
fh=(StringIO(test_file_contents), test_filename),
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- source_app.get('/logout')
-
- resp = self.journalist_app.get('/')
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Sources", resp.data)
- soup = BeautifulSoup(resp.data, 'html.parser')
+ app.get('/logout')
- # The source should have a "download unread" link that says "1 unread"
- col = soup.select('ul#cols > li')[0]
- unread_span = col.select('span.unread a')[0]
- self.assertIn("1 unread", unread_span.get_text())
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.get('/')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Sources", resp.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
- col_url = soup.select('ul#cols > li a')[0]['href']
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- submission_url = soup.select('ul#submissions li a')[0]['href']
- self.assertIn("-doc", submission_url)
- span = soup.select('ul#submissions li span.info span')[0]
- self.assertRegexpMatches(span['title'], "\d+ bytes")
+ # The source should have a "download unread" link that says
+ # "1 unread"
+ col = soup.select('ul#cols > li')[0]
+ unread_span = col.select('span.unread a')[0]
+ self.assertIn("1 unread", unread_span.get_text())
- resp = self.journalist_app.get(submission_url)
- self.assertEqual(resp.status_code, 200)
- decrypted_data = self.gpg.decrypt(resp.data)
- self.assertTrue(decrypted_data.ok)
+ col_url = soup.select('ul#cols > li a')[0]['href']
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ submission_url = soup.select('ul#submissions li a')[0]['href']
+ self.assertIn("-doc", submission_url)
+ span = soup.select('ul#submissions li span.info span')[0]
+ self.assertRegexpMatches(span['title'], "\d+ bytes")
- sio = StringIO(decrypted_data.data)
- with gzip.GzipFile(mode='rb', fileobj=sio) as gzip_file:
- unzipped_decrypted_data = gzip_file.read()
- self.assertEqual(unzipped_decrypted_data, test_file_contents)
+ resp = app.get(submission_url)
+ self.assertEqual(resp.status_code, 200)
+ decrypted_data = self.gpg.decrypt(resp.data)
+ self.assertTrue(decrypted_data.ok)
- # delete submission
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- doc_name = soup.select(
- 'ul > li > input[name="doc_names_selected"]')[0]['value']
- resp = self.journalist_app.post('/bulk', data=dict(
- action='confirm_delete',
- filesystem_id=filesystem_id,
- doc_names_selected=doc_name
- ))
+ sio = StringIO(decrypted_data.data)
+ with gzip.GzipFile(mode='rb', fileobj=sio) as gzip_file:
+ unzipped_decrypted_data = gzip_file.read()
+ self.assertEqual(unzipped_decrypted_data, test_file_contents)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- self.assertIn("The following file has been selected for", resp.data)
-
- # confirm delete submission
- doc_name = soup.select
- doc_name = soup.select(
- 'ul > li > input[name="doc_names_selected"]')[0]['value']
- resp = self.journalist_app.post('/bulk', data=dict(
- action='delete',
- filesystem_id=filesystem_id,
- doc_names_selected=doc_name,
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
- self.assertIn("Submission deleted.", resp.data)
+ # delete submission
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ doc_name = soup.select(
+ 'ul > li > input[name="doc_names_selected"]')[0]['value']
+ resp = app.post('/bulk', data=dict(
+ action='confirm_delete',
+ filesystem_id=filesystem_id,
+ doc_names_selected=doc_name
+ ))
- # confirm that submission deleted and absent in list of submissions
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
- self.assertIn("No documents to display.", resp.data)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("The following file has been selected for",
+ resp.data)
+
+ # confirm delete submission
+ doc_name = soup.select
+ doc_name = soup.select(
+ 'ul > li > input[name="doc_names_selected"]')[0]['value']
+ resp = app.post('/bulk', data=dict(
+ action='delete',
+ filesystem_id=filesystem_id,
+ doc_names_selected=doc_name,
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ self.assertIn("Submission deleted.", resp.data)
- # the file should be deleted from the filesystem
- # since file deletion is handled by a polling worker, this test needs
- # to wait for the worker to get the job and execute it
- utils.async.wait_for_assertion(
- lambda: self.assertFalse(
- os.path.exists(store.path(filesystem_id, doc_name))
+ # confirm that submission deleted and absent in list of submissions
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("No documents to display.", resp.data)
+
+ # the file should be deleted from the filesystem
+ # since file deletion is handled by a polling worker, this test
+ # needs to wait for the worker to get the job and execute it
+ utils.async.wait_for_assertion(
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id, doc_name))
+ )
)
- )
def test_reply_normal(self):
self.helper_test_reply("This is a test reply.", True)
@@ -291,49 +308,52 @@ def _can_decrypt_with_key(self, msg, key_fpr, passphrase=None):
def helper_test_reply(self, test_reply, expected_success=True):
test_msg = "This is a test message."
- with self.source_app as source_app:
- resp = source_app.get('/generate')
- resp = source_app.post('/create', follow_redirects=True)
+ with self.source_app.test_client() as app:
+ resp = app.get('/generate')
+ resp = app.post('/create', follow_redirects=True)
codename = session['codename']
filesystem_id = g.filesystem_id
# redirected to submission form
- resp = source_app.post('/submit', data=dict(
+ resp = app.post('/submit', data=dict(
msg=test_msg,
fh=(StringIO(''), ''),
), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertFalse(g.source.flagged)
- source_app.get('/logout')
+ app.get('/logout')
- resp = self.journalist_app.get('/')
- self.assertEqual(resp.status_code, 200)
- self.assertIn("Sources", resp.data)
- soup = BeautifulSoup(resp.data, 'html.parser')
- col_url = soup.select('ul#cols > li a')[0]['href']
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.get('/')
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("Sources", resp.data)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ col_url = soup.select('ul#cols > li a')[0]['href']
- resp = self.journalist_app.get(col_url)
- self.assertEqual(resp.status_code, 200)
+ resp = app.get(col_url)
+ self.assertEqual(resp.status_code, 200)
- with self.source_app as source_app:
- resp = source_app.post('/login', data=dict(
+ with self.source_app.test_client() as app:
+ resp = app.post('/login', data=dict(
codename=codename), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertFalse(g.source.flagged)
- source_app.get('/logout')
+ app.get('/logout')
- with self.journalist_app as journalist_app:
- resp = journalist_app.post('/flag', data=dict(
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.post('/flag', data=dict(
filesystem_id=filesystem_id))
self.assertEqual(resp.status_code, 200)
- with self.source_app as source_app:
- resp = source_app.post('/login', data=dict(
+ with self.source_app.test_client() as app:
+ resp = app.post('/login', data=dict(
codename=codename), follow_redirects=True)
self.assertEqual(resp.status_code, 200)
self.assertTrue(g.source.flagged)
- source_app.get('/lookup')
+ app.get('/lookup')
self.assertTrue(g.source.flagged)
- source_app.get('/logout')
+ app.get('/logout')
# Block up to 15s for the reply keypair, so we can test sending a reply
utils.async.wait_for_assertion(
@@ -342,20 +362,22 @@ def helper_test_reply(self, test_reply, expected_success=True):
15)
# Create 2 replies to test deleting on journalist and source interface
- for i in range(2):
- resp = self.journalist_app.post('/reply', data=dict(
- filesystem_id=filesystem_id,
- message=test_reply
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ for i in range(2):
+ resp = app.post('/reply', data=dict(
+ filesystem_id=filesystem_id,
+ message=test_reply
+ ), follow_redirects=True)
+ self.assertEqual(resp.status_code, 200)
- if not expected_success:
- pass
- else:
- self.assertIn("Thanks. Your reply has been stored.", resp.data)
+ if not expected_success:
+ pass
+ else:
+ text = resp.data.decode('utf-8')
+ assert "Thanks. Your reply has been stored." in text, text
- with self.journalist_app as journalist_app:
- resp = journalist_app.get(col_url)
+ resp = app.get(col_url)
self.assertIn("reply-", resp.data)
soup = BeautifulSoup(resp.data, 'html.parser')
@@ -365,7 +387,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
checkbox_values = [
soup.select('input[name="doc_names_selected"]')[1]['value']]
- resp = self.journalist_app.post('/bulk', data=dict(
+ resp = app.post('/bulk', data=dict(
filesystem_id=filesystem_id,
action='download',
doc_names_selected=checkbox_values
@@ -381,13 +403,13 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Test deleting reply on the journalist interface
last_reply_number = len(
soup.select('input[name="doc_names_selected"]')) - 1
- self.helper_filenames_delete(soup, last_reply_number)
+ self.helper_filenames_delete(app, soup, last_reply_number)
- with self.source_app as source_app:
- resp = source_app.post('/login', data=dict(codename=codename),
- follow_redirects=True)
+ with self.source_app.test_client() as app:
+ resp = app.post('/login', data=dict(codename=codename),
+ follow_redirects=True)
self.assertEqual(resp.status_code, 200)
- resp = source_app.get('/lookup')
+ resp = app.get('/lookup')
self.assertEqual(resp.status_code, 200)
if not expected_success:
@@ -401,7 +423,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
soup = BeautifulSoup(resp.data, 'html.parser')
msgid = soup.select(
'form.message > input[name="reply_filename"]')[0]['value']
- resp = source_app.post('/delete', data=dict(
+ resp = app.post('/delete', data=dict(
filesystem_id=filesystem_id,
reply_filename=msgid
), follow_redirects=True)
@@ -413,204 +435,230 @@ def helper_test_reply(self, test_reply, expected_success=True):
lambda: self.assertFalse(os.path.exists(
store.path(filesystem_id, msgid))))
- source_app.get('/logout')
+ app.get('/logout')
@patch('source_app.main.async_genkey')
def test_delete_collection(self, async_genkey):
"""Test the "delete collection" button on each collection page"""
# first, add a source
- self.source_app.get('/generate')
- self.source_app.post('/create')
- resp = self.source_app.post('/submit', data=dict(
- msg="This is a test.",
- fh=(StringIO(''), ''),
- ), follow_redirects=True)
-
- assert resp.status_code == 200, resp.data.decode('utf-8')
-
- resp = self.journalist_app.get('/')
- # navigate to the collection page
- soup = BeautifulSoup(resp.data, 'html.parser')
- first_col_url = soup.select('ul#cols > li a')[0]['href']
- resp = self.journalist_app.get(first_col_url)
- self.assertEqual(resp.status_code, 200)
-
- # find the delete form and extract the post parameters
- soup = BeautifulSoup(resp.data, 'html.parser')
- delete_form_inputs = soup.select('form#delete-collection')[0]('input')
- filesystem_id = delete_form_inputs[1]['value']
- col_name = delete_form_inputs[2]['value']
+ with self.source_app.test_client() as app:
+ app.get('/generate')
+ app.post('/create')
+ resp = app.post('/submit', data=dict(
+ msg="This is a test.",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
- resp = self.journalist_app.post('/col/delete/' + filesystem_id,
- follow_redirects=True)
- self.assertEquals(resp.status_code, 200)
+ assert resp.status_code == 200, resp.data.decode('utf-8')
- self.assertIn(escape("%s's collection deleted" % (col_name,)),
- resp.data)
- self.assertIn("No documents have been submitted!", resp.data)
- self.assertTrue(async_genkey.called)
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.get('/')
+ # navigate to the collection page
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ first_col_url = soup.select('ul#cols > li a')[0]['href']
+ resp = app.get(first_col_url)
+ self.assertEqual(resp.status_code, 200)
- # Make sure the collection is deleted from the filesystem
- utils.async.wait_for_assertion(
- lambda: self.assertFalse(os.path.exists(store.path(filesystem_id)))
- )
+ # find the delete form and extract the post parameters
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ delete_form_inputs = soup.select(
+ 'form#delete-collection')[0]('input')
+ filesystem_id = delete_form_inputs[1]['value']
+ col_name = delete_form_inputs[2]['value']
+
+ resp = app.post('/col/delete/' + filesystem_id,
+ follow_redirects=True)
+ self.assertEquals(resp.status_code, 200)
+
+ self.assertIn(escape("%s's collection deleted" % (col_name,)),
+ resp.data)
+ self.assertIn("No documents have been submitted!", resp.data)
+ self.assertTrue(async_genkey.called)
+
+ # Make sure the collection is deleted from the filesystem
+ utils.async.wait_for_assertion(
+ lambda: self.assertFalse(
+ os.path.exists(store.path(filesystem_id)))
+ )
@patch('source_app.main.async_genkey')
def test_delete_collections(self, async_genkey):
"""Test the "delete selected" checkboxes on the index page that can be
used to delete multiple collections"""
# first, add some sources
- num_sources = 2
- for i in range(num_sources):
- self.source_app.get('/generate')
- self.source_app.post('/create')
- self.source_app.post('/submit', data=dict(
- msg="This is a test " + str(i) + ".",
- fh=(StringIO(''), ''),
+ with self.source_app.test_client() as app:
+ num_sources = 2
+ for i in range(num_sources):
+ app.get('/generate')
+ app.post('/create')
+ app.post('/submit', data=dict(
+ msg="This is a test " + str(i) + ".",
+ fh=(StringIO(''), ''),
+ ), follow_redirects=True)
+ app.get('/logout')
+
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.get('/')
+ # get all the checkbox values
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ checkbox_values = [checkbox['value'] for checkbox in
+ soup.select('input[name="cols_selected"]')]
+
+ resp = app.post('/col/process', data=dict(
+ action='delete',
+ cols_selected=checkbox_values
), follow_redirects=True)
- self.source_app.get('/logout')
-
- resp = self.journalist_app.get('/')
- # get all the checkbox values
- soup = BeautifulSoup(resp.data, 'html.parser')
- checkbox_values = [checkbox['value'] for checkbox in
- soup.select('input[name="cols_selected"]')]
-
- resp = self.journalist_app.post('/col/process', data=dict(
- action='delete',
- cols_selected=checkbox_values
- ), follow_redirects=True)
- self.assertEqual(resp.status_code, 200)
- self.assertIn("%s collections deleted" % (num_sources,), resp.data)
- self.assertTrue(async_genkey.called)
+ self.assertEqual(resp.status_code, 200)
+ self.assertIn("%s collections deleted" % (num_sources,), resp.data)
+ self.assertTrue(async_genkey.called)
- # Make sure the collections are deleted from the filesystem
- utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(filesystem_id))
- for filesystem_id in checkbox_values])))
+ # Make sure the collections are deleted from the filesystem
+ utils.async.wait_for_assertion(lambda: self.assertFalse(
+ any([os.path.exists(store.path(filesystem_id))
+ for filesystem_id in checkbox_values])))
def test_filenames(self):
"""Test pretty, sequential filenames when source uploads messages
and files"""
# add a source and submit stuff
- self.source_app.get('/generate')
- self.source_app.post('/create')
- self.helper_filenames_submit()
+ with self.source_app.test_client() as app:
+ app.get('/generate')
+ app.post('/create')
+ self.helper_filenames_submit(app)
# navigate to the collection page
- resp = self.journalist_app.get('/')
- soup = BeautifulSoup(resp.data, 'html.parser')
- first_col_url = soup.select('ul#cols > li a')[0]['href']
- resp = self.journalist_app.get(first_col_url)
- self.assertEqual(resp.status_code, 200)
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.get('/')
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ first_col_url = soup.select('ul#cols > li a')[0]['href']
+ resp = app.get(first_col_url)
+ self.assertEqual(resp.status_code, 200)
- # test filenames and sort order
- soup = BeautifulSoup(resp.data, 'html.parser')
- submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$'
- for i, submission_link in enumerate(
- soup.select('ul#submissions li a .filename')):
- filename = str(submission_link.contents[0])
- self.assertTrue(re.match(submission_filename_re.format(i + 1),
- filename))
+ # test filenames and sort order
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$'
+ for i, submission_link in enumerate(
+ soup.select('ul#submissions li a .filename')):
+ filename = str(submission_link.contents[0])
+ self.assertTrue(re.match(submission_filename_re.format(i + 1),
+ filename))
def test_filenames_delete(self):
"""Test pretty, sequential filenames when journalist deletes files"""
# add a source and submit stuff
- self.source_app.get('/generate')
- self.source_app.post('/create')
- self.helper_filenames_submit()
+ with self.source_app.test_client() as app:
+ app.get('/generate')
+ app.post('/create')
+ self.helper_filenames_submit(app)
# navigate to the collection page
- resp = self.journalist_app.get('/')
- soup = BeautifulSoup(resp.data, 'html.parser')
- first_col_url = soup.select('ul#cols > li a')[0]['href']
- resp = self.journalist_app.get(first_col_url)
- self.assertEqual(resp.status_code, 200)
- soup = BeautifulSoup(resp.data, 'html.parser')
-
- # delete file #2
- self.helper_filenames_delete(soup, 1)
- resp = self.journalist_app.get(first_col_url)
- soup = BeautifulSoup(resp.data, 'html.parser')
-
- # test filenames and sort order
- submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$'
- filename = str(
- soup.select('ul#submissions li a .filename')[0].contents[0])
- self.assertTrue(re.match(submission_filename_re.format(1), filename))
- filename = str(
- soup.select('ul#submissions li a .filename')[1].contents[0])
- self.assertTrue(re.match(submission_filename_re.format(3), filename))
- filename = str(
- soup.select('ul#submissions li a .filename')[2].contents[0])
- self.assertTrue(re.match(submission_filename_re.format(4), filename))
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ resp = app.get('/')
+ soup = BeautifulSoup(resp.data, 'html.parser')
+ first_col_url = soup.select('ul#cols > li a')[0]['href']
+ resp = app.get(first_col_url)
+ self.assertEqual(resp.status_code, 200)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+
+ # delete file #2
+ self.helper_filenames_delete(app, soup, 1)
+ resp = app.get(first_col_url)
+ soup = BeautifulSoup(resp.data, 'html.parser')
+
+ # test filenames and sort order
+ submission_filename_re = r'^{0}-[a-z0-9-_]+(-msg|-doc\.gz)\.gpg$'
+ filename = str(
+ soup.select('ul#submissions li a .filename')[0].contents[0])
+ self.assertTrue(re.match(submission_filename_re.format(1),
+ filename))
+ filename = str(
+ soup.select('ul#submissions li a .filename')[1].contents[0])
+ self.assertTrue(re.match(submission_filename_re.format(3),
+ filename))
+ filename = str(
+ soup.select('ul#submissions li a .filename')[2].contents[0])
+ self.assertTrue(re.match(submission_filename_re.format(4),
+ filename))
def test_user_change_password(self):
"""Test that a journalist can successfully login after changing
their password"""
- # change password
- new_pw = 'another correct horse battery staply long password'
- self.journalist_app.post('/account/new-password',
- data=dict(password=new_pw,
- current_password=self.user_pw,
- token='mocked'))
-
- # logout
- self.journalist_app.get('/logout')
-
- # login with new credentials should redirect to index page
- resp = self.journalist_app.post('/login', data=dict(
- username=self.user.username,
- password=new_pw,
- token='mocked',
- follow_redirects=True))
- self.assertEqual(resp.status_code, 302)
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ # change password
+ new_pw = 'another correct horse battery staply long password'
+ app.post('/account/new-password',
+ data=dict(password=new_pw,
+ current_password=self.user_pw,
+ token='mocked'))
+
+ # logout
+ app.get('/logout')
+
+ # login with new credentials should redirect to index page
+ resp = app.post('/login', data=dict(
+ username=self.username,
+ password=new_pw,
+ token='mocked',
+ follow_redirects=True))
+ self.assertEqual(resp.status_code, 302)
def test_login_after_regenerate_hotp(self):
"""Test that journalists can login after resetting their HOTP 2fa"""
# edit hotp
- self.journalist_app.post('/account/reset-2fa-hotp', data=dict(
- otp_secret=123456))
-
- # successful verificaton should redirect to /account
- resp = self.journalist_app.post('/account/2fa', data=dict(
- token=self.user.hotp))
- self.assertEqual(resp.status_code, 302)
-
- # log out
- self.journalist_app.get('/logout')
-
- # login with new 2fa secret should redirect to index page
- resp = self.journalist_app.post('/login', data=dict(
- username=self.user.username,
- password=self.user_pw,
- token=self.user.hotp,
- follow_redirects=True))
- self.assertEqual(resp.status_code, 302)
-
- def helper_filenames_submit(self):
- self.source_app.post('/submit', data=dict(
+ with self.journalist_app.test_client() as app:
+ self._login_user(app)
+ otp_secret = '123456'
+ app.post('/account/reset-2fa-hotp', data=dict(
+ otp_secret=otp_secret))
+
+ user = Journalist(username=self.username,
+ password=self.user_pw,
+ otp_secret=otp_secret)
+
+ # successful verificaton should redirect to /account
+ resp = app.post('/account/2fa', data=dict(
+ token=user.hotp))
+ self.assertEqual(resp.status_code, 302)
+
+ # log out
+ app.get('/logout')
+
+ # login with new 2fa secret should redirect to index page
+ resp = app.post('/login', data=dict(
+ username=self.username,
+ password=self.user_pw,
+ token=user.hotp,
+ follow_redirects=True))
+ self.assertEqual(resp.status_code, 302)
+
+ def helper_filenames_submit(self, app):
+ app.post('/submit', data=dict(
msg="This is a test.",
fh=(StringIO(''), ''),
), follow_redirects=True)
- self.source_app.post('/submit', data=dict(
+ app.post('/submit', data=dict(
msg="This is a test.",
fh=(StringIO('This is a test'), 'test.txt'),
), follow_redirects=True)
- self.source_app.post('/submit', data=dict(
+ app.post('/submit', data=dict(
msg="",
fh=(StringIO('This is a test'), 'test.txt'),
), follow_redirects=True)
- def helper_filenames_delete(self, soup, i):
+ def helper_filenames_delete(self, app, soup, i):
filesystem_id = soup.select('input[name="filesystem_id"]')[0]['value']
checkbox_values = [
soup.select('input[name="doc_names_selected"]')[i]['value']]
# delete
- resp = self.journalist_app.post('/bulk', data=dict(
+ resp = app.post('/bulk', data=dict(
filesystem_id=filesystem_id,
action='confirm_delete',
doc_names_selected=checkbox_values
@@ -622,7 +670,7 @@ def helper_filenames_delete(self, soup, i):
resp.data)
# confirm delete
- resp = self.journalist_app.post('/bulk', data=dict(
+ resp = app.post('/bulk', data=dict(
filesystem_id=filesystem_id,
action='delete',
doc_names_selected=checkbox_values
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -14,9 +14,10 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
-from db import (db_session, InvalidPasswordLength, Journalist, Reply, Source,
- Submission)
-import db
+from db import db
+from models import (InvalidPasswordLength, Journalist, Reply, Source,
+ Submission)
+import models
import journalist
import journalist_app
import journalist_app.utils
@@ -65,7 +66,7 @@ def test_reply_error_logging(self, mocked_error_logger):
exception_class = StaleDataError
exception_msg = 'Potentially sensitive content!'
- with patch('db.db_session.commit',
+ with patch('sqlalchemy.orm.scoping.scoped_session.commit',
side_effect=exception_class(exception_msg)):
self.client.post(url_for('main.reply'),
data={'filesystem_id': filesystem_id,
@@ -85,7 +86,8 @@ def test_reply_error_flashed_message(self):
exception_class = StaleDataError
- with patch('db.db_session.commit', side_effect=exception_class()):
+ with patch('sqlalchemy.orm.scoping.scoped_session.commit',
+ side_effect=exception_class()):
self.client.post(url_for('main.reply'),
data={'filesystem_id': filesystem_id,
'message': '_'})
@@ -123,7 +125,7 @@ def test_unauthorized_access_redirects_to_login(self):
self.assertRedirects(resp, url_for('main.login'))
def test_login_throttle(self):
- db.LOGIN_HARDENING = True
+ models.LOGIN_HARDENING = True
try:
for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD):
resp = self.client.post(url_for('main.login'),
@@ -141,7 +143,7 @@ def test_login_throttle(self):
self.assertIn("Please wait at least {} seconds".format(
Journalist._LOGIN_ATTEMPT_PERIOD), resp.data)
finally:
- db.LOGIN_HARDENING = False
+ models.LOGIN_HARDENING = False
def test_login_invalid_credentials(self):
resp = self.client.post(url_for('main.login'),
@@ -320,14 +322,16 @@ def test_admin_edits_user_password_success_response(self):
def test_admin_edits_user_password_error_response(self):
self._login_admin()
- with patch('db.db_session.commit', side_effect=Exception()):
+ with patch('sqlalchemy.orm.scoping.scoped_session.commit',
+ side_effect=Exception()):
resp = self.client.post(
url_for('admin.new_password', user_id=self.user.id),
data=dict(password=VALID_PASSWORD_2),
follow_redirects=True)
+ text = resp.data.decode('utf-8')
assert ('There was an error, and the new password might not have '
- 'been saved correctly.') in resp.data.decode('utf-8')
+ 'been saved correctly.') in text, text
def test_user_edits_password_success_response(self):
self._login_user()
@@ -366,7 +370,8 @@ def test_user_edits_password_expires_session(self):
def test_user_edits_password_error_reponse(self):
self._login_user()
- with patch('db.db_session.commit', side_effect=Exception()):
+ with patch('sqlalchemy.orm.scoping.scoped_session.commit',
+ side_effect=Exception()):
resp = self.client.post(
url_for('account.new_password'),
data=dict(current_password=self.user_pw,
@@ -502,7 +507,7 @@ def test_admin_resets_user_hotp_format_odd(self):
"Invalid secret format: "
"odd-length secret. Did you mistype the secret?", "error")
- @patch('db.Journalist.set_hotp_secret')
+ @patch('models.Journalist.set_hotp_secret')
@patch('journalist.app.logger.error')
def test_admin_resets_user_hotp_error(self,
mocked_error_logger,
@@ -909,7 +914,7 @@ def test_delete_source_deletes_submissions(self):
journalist_app.utils.delete_collection(self.source.filesystem_id)
# Source should be gone
- results = db_session.query(Source).filter(
+ results = db.session.query(Source).filter(
Source.id == self.source.id).all()
self.assertEqual(results, [])
@@ -926,10 +931,10 @@ def test_delete_collection_updates_db(self):
journalist_app.utils.delete_collection(self.source.filesystem_id)
results = Source.query.filter(Source.id == self.source.id).all()
self.assertEqual(results, [])
- results = db_session.query(
+ results = db.session.query(
Submission.source_id == self.source.id).all()
self.assertEqual(results, [])
- results = db_session.query(Reply.source_id == self.source.id).all()
+ results = db.session.query(Reply.source_id == self.source.id).all()
self.assertEqual(results, [])
def test_delete_source_deletes_source_key(self):
@@ -1250,7 +1255,7 @@ def test_col_process_successfully_deletes_multiple_sources(self):
self.assert200(resp)
# Verify there are no remaining sources
- remaining_sources = db_session.query(db.Source).all()
+ remaining_sources = db.session.query(models.Source).all()
self.assertEqual(len(remaining_sources), 0)
def test_col_process_successfully_stars_sources(self):
@@ -1338,6 +1343,8 @@ def test_render_locales(self):
class TestJournalistLogin(unittest.TestCase):
def setUp(self):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
utils.env.setup()
# Patch the two-factor verification so it always succeeds
@@ -1347,14 +1354,10 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
- # TODO: figure out why this is necessary here, but unnecessary in all
- # of the tests in `tests/test_unit_*.py`. Without this, the session
- # continues to return values even if the underlying database is deleted
- # (as in `shared_teardown`).
- db_session.remove()
-
- @patch('db.Journalist._scrypt_hash')
- @patch('db.Journalist.valid_password', return_value=True)
+ self.__context.pop()
+
+ @patch('models.Journalist._scrypt_hash')
+ @patch('models.Journalist.valid_password', return_value=True)
def test_valid_login_calls_scrypt(self,
mock_scrypt_hash,
mock_valid_password):
@@ -1363,7 +1366,7 @@ def test_valid_login_calls_scrypt(self,
mock_scrypt_hash.called,
"Failed to call _scrypt_hash for password w/ valid length")
- @patch('db.Journalist._scrypt_hash')
+ @patch('models.Journalist._scrypt_hash')
def test_login_with_invalid_password_doesnt_call_scrypt(self,
mock_scrypt_hash):
invalid_pw = 'a'*(Journalist.MAX_PASSWORD_LEN + 1)
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -19,7 +19,10 @@
import version
import utils
-from db import Journalist, db_session
+import journalist_app
+
+from db import db
+from models import Journalist
YUBIKEY_HOTP = ['cb a0 5f ad 41 a2 ff 4e eb 53 56 3a 1b f7 23 2e ce fc dc',
@@ -27,6 +30,7 @@
class TestManagePy(object):
+
def test_parse_args(self):
# just test that the arg parser is stable
manage.get_args()
@@ -45,11 +49,17 @@ def test_verbose(self, caplog):
class TestManagementCommand(unittest.TestCase):
+
def setUp(self):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
utils.env.setup()
+ self.__context.pop()
def tearDown(self):
+ self.__context.push()
utils.env.teardown()
+ self.__context.pop()
@mock.patch("__builtin__.raw_input", return_value='jen')
def test_get_username_success(self, mock_stdin):
@@ -147,7 +157,7 @@ def test_reset(self):
assert os.path.exists(config.STORE_DIR)
# Verify journalist user present in the database is gone
- db_session.remove() # Close session and get a session on the new db
+ db.session.remove() # Close session and get a session on the new db
with self.assertRaises(NoResultFound):
Journalist.query.filter_by(username=user_should_be_gone).one()
@@ -155,11 +165,14 @@ def test_reset(self):
class TestManage(object):
def setup(self):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
self.dir = abspath(dirname(realpath(__file__)))
utils.env.setup()
def teardown(self):
utils.env.teardown()
+ self.__context.pop()
@mock.patch("__builtin__.raw_input", return_value='foo-bar-baz')
def test_get_username(self, mock_get_usernam):
diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py
--- a/securedrop/tests/test_secure_tempfile.py
+++ b/securedrop/tests/test_secure_tempfile.py
@@ -6,18 +6,23 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
+import journalist_app
import secure_tempfile
import utils
class TestSecureTempfile(unittest.TestCase):
+
def setUp(self):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
utils.env.setup()
self.f = secure_tempfile.SecureTemporaryFile(config.STORE_DIR)
self.msg = '410,757,864,530'
def tearDown(self):
utils.env.teardown()
+ self.__context.pop()
def test_read_before_writing(self):
with self.assertRaisesRegexp(AssertionError,
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -8,7 +8,8 @@
from flask_testing import TestCase
import crypto_util
-from db import db_session, Source
+from db import db
+from models import Source
import source
import version
import utils
@@ -459,8 +460,8 @@ def test_source_is_deleted_while_logged_in(self, logger):
filesystem_id = crypto_util.hash_codename(codename)
crypto_util.delete_reply_keypair(filesystem_id)
source = Source.query.filter_by(filesystem_id=filesystem_id).one()
- db_session.delete(source)
- db_session.commit()
+ db.session.delete(source)
+ db.session.commit()
# Source attempts to continue to navigate
resp = client.post('/lookup', follow_redirects=True)
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -6,7 +6,7 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-from db import db_session
+import journalist_app
import store
import utils
@@ -16,11 +16,13 @@ class TestStore(unittest.TestCase):
"""The set of tests for store.py."""
def setUp(self):
+ self.__context = journalist_app.create_app(config).app_context()
+ self.__context.push()
utils.env.setup()
def tearDown(self):
utils.env.teardown()
- db_session.remove()
+ self.__context.pop()
def create_file_in_source_dir(self, filesystem_id, filename):
"""Helper function for simulating files"""
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
--- a/securedrop/tests/utils/db_helper.py
+++ b/securedrop/tests/utils/db_helper.py
@@ -8,27 +8,29 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
-import db
+import models
import store
-# db.{Journalist, Reply}
+from db import db
+
+# models.{Journalist, Reply}
def init_journalist(is_admin=False):
"""Initialize a journalist into the database. Return their
- :class:`db.Journalist` object and password string.
+ :class:`models.Journalist` object and password string.
:param bool is_admin: Whether the user is an admin.
- :returns: A 2-tuple. The first entry, an :obj:`db.Journalist`
+ :returns: A 2-tuple. The first entry, an :obj:`models.Journalist`
corresponding to the row just added to the database. The
second, their password string.
"""
username = crypto_util.genrandomid()
user_pw = crypto_util.genrandomid()
- user = db.Journalist(username, user_pw, is_admin)
- db.db_session.add(user)
- db.db_session.commit()
+ user = models.Journalist(username, user_pw, is_admin)
+ db.session.add(user)
+ db.session.commit()
return user, user_pw
@@ -36,14 +38,14 @@ def reply(journalist, source, num_replies):
"""Generates and submits *num_replies* replies to *source*
from *journalist*. Returns reply objects as a list.
- :param db.Journalist journalist: The journalist to write the
+ :param models.Journalist journalist: The journalist to write the
reply from.
- :param db.Source source: The source to send the reply to.
+ :param models.Source source: The source to send the reply to.
:param int num_replies: Number of random-data replies to make.
- :returns: A list of the :class:`db.Reply`s submitted.
+ :returns: A list of the :class:`models.Reply`s submitted.
"""
assert num_replies >= 1
replies = []
@@ -57,11 +59,11 @@ def reply(journalist, source, num_replies):
config.JOURNALIST_KEY
],
store.path(source.filesystem_id, fname))
- reply = db.Reply(journalist, source, fname)
+ reply = models.Reply(journalist, source, fname)
replies.append(reply)
- db.db_session.add(reply)
+ db.session.add(reply)
- db.db_session.commit()
+ db.session.commit()
return replies
@@ -72,7 +74,7 @@ def mock_verify_token(testcase):
:param unittest.TestCase testcase: The test case for which to patch
TOTP verification.
"""
- patcher = mock.patch('db.Journalist.verify_token')
+ patcher = mock.patch('models.Journalist.verify_token')
testcase.addCleanup(patcher.stop)
testcase.mock_journalist_verify_token = patcher.start()
testcase.mock_journalist_verify_token.return_value = True
@@ -81,31 +83,31 @@ def mock_verify_token(testcase):
def mark_downloaded(*submissions):
"""Mark *submissions* as downloaded in the database.
- :param db.Submission submissions: One or more submissions that
+ :param models.Submission submissions: One or more submissions that
should be marked as downloaded.
"""
for submission in submissions:
submission.downloaded = True
- db.db_session.commit()
+ db.session.commit()
-# db.{Source,Submission}
+# models.{Source,Submission}
def init_source_without_keypair():
"""Initialize a source: create their database record and the
filesystem directory that stores their submissions & replies.
Return a source object and their codename string.
- :returns: A 2-tuple. The first entry, the :class:`db.Source`
+ :returns: A 2-tuple. The first entry, the :class:`models.Source`
initialized. The second, their codename string.
"""
# Create source identity and database record
codename = crypto_util.genrandomid()
filesystem_id = crypto_util.hash_codename(codename)
journalist_filename = crypto_util.display_id()
- source = db.Source(filesystem_id, journalist_filename)
- db.db_session.add(source)
- db.db_session.commit()
+ source = models.Source(filesystem_id, journalist_filename)
+ db.session.add(source)
+ db.session.commit()
# Create the directory to store their submissions and replies
os.mkdir(store.path(source.filesystem_id))
@@ -118,7 +120,7 @@ def init_source():
and their GPG key encrypted with their codename. Return a source
object and their codename string.
- :returns: A 2-tuple. The first entry, the :class:`db.Source`
+ :returns: A 2-tuple. The first entry, the :class:`models.Source`
initialized. The second, their codename string.
"""
source, codename = init_source_without_keypair()
@@ -129,16 +131,16 @@ def init_source():
def submit(source, num_submissions):
"""Generates and submits *num_submissions*
- :class:`db.Submission`s on behalf of a :class:`db.Source`
+ :class:`models.Submission`s on behalf of a :class:`models.Source`
*source*.
- :param db.Source source: The source on who's behalf to make
+ :param models.Source source: The source on who's behalf to make
submissions.
:param int num_submissions: Number of random-data submissions
to make.
- :returns: A list of the :class:`db.Submission`s submitted.
+ :returns: A list of the :class:`models.Submission`s submitted.
"""
assert num_submissions >= 1
submissions = []
@@ -148,11 +150,11 @@ def submit(source, num_submissions):
source.interaction_count,
source.journalist_filename,
str(os.urandom(1)))
- submission = db.Submission(source, fpath)
+ submission = models.Submission(source, fpath)
submissions.append(submission)
- db.db_session.add(submission)
+ db.session.add(submission)
- db.db_session.commit()
+ db.session.commit()
return submissions
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -12,7 +12,7 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
-from db import init_db, db_session
+from db import db
FILES_DIR = abspath(join(dirname(realpath(__file__)), '..', 'files'))
@@ -49,7 +49,7 @@ def setup():
"""Set up the file system, GPG, and database."""
create_directories()
init_gpg()
- init_db()
+ db.create_all()
# Do tests that should always run on app startup
crypto_util.do_runtime_tests()
@@ -61,7 +61,7 @@ def teardown():
for t in threading.enumerate():
if t.is_alive() and not isinstance(t, threading._MainThread):
t.join()
- db_session.remove()
+ db.session.remove()
shutil.rmtree(config.TEMP_DIR)
try:
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
| Move custom SQLAlchemy session management to Flask-SQLAlchemy
# Feature request
## Description
We maintain our own code to handle the database operations, and it would make more sense to use someone else's code. This will also make usage of Alembic cleaner for #1419.
## User Stories
As a developer, I don't want to maintain custom code just to keep SecureDrop running.
| 2018-01-19T11:54:15Z | [] | [] |
|
freedomofpress/securedrop | 2,903 | freedomofpress__securedrop-2903 | [
"1609"
] | a8fc1f93651b7516fd8d16a41724ae3282018c68 | diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -5,7 +5,7 @@
import os
import subprocess
-from Crypto.Random import random
+from Cryptodome.Random import random
import gnupg
from gnupg._util import _is_stream, _make_binary_stream
import scrypt
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -4,9 +4,9 @@
from tempfile import _TemporaryFileWrapper
from gnupg._util import _STREAMLIKE_TYPES
-from Crypto.Cipher import AES
-from Crypto.Random import random
-from Crypto.Util import Counter
+from Cryptodome.Cipher import AES
+from Cryptodome.Random import random
+from Cryptodome.Util import Counter
class SecureTemporaryFile(_TemporaryFileWrapper, object):
| diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -12,7 +12,7 @@
import traceback
import requests
-from Crypto import Random
+from Cryptodome import Random
from selenium import webdriver
from selenium.common.exceptions import (WebDriverException,
NoAlertPresentException)
| PyCrypto is no longer being actively maintained
As first noted in https://github.com/freedomofpress/securedrop/issues/204#issuecomment-272338348:
> So it's come to our attention that we need to replace PyCrypto, the progress of which is pretty much stalled (e.g., [this year-old vulnerability](https://github.com/dlitz/pycrypto/issues/173) has still not been fixed in a release, even though it's patched in master--note this one does not effect SD).... Ideally, we'd use https://github.com/pyca/pynacl, which bundles `libsodium`, as a replacement...
While as noted there are no known PyCrypto vulnerabilities that affect SD, we still should still have it in our timeline for the coming months to look into replacements.
| We might want to add https://github.com/pyca/cryptography as well, depending on the functionality that's being replaced.
Both would work, but libsodium offers a lot more functionality that we might not use at first, but will be available should we choose to use it later.
Hmm, `pycryptodome` is an actively maintained drop-in replacement for the aging `PyCrypto` library, this is probably the best candidate for a quick replacement: https://github.com/Legrandin/pycryptodome | 2018-01-19T22:34:27Z | [] | [] |
freedomofpress/securedrop | 2,907 | freedomofpress__securedrop-2907 | [
"2906"
] | 5bbace368b95fe84da3e53c5551b3628416a9803 | diff --git a/securedrop/db.py b/securedrop/db.py
--- a/securedrop/db.py
+++ b/securedrop/db.py
@@ -6,7 +6,7 @@
# Find the best implementation available on this platform
try:
from cStringIO import StringIO
-except:
+except ImportError:
from StringIO import StringIO
from sqlalchemy import create_engine, ForeignKey
diff --git a/securedrop/i18n.py b/securedrop/i18n.py
--- a/securedrop/i18n.py
+++ b/securedrop/i18n.py
@@ -87,7 +87,7 @@ def get_locale(config):
sep = '_'
try:
accept_languages.append(str(core.Locale.parse(l, sep)))
- except:
+ except Exception:
pass
if 'l' in request.args:
if len(request.args['l']) == 0:
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -95,7 +95,7 @@ def validate_user(username, password, token, error_message=None):
login_flashed_msg += gettext(
"Please wait for a new code from your two-factor token"
" or application before trying again.")
- except:
+ except Exception:
pass
flash(login_flashed_msg, "error")
| diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -144,7 +144,7 @@ def start_journalist_server():
try:
requests.get(self.source_location)
requests.get(self.journalist_location)
- except:
+ except Exception:
time.sleep(1)
else:
break
| Pyenchant depends on enchant and dev env is borked
# Bug
## Description
`pyenchant` depends on `enchant` which is incompatible with other dev dependencies.
## Steps to Reproduce
Running `vagrant up` on `develop` yields this error.
```
Traceback (most recent call last):
File \"<string>\", line 17, in <module>
File \"/tmp/pip_build_root/pyenchant/setup.py\", line 212, in <module>
import enchant
File \"enchant/__init__.py\", line 92, in <module>
from enchant import _enchant as _e
File \"enchant/_enchant.py\", line 145, in <module>
raise ImportError(msg)
ImportError: The 'enchant' C library was not found. Please install it via your OS package manager, or use a pre-built binary wheel from PyPI.
Complete output from command python setup.py egg_info:
Traceback (most recent call last):
File \"<string>\", line 17, in <module>
File \"/tmp/pip_build_root/pyenchant/setup.py\", line 212, in <module>
import enchant
File \"enchant/__init__.py\", line 92, in <module>
from enchant import _enchant as _e
File \"enchant/_enchant.py\", line 145, in <module
raise ImportError(msg)
ImportError: The 'enchant' C library was not found. Please install it via your OS package manager, or use a pre-built binary wheel from PyPI.
----------------------------------------
Cleaning up...
Command python setup.py egg_info failed with error code 1 in /tmp/pip_build_root/pyenchant
Storing debug log for failure in /root/.pip/pip.log
```
However, adding `enchant` to `development_dependencies` yields this error:
```
TASK [app-test : Install Firefox 46.0.1 for compatibility with Selenium 2.53.6.] ***
fatal: [development]: FAILED! => {"changed": false, "failed": true, "msg": "Breaks existing package 'hunspell-en-us' that conflict: 'iceweasel'. But the '/opt/firefox_46.0.1+build1-0ubuntu0.14.04.3_amd64.deb' provides it via: 'gnome-www-browser,iceweasel,www-browser'"}
```
## Expected Behavior
I can start the dev environment.
## Actual Behavior
I can't
## Notes
This was introduced by #2905, but I think it came from #2780 originally.
| I confirmed the problem locally. It was not noticed because the CI does not **vagrant up** and developers only rarely destroy the development VM and reprovision it. | 2018-01-21T13:15:19Z | [] | [] |
freedomofpress/securedrop | 2,922 | freedomofpress__securedrop-2922 | [
"2885"
] | b4d3ae30dfc40b26a7d39f94128205c958274afb | diff --git a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py b/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
deleted file mode 100755
--- a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/python2.7
-
-from datetime import datetime
-import os
-import shutil
-import sqlite3
-import subprocess
-import sys
-import tarfile
-import traceback
-
-
-def backup_app():
- tar_fn = 'backup-app-{}.tar.bz2'.format(datetime.now().strftime("%Y-%m-%d--%H-%M-%S"))
- with tarfile.open(tar_fn, 'w:bz2') as t:
- t.add('/var/lib/securedrop/')
- t.add('/var/lib/tor/services/')
- t.add('/var/www/securedrop/config.py')
- try:
- t.add('/var/www/securedrop/static/i/logo.png')
- except OSError:
- print "[!] Expected but non-essential file ('logo.png') not found. Continuing..."
- print "** Backed up system to {} before migrating.".format(tar_fn)
-
-
-def backup_mon():
- # The only thing we have to back up for the monitor server is the SSH ATHS cert.
- # All other required values are available in prod-specific.yml from the installation.
- tar_fn = 'backup-mon-{}.tar.bz2'.format(datetime.now().strftime("%Y-%m-%d--%H-%M-%S"))
- with tarfile.open(tar_fn, 'w:bz2') as t:
- t.add('/var/lib/tor/services/')
- print "** Backed up system to {} before migrating.".format(tar_fn)
-
-
-def secure_unlink(path):
- subprocess.check_call(['srm', '-r', path])
-
-
-def cleanup_deleted_sources(store_dir, c):
- """
- In 0.3pre and 0.3, there were two bugs that could potentially lead
- to the source directory failing to be deleted when a source was
- deleted from the Journalist Interface. We clean up these leftover
- directories as part of the migration.
-
- These sources can be identified because they have a source_dir in
- the store_dir, but no corresponding Source entry in the database.
-
- See https://github.com/freedomofpress/securedrop/pull/944 for context.
- """
- for source_dir in os.listdir(store_dir):
- try:
- source = c.execute("SELECT * FROM sources WHERE filesystem_id=?",
- (source_dir,)).fetchone()
- if not source:
- print "Deleting source with no db entry ('{}')...".format(source_dir)
- secure_unlink(os.path.join(store_dir, source_dir))
- except Exception as e:
- print "\n!! Error occurred cleaning up deleted sources for source {}".format(source_dir)
- print "Source had {} submissions".format(len(os.listdir(os.path.join(store_dir, source_dir))))
- print traceback.format_exc()
-
-
-def get_db_connection():
- db_path = "/var/lib/securedrop/db.sqlite"
- assert os.path.isfile(db_path)
- conn = sqlite3.connect(db_path)
- return conn, conn.cursor()
-
-
-def migrate_app_db():
- store_dir = "/var/lib/securedrop/store"
- conn, c = get_db_connection()
-
- # Before modifying the database, clean up any source directories that were
- # left on the filesystem after the sources were deleted.
- cleanup_deleted_sources(store_dir, c)
-
- # To get CREATE TABLE from SQLAlchemy:
- # >>> import db
- # >>> from sqlalchemy.schema import CreateTable
- # >>> print CreateTable(db.Journalist.__table__).compile(db.engine)
- # Or, add `echo=True` to the engine constructor.
- # CREATE TABLE replies
- c.execute("""
-CREATE TABLE replies (
- id INTEGER NOT NULL,
- journalist_id INTEGER,
- source_id INTEGER,
- filename VARCHAR(255) NOT NULL,
- size INTEGER NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY(journalist_id) REFERENCES journalists (id),
- FOREIGN KEY(source_id) REFERENCES sources (id)
-)""")
-
- # Fill in replies from the replies in STORE_DIR at the time of the migration
- #
- # Caveats:
- #
- # 1. Before we added the `replies` table, we did not keep track of which
- # journalist wrote the reply. There is no way for us to reverse-engineer
- # that information, so the migration will default to saying they were all
- # created by the first journalist (arbitrarily). Since we do not surface
- # this in the UI yet anyway, it should not be a big deal.
- #
- # 2. We do not try to get the order of the (autoincrementing primary key)
- # reply_id to match the order in which the replies were created (which could
- # be inferred from the file timestamps, since we only normalize submission
- # timestamps and not reply timestamps) since this order is not used anywhere
- # in the code.
-
- # Copy from db.py to compute filesystem-safe journalist filenames
- def journalist_filename(s):
- valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in s.lower().replace(' ', '_') if c in valid_chars])
-
- reply_id = 1
- for source_dir in os.listdir(store_dir):
- try:
- source_id, journalist_designation = c.execute(
- "SELECT id, journalist_designation FROM sources WHERE filesystem_id=?",
- (source_dir,)).fetchone()
- except sqlite3.Error as e:
- print "!!\tError occurred migrating replies for source {}".format(source_dir)
- print traceback.format_exc()
- continue
-
- for filename in os.listdir(os.path.join(store_dir, source_dir)):
- if "-reply.gpg" not in filename:
- continue
-
- # Rename the reply file from 0.3pre convention to 0.3 convention
- interaction_count = filename.split('-')[0]
- new_filename = "{}-{}-reply.gpg".format(interaction_count,
- journalist_filename(journalist_designation))
- os.rename(os.path.join(store_dir, source_dir, filename),
- os.path.join(store_dir, source_dir, new_filename))
-
- # need id, journalist_id, source_id, filename, size
- journalist_id = 1 # *shrug*
- full_path = os.path.join(store_dir, source_dir, new_filename)
- size = os.stat(full_path).st_size
- c.execute("INSERT INTO replies VALUES (?,?,?,?,?)",
- (reply_id, journalist_id, source_id, new_filename, size))
- reply_id += 1 # autoincrement for next reply
-
- # CREATE TABLE journalist_login_attempts
- c.execute("""
-CREATE TABLE journalist_login_attempt (
- id INTEGER NOT NULL,
- timestamp DATETIME,
- journalist_id INTEGER,
- PRIMARY KEY (id),
- FOREIGN KEY(journalist_id) REFERENCES journalists (id)
-)""")
-
- # ALTER TABLE journalists, add last_token column
- c.execute("""ALTER TABLE journalists ADD COLUMN last_token VARCHAR(6)""")
-
- # Save changes and close connection
- conn.commit()
- conn.close()
-
-
-def app_db_migrated():
- """To make the upgrade role idempotent, we need to skip migrating the
- database if it has already been modified. The best way to do this
- is to check whether the last sql command in `migrate_app_db`
- (ALTER TABLE to add the last_token column to the journalists
- table) succeeded. If so, we can assume the database app migration
- succeeded and can safely skip doing it again.
-
- """
- conn, c = get_db_connection()
- journalist_tables = c.execute('PRAGMA table_info(journalists)').fetchall()
- table_names = set([table[1] for table in journalist_tables])
- return 'last_token' in table_names
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.3pre_upgrade.py app|mon"
- sys.exit(1)
-
- server_role = sys.argv[1]
- assert server_role in ("app", "mon")
-
- if server_role == "app":
- backup_app()
- if not app_db_migrated():
- migrate_app_db()
- else:
- backup_mon()
-
-if __name__ == "__main__":
- main()
| diff --git a/molecule/ansible-config/tests/test_max_fail_percentage.py b/molecule/ansible-config/tests/test_max_fail_percentage.py
new file mode 100644
--- /dev/null
+++ b/molecule/ansible-config/tests/test_max_fail_percentage.py
@@ -0,0 +1,80 @@
+import os
+
+import pytest
+import yaml
+
+
+# Lots of parent directories to dig out of the Molecule test dir.
+# Could also inspect the Molecule env vars and go from there.
+REPO_ROOT = os.path.abspath(os.path.join(__file__,
+ os.path.pardir,
+ os.path.pardir,
+ os.path.pardir,
+ os.path.pardir,
+ ))
+ANSIBLE_BASE = os.path.join(REPO_ROOT, 'install_files', 'ansible-base')
+
+
+def find_ansible_playbooks():
+ """
+ Test helper to generate list of filepaths for SecureDrop
+ Ansible playbooks. All files will be validated to contain the
+ max_fail option.
+ """
+ playbooks = []
+ # Not using os.walk since all SecureDrop playbooks are in top-level
+ # of the "ansible-base" directory, and *many* YAML files that are
+ # not playbooks reside in subdirectories.
+ for f in os.listdir(ANSIBLE_BASE):
+ # Assume all YAML files in directory are playbooks.
+ if f.endswith(".yml"):
+ # Ignore deprecated production vars file.
+ if f != "prod-specific.yml":
+ playbooks.append(os.path.join(ANSIBLE_BASE, f))
+ # Sanity checking to make sure list of playbooks is not empty.
+ assert len(playbooks) > 0
+ return playbooks
+
+
[email protected]('playbook', find_ansible_playbooks())
+def test_max_fail_percentage(host, playbook):
+ """
+ All SecureDrop playbooks should set `max_fail_percentage` to "0"
+ on each and every play. Doing so ensures that an error on a single
+ host constitutes a play failure.
+
+ In conjunction with the `any_errors_fatal` option, tested separately,
+ this will achieve a "fail fast" behavior from Ansible.
+
+ There's no ansible.cfg option to set for max_fail_percentage, which would
+ allow for a single DRY update that would apply automatically to all
+ invocations of `ansible-playbook`. Therefore this test, which will
+ search for the line present in all playbooks.
+
+ Technically it's only necessary that plays targeting multiple hosts use
+ the parameter, but we'll play it safe and require it everywhere,
+ to avoid mistakes down the road.
+ """
+ with open(playbook, 'r') as f:
+ playbook_yaml = yaml.safe_load(f)
+ # Descend into playbook list structure to validate play attributes.
+ for play in playbook_yaml:
+ assert 'max_fail_percentage' in play
+ assert play['max_fail_percentage'] == 0
+
+
[email protected]('playbook', find_ansible_playbooks())
+def test_max_fail_percentage(host, playbook):
+ """
+ All SecureDrop playbooks should set `any_errors_fatal` to "yes"
+ on each and every play. In conjunction with `max_fail_percentage` set
+ to "0", doing so ensures that any errors will cause an immediate failure
+ on the playbook.
+ """
+ with open(playbook, 'r') as f:
+ playbook_yaml = yaml.safe_load(f)
+ # Descend into playbook list structure to validate play attributes.
+ for play in playbook_yaml:
+ assert 'any_errors_fatal' in play
+ # Ansible coerces booleans, so bare assert is sufficient
+ assert play['any_errors_fatal']
| Failures on a single host should halt Ansible execution
# Bug
## Description
Right now, when running `./securedrop-admin install`, if an error is encountered on a single host, e.g. the Monitor Server, that host will drop off the play, but the playbook will continue to execute against the Application Server. That's very confusing, and can make reporting problems difficult: lots of scrollback is necessary even to determine that a host did indeed fail.
By way of example, the OSSEC registration bug documented in #2478 will lead to the only-one-host-failed scenario described above.
## Steps to Reproduce
Quite difficult to reproduce reliably. As mentioned above, #2748 is a fine candidate for observing the poor failure mode.
## Expected Behavior
When encountering an error, Ansible stops executing, leaving the error text prominently displayed. The Admin can then use that error message to debug the underlying issue, or report the problem to the SecureDrop developers for additional support.
## Actual Behavior
If only one host fails, Ansible continues to execute against the remaining host, obscuring the fact that the error occurred at all, and complicating the procedure for sharing critical feedback required for support, such as verbatim error messages.
## Comments
We have a recourse here. There's an Ansible option called [max_fail_percentage](http://docs.ansible.com/ansible/latest/playbooks_delegation.html#maximum-failure-percentage):
> By default, Ansible will continue executing actions as long as there are hosts in the group that have not yet failed.
Setting `max_fail_percentage` to `0` should enforce fail-fast behavior from Ansible with SecureDrop. While it would be ideal to set the option in `ansible.cfg`, it appears it's only supported at the play levelβwhich means all playbooks should have it set _on each play_.
| Here's an example of this happening during QA of #2748:
```
PLAY [Add FPF apt repository and install base packages.] *******************************************************
TASK [Gathering Facts] *****************************************************************************************
ok: [app]
fatal: [mon]: FAILED! => {"failed": true, "msg": "Timeout (62s) waiting for privilege escalation prompt: "}
META: ran handlers
META: ran handlers
META: ran handlers
META: ran handlers
META: ran handlers
PLAY [Configure OSSEC.] ****************************************************************************************
TASK [Gathering Facts] *****************************************************************************************
ok: [app]
META: ran handlers
TASK [ossec : Install securedrop-ossec-agent package.] *********************************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_client.yml:2
ok: [app] => {"cache_update_time": 1516732901, "cache_updated": false, "changed": false}
TASK [ossec : Install OSSEC manager package.] ******************************************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_server.yml:2
TASK [ossec : Copy the OSSEC GPG public key for sending encrypted alerts.] *************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_server.yml:12
TASK [ossec : Add the OSSEC GPG public key to the OSSEC manager keyring.] **************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_server.yml:19
TASK [ossec : Copy script for sending GPG-encrypted OSSEC alerts.] *********************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_server.yml:32
TASK [ossec : Create OSSEC manager SSL key.] *******************************************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_server.yml:43
TASK [ossec : Create OSSEC manager SSL certificate.] ***********************************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/configure_server.yml:50
TASK [ossec : Check whether Application Server is registered as OSSEC agent.] **********************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/register.yml:2
TASK [ossec : Set host fact for OSSEC registration state.] *****************************************************
task path: /home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/register.yml:14
fatal: [app]: FAILED! => {"failed": true, "msg": "the field 'args' has an invalid value, which appears to include a variable that is undefined. The error was: 'dict object' has no attribute 'ossec_list_agents_result'\n\nThe error appears to have been in '/home/amnesia/Persistent/securedrop/install_files/ansible-base/roles/ossec/tasks/register.yml': line 14, column 3, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n# copy the result to a more conveniently named fact on both hosts.\n- name: Set host fact for OSSEC registration state.\n ^ here\n"}
PLAY [Configure mailing utilities.] ****************************************************************************
PLAY [Configure SecureDrop Application Server.] ****************************************************************
PLAY [Lock down firewall configuration for Application and Monitor Servers.] ***********************************
PLAY [Reboot Application and Monitor Servers.] *****************************************************************
[WARNING]: Could not create retry file '/home/amnesia/Persistent/securedrop/install_files/ansible-base
/securedrop-prod.retry'. [Errno 1] Operation not permitted:
u'/home/amnesia/Persistent/securedrop/install_files/ansible-base/securedrop-prod.retry'
PLAY RECAP *****************************************************************************************************
app : ok=3 changed=0 unreachable=0 failed=1
localhost : ok=2 changed=0 unreachable=0 failed=0
mon : ok=0 changed=0 unreachable=0 failed=1
```
Note that I used a `--tags ossec` invocation, which drastically abbreviates the error outputβotherwise, tracing back the error would have involved lots of scrollback. | 2018-01-24T03:01:04Z | [] | [] |
freedomofpress/securedrop | 2,940 | freedomofpress__securedrop-2940 | [
"2927",
"2800"
] | 0648361c77078cda255ccc3742aed5673ec84161 | diff --git a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py b/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
deleted file mode 100755
--- a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/python2.7
-
-from datetime import datetime
-import os
-import shutil
-import sqlite3
-import subprocess
-import sys
-import tarfile
-import traceback
-
-
-def backup_app():
- tar_fn = 'backup-app-{}.tar.bz2'.format(datetime.now().strftime("%Y-%m-%d--%H-%M-%S"))
- with tarfile.open(tar_fn, 'w:bz2') as t:
- t.add('/var/lib/securedrop/')
- t.add('/var/lib/tor/services/')
- t.add('/var/www/securedrop/config.py')
- try:
- t.add('/var/www/securedrop/static/i/logo.png')
- except OSError:
- print "[!] Expected but non-essential file ('logo.png') not found. Continuing..."
- print "** Backed up system to {} before migrating.".format(tar_fn)
-
-
-def backup_mon():
- # The only thing we have to back up for the monitor server is the SSH ATHS cert.
- # All other required values are available in prod-specific.yml from the installation.
- tar_fn = 'backup-mon-{}.tar.bz2'.format(datetime.now().strftime("%Y-%m-%d--%H-%M-%S"))
- with tarfile.open(tar_fn, 'w:bz2') as t:
- t.add('/var/lib/tor/services/')
- print "** Backed up system to {} before migrating.".format(tar_fn)
-
-
-def secure_unlink(path):
- subprocess.check_call(['srm', '-r', path])
-
-
-def cleanup_deleted_sources(store_dir, c):
- """
- In 0.3pre and 0.3, there were two bugs that could potentially lead
- to the source directory failing to be deleted when a source was
- deleted from the Journalist Interface. We clean up these leftover
- directories as part of the migration.
-
- These sources can be identified because they have a source_dir in
- the store_dir, but no corresponding Source entry in the database.
-
- See https://github.com/freedomofpress/securedrop/pull/944 for context.
- """
- for source_dir in os.listdir(store_dir):
- try:
- source = c.execute("SELECT * FROM sources WHERE filesystem_id=?",
- (source_dir,)).fetchone()
- if not source:
- print "Deleting source with no db entry ('{}')...".format(source_dir)
- secure_unlink(os.path.join(store_dir, source_dir))
- except Exception as e:
- print "\n!! Error occurred cleaning up deleted sources for source {}".format(source_dir)
- print "Source had {} submissions".format(len(os.listdir(os.path.join(store_dir, source_dir))))
- print traceback.format_exc()
-
-
-def get_db_connection():
- db_path = "/var/lib/securedrop/db.sqlite"
- assert os.path.isfile(db_path)
- conn = sqlite3.connect(db_path)
- return conn, conn.cursor()
-
-
-def migrate_app_db():
- store_dir = "/var/lib/securedrop/store"
- conn, c = get_db_connection()
-
- # Before modifying the database, clean up any source directories that were
- # left on the filesystem after the sources were deleted.
- cleanup_deleted_sources(store_dir, c)
-
- # To get CREATE TABLE from SQLAlchemy:
- # >>> import db
- # >>> from sqlalchemy.schema import CreateTable
- # >>> print CreateTable(db.Journalist.__table__).compile(db.engine)
- # Or, add `echo=True` to the engine constructor.
- # CREATE TABLE replies
- c.execute("""
-CREATE TABLE replies (
- id INTEGER NOT NULL,
- journalist_id INTEGER,
- source_id INTEGER,
- filename VARCHAR(255) NOT NULL,
- size INTEGER NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY(journalist_id) REFERENCES journalists (id),
- FOREIGN KEY(source_id) REFERENCES sources (id)
-)""")
-
- # Fill in replies from the replies in STORE_DIR at the time of the migration
- #
- # Caveats:
- #
- # 1. Before we added the `replies` table, we did not keep track of which
- # journalist wrote the reply. There is no way for us to reverse-engineer
- # that information, so the migration will default to saying they were all
- # created by the first journalist (arbitrarily). Since we do not surface
- # this in the UI yet anyway, it should not be a big deal.
- #
- # 2. We do not try to get the order of the (autoincrementing primary key)
- # reply_id to match the order in which the replies were created (which could
- # be inferred from the file timestamps, since we only normalize submission
- # timestamps and not reply timestamps) since this order is not used anywhere
- # in the code.
-
- # Copy from db.py to compute filesystem-safe journalist filenames
- def journalist_filename(s):
- valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in s.lower().replace(' ', '_') if c in valid_chars])
-
- reply_id = 1
- for source_dir in os.listdir(store_dir):
- try:
- source_id, journalist_designation = c.execute(
- "SELECT id, journalist_designation FROM sources WHERE filesystem_id=?",
- (source_dir,)).fetchone()
- except sqlite3.Error as e:
- print "!!\tError occurred migrating replies for source {}".format(source_dir)
- print traceback.format_exc()
- continue
-
- for filename in os.listdir(os.path.join(store_dir, source_dir)):
- if "-reply.gpg" not in filename:
- continue
-
- # Rename the reply file from 0.3pre convention to 0.3 convention
- interaction_count = filename.split('-')[0]
- new_filename = "{}-{}-reply.gpg".format(interaction_count,
- journalist_filename(journalist_designation))
- os.rename(os.path.join(store_dir, source_dir, filename),
- os.path.join(store_dir, source_dir, new_filename))
-
- # need id, journalist_id, source_id, filename, size
- journalist_id = 1 # *shrug*
- full_path = os.path.join(store_dir, source_dir, new_filename)
- size = os.stat(full_path).st_size
- c.execute("INSERT INTO replies VALUES (?,?,?,?,?)",
- (reply_id, journalist_id, source_id, new_filename, size))
- reply_id += 1 # autoincrement for next reply
-
- # CREATE TABLE journalist_login_attempts
- c.execute("""
-CREATE TABLE journalist_login_attempt (
- id INTEGER NOT NULL,
- timestamp DATETIME,
- journalist_id INTEGER,
- PRIMARY KEY (id),
- FOREIGN KEY(journalist_id) REFERENCES journalists (id)
-)""")
-
- # ALTER TABLE journalists, add last_token column
- c.execute("""ALTER TABLE journalists ADD COLUMN last_token VARCHAR(6)""")
-
- # Save changes and close connection
- conn.commit()
- conn.close()
-
-
-def app_db_migrated():
- """To make the upgrade role idempotent, we need to skip migrating the
- database if it has already been modified. The best way to do this
- is to check whether the last sql command in `migrate_app_db`
- (ALTER TABLE to add the last_token column to the journalists
- table) succeeded. If so, we can assume the database app migration
- succeeded and can safely skip doing it again.
-
- """
- conn, c = get_db_connection()
- journalist_tables = c.execute('PRAGMA table_info(journalists)').fetchall()
- table_names = set([table[1] for table in journalist_tables])
- return 'last_token' in table_names
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.3pre_upgrade.py app|mon"
- sys.exit(1)
-
- server_role = sys.argv[1]
- assert server_role in ("app", "mon")
-
- if server_role == "app":
- backup_app()
- if not app_db_migrated():
- migrate_app_db()
- else:
- backup_mon()
-
-if __name__ == "__main__":
- main()
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -5,7 +5,7 @@
import os
import subprocess
-from Crypto.Random import random
+from Cryptodome.Random import random
import gnupg
from gnupg._util import _is_stream, _make_binary_stream
import scrypt
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -4,9 +4,9 @@
from tempfile import _TemporaryFileWrapper
from gnupg._util import _STREAMLIKE_TYPES
-from Crypto.Cipher import AES
-from Crypto.Random import random
-from Crypto.Util import Counter
+from Cryptodome.Cipher import AES
+from Cryptodome.Random import random
+from Cryptodome.Util import Counter
class SecureTemporaryFile(_TemporaryFileWrapper, object):
| diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst
--- a/docs/development/testing_continuous_integration.rst
+++ b/docs/development/testing_continuous_integration.rst
@@ -3,23 +3,10 @@
Testing: CI
===========
-The SecureDrop project uses multiple automated third-party solutions
-for running automated test suites on code changes:
+The SecureDrop project uses CircleCI_ for running automated test suites on code changes:
- * Travis_
- * CircleCI_
-
-.. _Travis: https://travis-ci.org/freedomofpress/securedrop/
.. _CircleCI: http://circleci.com/gh/freedomofpress/securedrop/
-Travis tests
-------------
-
-The Travis_ test suite provisions the development VM and runs the application
-test suite against the latest version of the code. It also performs basic
-linting and validation, e.g. checking for mistakes in the Sphinx documentation
-(see :doc:`documentation_guidelines`).
-
CI test layout
--------------
@@ -29,10 +16,9 @@ The relevant files for configuring the CI tests are: ::
βββ devops
β βββ inventory <-- environment specific inventory
β βββ playbooks <-- playbooks to start CI boxes
- β βββ scripts <-- shell wrapper scripts
+ β βββ scripts <-- shell wrapper scripts
β βββ templates <-- contains templates for ansible tasks
β βββ vars <-- environment specific variables
- βββ .travis.yml <--- config for development tests on travis
βββ Makefile <-- defines make task shortcuts
The files under ``devops/`` are used to create a minimized staging environment
@@ -46,6 +32,9 @@ The staging environment tests will run automatically in CircleCI,
when changes are submitted by Freedom of the Press Foundation staff
(i.e. members of the ``freedomofpress`` GitHub organization).
+It also performs basic linting and validation, e.g. checking for mistakes in
+the Sphinx documentation.
+
.. tip:: You will need an Amazon Web Services EC2 account to proceed.
See the `AWS Getting Started Guide`_ for detailed instructions.
diff --git a/install_files/ansible-base/roles/ossec-server/files/test_admin_key.pub b/install_files/ansible-base/roles/ossec/files/test_admin_key.pub
similarity index 100%
rename from install_files/ansible-base/roles/ossec-server/files/test_admin_key.pub
rename to install_files/ansible-base/roles/ossec/files/test_admin_key.pub
diff --git a/install_files/ansible-base/roles/ossec-server/files/test_admin_key.sec b/install_files/ansible-base/roles/ossec/files/test_admin_key.sec
similarity index 100%
rename from install_files/ansible-base/roles/ossec-server/files/test_admin_key.sec
rename to install_files/ansible-base/roles/ossec/files/test_admin_key.sec
diff --git a/molecule/ansible-config/tests/test_max_fail_percentage.py b/molecule/ansible-config/tests/test_max_fail_percentage.py
new file mode 100644
--- /dev/null
+++ b/molecule/ansible-config/tests/test_max_fail_percentage.py
@@ -0,0 +1,80 @@
+import os
+
+import pytest
+import yaml
+
+
+# Lots of parent directories to dig out of the Molecule test dir.
+# Could also inspect the Molecule env vars and go from there.
+REPO_ROOT = os.path.abspath(os.path.join(__file__,
+ os.path.pardir,
+ os.path.pardir,
+ os.path.pardir,
+ os.path.pardir,
+ ))
+ANSIBLE_BASE = os.path.join(REPO_ROOT, 'install_files', 'ansible-base')
+
+
+def find_ansible_playbooks():
+ """
+ Test helper to generate list of filepaths for SecureDrop
+ Ansible playbooks. All files will be validated to contain the
+ max_fail option.
+ """
+ playbooks = []
+ # Not using os.walk since all SecureDrop playbooks are in top-level
+ # of the "ansible-base" directory, and *many* YAML files that are
+ # not playbooks reside in subdirectories.
+ for f in os.listdir(ANSIBLE_BASE):
+ # Assume all YAML files in directory are playbooks.
+ if f.endswith(".yml"):
+ # Ignore deprecated production vars file.
+ if f != "prod-specific.yml":
+ playbooks.append(os.path.join(ANSIBLE_BASE, f))
+ # Sanity checking to make sure list of playbooks is not empty.
+ assert len(playbooks) > 0
+ return playbooks
+
+
[email protected]('playbook', find_ansible_playbooks())
+def test_max_fail_percentage(host, playbook):
+ """
+ All SecureDrop playbooks should set `max_fail_percentage` to "0"
+ on each and every play. Doing so ensures that an error on a single
+ host constitutes a play failure.
+
+ In conjunction with the `any_errors_fatal` option, tested separately,
+ this will achieve a "fail fast" behavior from Ansible.
+
+ There's no ansible.cfg option to set for max_fail_percentage, which would
+ allow for a single DRY update that would apply automatically to all
+ invocations of `ansible-playbook`. Therefore this test, which will
+ search for the line present in all playbooks.
+
+ Technically it's only necessary that plays targeting multiple hosts use
+ the parameter, but we'll play it safe and require it everywhere,
+ to avoid mistakes down the road.
+ """
+ with open(playbook, 'r') as f:
+ playbook_yaml = yaml.safe_load(f)
+ # Descend into playbook list structure to validate play attributes.
+ for play in playbook_yaml:
+ assert 'max_fail_percentage' in play
+ assert play['max_fail_percentage'] == 0
+
+
[email protected]('playbook', find_ansible_playbooks())
+def test_any_errors_fatal(host, playbook):
+ """
+ All SecureDrop playbooks should set `any_errors_fatal` to "yes"
+ on each and every play. In conjunction with `max_fail_percentage` set
+ to "0", doing so ensures that any errors will cause an immediate failure
+ on the playbook.
+ """
+ with open(playbook, 'r') as f:
+ playbook_yaml = yaml.safe_load(f)
+ # Descend into playbook list structure to validate play attributes.
+ for play in playbook_yaml:
+ assert 'any_errors_fatal' in play
+ # Ansible coerces booleans, so bare assert is sufficient
+ assert play['any_errors_fatal']
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -12,7 +12,7 @@
import traceback
import requests
-from Crypto import Random
+from Cryptodome import Random
from selenium import webdriver
from selenium.common.exceptions import (WebDriverException,
NoAlertPresentException)
diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py
--- a/testinfra/mon/test_ossec.py
+++ b/testinfra/mon/test_ossec.py
@@ -20,7 +20,6 @@ def test_ossec_package(Package, package):
assert Package(package).is_installed
[email protected](strict=True)
def test_ossec_connectivity(Command, Sudo):
"""
Ensure ossec-server machine has active connection to the ossec-agent.
| Temporarily disable safety check
## Description
We'll need to temporarily disable safety in order to merge until #2926 is resolved (and we'll need to cherry-pick the disabling of safety into the 0.5.2 release branch).
## User Stories
As a SecureDrop maintainer, I don't want to merge with failing CI.
CI test failures obscure debugging output
# Feature request
## Description
The CI run obscures results from the various test suites, making certain failures difficult to understand. See for example [Circle CI build 5914](https://circleci.com/gh/freedomofpress/securedrop/5914), which shows:
> TASK [fail] ******************************************************************** Friday 05 January 2018 19:42:25 +0000 (0:03:20.042) 0:03:24.094 ******** fatal: [app-staging]: FAILED! => {"changed": false, "failed": true, "msg": "Failed as requested from task"}
The relevant task in `molecule/aws/side_effect.yml` is:
```
- fail:
when: app_test_register|failed or testinfra_results|failed
```
It's not immediately clear from the error output _which_ specific test failed, or even which test suite contained the error. We should break that single untitled task into two named tasks, with separate conditionals. Consider using `assert` rather than `fail` because then we can supply a custom `fail_msg` with informative output for developers.
## User Stories
As a developer, if tests fail I want to know exactly what failed so I can fix it. I don't want to cajole answers out of a stubborn and inscrutable oracle _before_ asking team members for review.
|
I initially tried to fix this in PR #2796 by adding an annotation to the junit file. If that works correctly, it will show a notification at the top of the Circle UI. Unfortunately... it's not picking up for whatever reason... GRRR .. so I can circle back to this, but for now `3591fc7` doesn't fix the issue as described; it did add more artifacts for devs to search raw test execution output.
freedomofpress/securedrop | 2,944 | freedomofpress__securedrop-2944 | [
"2933"
] | 58ab66e76f6a5fb23d8bf3d9c969f4e4d89d37d9 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -12,9 +12,7 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys
import os
-import shlex
# Detect if we're being built by Read the Docs
# https://docs.readthedocs.org/en/latest/faq.html#how-do-i-change-behavior-for-read-the-docs
@@ -23,12 +21,12 @@
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
+# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -44,7 +42,7 @@
source_suffix = '.rst'
# The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
@@ -72,9 +70,9 @@
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
-#today = ''
+# today = ''
# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
@@ -82,27 +80,27 @@
# The reST default role (used for this markup: `text`) to use for all
# documents.
-#default_role = None
+# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
-#show_authors = False
+# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
+# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
@@ -132,17 +130,17 @@
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
-#html_theme_options = {}
+# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-#html_title = None
+# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
@@ -151,7 +149,7 @@
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-#html_favicon = None
+# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -161,62 +159,62 @@
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
-#html_extra_path = []
+# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
# If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
# If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
# If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-#html_search_language = 'en'
+# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
-#html_search_options = {'type': 'default'}
+# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
-#html_search_scorer = 'scorer.js'
+# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'SecureDropdoc'
@@ -224,17 +222,17 @@
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
+ # The paper size ('letterpaper' or 'a4paper').
+ # 'papersize': 'letterpaper',
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
+ # The font size ('10pt', '11pt' or '12pt').
+ # 'pointsize': '10pt',
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
+ # Additional stuff for the LaTeX preamble.
+ # 'preamble': '',
-# Latex figure (float) alignment
-#'figure_align': 'htbp',
+ # Latex figure (float) alignment
+ # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
@@ -247,23 +245,23 @@
# The name of an image file (relative to this directory) to place at the top of
# the title page.
-#latex_logo = None
+# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
# If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
# If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
# Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
# If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
@@ -276,7 +274,7 @@
]
# If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
@@ -291,13 +289,13 @@
]
# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []
# If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
+# texinfo_no_detailmenu = False
diff --git a/install_files/ansible-base/callback_plugins/ansible_version_check.py b/install_files/ansible-base/callback_plugins/ansible_version_check.py
--- a/install_files/ansible-base/callback_plugins/ansible_version_check.py
+++ b/install_files/ansible-base/callback_plugins/ansible_version_check.py
@@ -1,5 +1,6 @@
# -*- encoding:utf-8 -*-
-from __future__ import absolute_import, division, print_function, unicode_literals
+from __future__ import absolute_import, division, print_function, \
+ unicode_literals
import sys
@@ -18,11 +19,13 @@ def print_red_bold(text):
class CallbackModule(CallbackBase):
def __init__(self):
- # Can't use `on_X` because this isn't forwards compatible with Ansible 2.0+
+ # Can't use `on_X` because this isn't forwards compatible
+ # with Ansible 2.0+
required_version = '2.4.2' # Keep synchronized with requirements files
if not ansible.__version__.startswith(required_version):
print_red_bold(
- "SecureDrop restriction: only Ansible {version}.* is supported. "
+ "SecureDrop restriction: only Ansible {version}.*"
+ "is supported."
.format(version=required_version)
)
sys.exit(1)
diff --git a/install_files/ansible-base/callback_plugins/profile_tasks.py b/install_files/ansible-base/callback_plugins/profile_tasks.py
--- a/install_files/ansible-base/callback_plugins/profile_tasks.py
+++ b/install_files/ansible-base/callback_plugins/profile_tasks.py
@@ -65,9 +65,10 @@ def playbook_on_stats(self, stats):
)
total_seconds = sum([x[1] for x in self.stats.items()])
- print("\nPlaybook finished: {0}, {1} total tasks. {2} elapsed. \n".format(
+ print("\nPlaybook finished: {0}, {1} total tasks."
+ " {2} elapsed. \n".format(
time.asctime(),
len(self.stats.items()),
datetime.timedelta(seconds=(int(total_seconds)))
)
- )
+ )
diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py
--- a/install_files/ansible-base/roles/backup/files/0.3_collect.py
+++ b/install_files/ansible-base/roles/backup/files/0.3_collect.py
@@ -9,15 +9,12 @@
import sys
import os
-import re
import zipfile
from datetime import datetime
-import functools
# Import the application config.py file
sys.path.append("/var/www/securedrop")
-import config
-import gnupg
-import subprocess
+import config # noqa: F403
+import gnupg # noqa: F403
TOR_SERVICES = "/var/lib/tor/services"
TOR_CONFIG = "/etc/tor/torrc"
@@ -78,5 +75,6 @@ def main():
encrypt_zip_file(zf_fn)
print zf_fn
+
if __name__ == "__main__":
main()
diff --git a/install_files/ansible-base/roles/backup/files/backup.py b/install_files/ansible-base/roles/backup/files/backup.py
--- a/install_files/ansible-base/roles/backup/files/backup.py
+++ b/install_files/ansible-base/roles/backup/files/backup.py
@@ -9,6 +9,7 @@
import os
import tarfile
+
def main():
backup_filename = 'sd-backup-{}.tar.gz'.format(
datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
@@ -32,5 +33,6 @@ def main():
print backup_filename
+
if __name__ == "__main__":
main()
diff --git a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
--- a/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
+++ b/install_files/ansible-base/roles/build-ossec-deb-pkg/library/ossec_urls.py
@@ -26,9 +26,8 @@
ossec_version: "2.8.2"
'''
-from StringIO import StringIO
-from urlparse import urljoin
-import re
+import re # noqa: E402
+
HAS_REQUESTS = True
try:
@@ -37,7 +36,6 @@
HAS_REQUESTS = False
-
class OSSECURLs():
def __init__(self, ossec_version):
@@ -59,34 +57,30 @@ def __init__(self, ossec_version):
def ossec_tarball_filename(self):
return "ossec-hids-{}.tar.gz".format(self.ossec_version)
-
@property
def ossec_tarball_url(self):
return "https://github.com/ossec/ossec-hids/archive/{}.tar.gz".format(
self.ossec_version)
-
@property
def ossec_checksum_url(self):
- return "https://github.com/ossec/ossec-hids/releases/download/{}/{}".format(
+ return "https://github.com/ossec/ossec-hids/releases/download/{}/{}".format( # noqa: E501
self.ossec_version, self.ossec_checksum_filename)
-
@property
def ossec_checksum_filename(self):
return "{}-checksum.txt".format(self.ossec_tarball_filename)
-
def parse_checksums(self):
r = requests.get(self.ossec_checksum_url)
checksum_regex = re.compile(r'''
^MD5\(
'''
- +re.escape(self.ossec_tarball_filename)+
+ + re.escape(self.ossec_tarball_filename) +
r'''\)=\s+(?P<ossec_md5_checksum>[0-9a-f]{32})\s+
SHA1\(
'''
- +re.escape(self.ossec_tarball_filename)+
+ + re.escape(self.ossec_tarball_filename) +
r'''\)=\s+(?P<ossec_sha1_checksum>[0-9a-f]{40})$
''', re.VERBOSE | re.MULTILINE
)
@@ -96,23 +90,23 @@ def parse_checksums(self):
def main():
- module = AnsibleModule(
+ module = AnsibleModule( # noqa: F405
argument_spec=dict(
- ossec_version=dict(default="2.8.2" ),
+ ossec_version=dict(default="2.8.2"),
),
supports_check_mode=False
)
if not HAS_REQUESTS:
- module.fail_json(msg='requests required for this module')
+ module.fail_json(msg='requests required for this module')
ossec_version = module.params['ossec_version']
try:
ossec_config = OSSECURLs(ossec_version=ossec_version)
except:
msg = ("Failed to find checksum information for OSSEC v{}."
- "Ensure you have the proper release specified, "
- "and check the download page to confirm: "
- "http://www.ossec.net/?page_id=19".format(ossec_version))
+ "Ensure you have the proper release specified, "
+ "and check the download page to confirm: "
+ "http://www.ossec.net/?page_id=19".format(ossec_version))
module.fail_json(msg=msg)
results = ossec_config.ansible_facts
@@ -124,5 +118,5 @@ def main():
module.fail_json(msg=msg)
-from ansible.module_utils.basic import *
+from ansible.module_utils.basic import * # noqa E402,F403
main()
diff --git a/install_files/ansible-base/roles/restore/files/0.3_restore.py b/install_files/ansible-base/roles/restore/files/0.3_restore.py
deleted file mode 100755
--- a/install_files/ansible-base/roles/restore/files/0.3_restore.py
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/python2.7
-"""
-
-This script and decrypted backup zip should be copied to the App server
-and run by the anisble plabook. When run (as root), it restores the 0.3
-backup file.
-
-python 0.3_restore.py sd-backup-TIMESTAMP.zip
-
-"""
-
-import sys
-import os
-import re
-import zipfile
-import subprocess
-import shutil
-from datetime import datetime
-from operator import itemgetter
-import calendar
-import traceback
-
-
-def replace_prefix(path, p1, p2):
- """
- Replace p1 in path with p2
-
- >>> replace_prefix("/tmp/files/foo.bar", "/tmp", "/home/me")
- "home/me/files/foo.bar"
- """
- common_prefix = os.path.commonprefix([path, p1])
- if common_prefix:
- assert path.find(common_prefix) == 0
- # +1 so chop off the next path separator, which otherwise becomes a
- # leading path separate and confuses os.path.join
- path = path[len(common_prefix)+1:]
- return os.path.join(p2, path)
-
-
-def extract_to_path(archive, member, path, user):
- """
- Extract from the zip archive `archive` the member `member` and write it to
- `path`, preserving file metadata and chown'ing the file using `user`
- """
- # Create all upper directories if necessary
- upperdirs = os.path.dirname(path)
- if upperdirs and not os.path.exists(upperdirs):
- os.makedirs(upperdirs)
-
- with archive.open(member) as source, file(path, "wb") as target:
- shutil.copyfileobj(source, target)
-
- # Update the timestamps as well (as best we can, thanks, conversion to
- # localtime). This only actually works if the .zip was created on a
- # machine where the timezone was set to UTC, but it might be good
- # enough since we just need the relative order of timestamps (they will
- # all be normalized anyway).
- if hasattr(member, 'date_time'):
- timestamp = calendar.timegm(member.date_time)
- os.utime(path, (timestamp, timestamp))
-
- ug = "{}:{}".format(user, user)
- subprocess.call(['chown', '-R', ug, path])
-
-
-def restore_config_file(zf):
- print "* Migrating SecureDrop config file from backup..."
-
- # Restore the original config file
- for zi in zf.infolist():
- if "var/www/securedrop/config.py" in zi.filename:
- extract_to_path(zf, "var/www/securedrop/config.py",
- "/var/www/securedrop/config.py", "www-data")
-
-
-def restore_securedrop_root(zf):
- print "* Migrating directories from SECUREDROP_ROOT..."
-
- # Restore the original source directories and key files
- for zi in zf.infolist():
- if "var/lib/securedrop/store" in zi.filename:
- extract_to_path(zf, zi,
- replace_prefix(zi.filename,
- "var/lib/securedrop/store",
- "/var/lib/securedrop/store"),
- "www-data")
- elif "var/lib/securedrop/keys" in zi.filename:
- # TODO: is it a bad idea to migrate the random_seed from the
- # previous installation?
- extract_to_path(zf, zi,
- replace_prefix(zi.filename,
- "var/lib/securedrop/keys",
- "/var/lib/securedrop/keys"),
- "www-data")
-
-
-def restore_database(zf):
- print "* Migrating database..."
-
- extract_to_path(zf, "var/lib/securedrop/db.sqlite",
- "/var/lib/securedrop/db.sqlite", "www-data")
-
-
-def restore_custom_header_image(zf):
- print "* Migrating custom header image..."
- extract_to_path(zf,
- "var/www/securedrop/static/i/logo.png",
- "/var/www/securedrop/static/i/logo.png", "www-data")
-
-
-def restore_tor_files(zf):
- tor_root_dir = "/var/lib/tor"
- ths_root_dir = os.path.join(tor_root_dir, "services")
- source_ths_dir = os.path.join(ths_root_dir, "source")
- journalist_ths_dir = os.path.join(ths_root_dir, "journalist")
-
- print "* Deleting previous source THS interface..."
-
- for fn in os.listdir(source_ths_dir):
- os.remove(os.path.join(source_ths_dir, fn))
-
- print "* Deleting previous journalist ATHS interface..."
-
- for fn in os.listdir(journalist_ths_dir):
- os.remove(os.path.join(journalist_ths_dir, fn))
-
- print "* Migrating source and journalist interface .onion..."
-
- for zi in zf.infolist():
- if "var/lib/tor/services/source" in zi.filename:
- extract_to_path(zf, zi,
- replace_prefix(zi.filename,
- "var/lib/tor/services/source",
- "/var/lib/tor/services/source"),
- "debian-tor")
- elif "var/lib/tor/services/journalist" in zi.filename:
- extract_to_path(zf, zi,
- replace_prefix(zi.filename,
- "var/lib/tor/services/journalist",
- "/var/lib/tor/services/journalist"),
- "debian-tor")
-
- # Reload Tor to trigger registering the old Tor Hidden Services
- # reloading Tor compared to restarting tor will not break the current tor
- # connections for SSH
- subprocess.call(['service', 'tor', 'reload'])
-
-
-def main():
- if len(sys.argv) <= 1:
- print ("Usage: 0.3_restore.py <filename>\n\n"
- " <filename>\tPath to a SecureDrop 0.3 backup .zip file"
- "created by 0.3_collect.py")
- sys.exit(1)
-
- try:
- zf_fn = sys.argv[1]
- with zipfile.ZipFile(zf_fn, 'r') as zf:
- restore_config_file(zf)
- restore_securedrop_root(zf)
- restore_database(zf)
- restore_custom_header_image(zf)
- restore_tor_files(zf)
- except:
- print "\n!!! Something went wrong, please file an issue.\n"
- print traceback.format_exc()
- else:
- print "Done!"
-
-if __name__ == "__main__":
- main()
diff --git a/install_files/ansible-base/roles/restore/files/restore.py b/install_files/ansible-base/roles/restore/files/restore.py
--- a/install_files/ansible-base/roles/restore/files/restore.py
+++ b/install_files/ansible-base/roles/restore/files/restore.py
@@ -46,5 +46,6 @@ def main():
subprocess.check_call(['service', 'apache2', 'restart'])
subprocess.check_call(['service', 'tor', 'reload'])
+
if __name__ == "__main__":
main()
diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
--- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
@@ -16,7 +16,7 @@
path_torrc_backup = '/etc/tor/torrc.bak'
path_torrc = '/etc/tor/torrc'
path_desktop = '/home/amnesia/Desktop/'
-path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/'
+path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/' # noqa: E501
# load torrc_additions
if os.path.isfile(path_torrc_additions):
@@ -64,17 +64,22 @@
env['XDG_DATA_DIR'] = '/usr/share/gnome:/usr/local/share/:/usr/share/'
env['HOME'] = '/home/amnesia'
env['LOGNAME'] = 'amnesia'
-env['DBUS_SESSION_BUS_ADDRESS'] = 'unix:path=/run/user/{}/bus'.format(amnesia_uid)
+env['DBUS_SESSION_BUS_ADDRESS'] = 'unix:path=/run/user/{}/bus'.format(
+ amnesia_uid)
-# remove existing shortcut, recreate symlink and change metadata attribute to trust .desktop
+# remove existing shortcut, recreate symlink and change metadata attribute
+# to trust .desktop
for shortcut in ['source.desktop', 'journalist.desktop']:
subprocess.call(['rm', path_desktop + shortcut], env=env)
- subprocess.call(['ln', '-s', path_persistent_desktop + shortcut, path_desktop + shortcut], env=env)
- subprocess.call(['gio', 'set', path_desktop + shortcut, 'metadata::trusted', 'yes'], env=env)
+ subprocess.call(['ln', '-s', path_persistent_desktop + shortcut,
+ path_desktop + shortcut], env=env)
+ subprocess.call(['gio', 'set', path_desktop + shortcut,
+ 'metadata::trusted', 'yes'], env=env)
# reacquire uid0 and notify the user
-os.setresuid(0,0,-1)
-os.setresgid(0,0,-1)
+os.setresuid(0, 0, -1)
+os.setresgid(0, 0, -1)
subprocess.call(['tails-notify-user',
'SecureDrop successfully auto-configured!',
- 'You can now access the Journalist Interface.\nIf you are an admin, you can now SSH to the servers.'])
+ 'You can now access the Journalist Interface.\n',
+ 'If you are an admin, you can now SSH to the servers.'])
| diff --git a/molecule/aws/tests/test_tor_interfaces.py b/molecule/aws/tests/test_tor_interfaces.py
--- a/molecule/aws/tests/test_tor_interfaces.py
+++ b/molecule/aws/tests/test_tor_interfaces.py
@@ -8,6 +8,7 @@
testinfra_hosts = ["docker://apptestclient"]
+
@pytest.mark.parametrize('site', TOR_URL_FILES)
def test_www(host, site):
"""
diff --git a/molecule/builder/tests/test_securedrop_deb_package.py b/molecule/builder/tests/test_securedrop_deb_package.py
--- a/molecule/builder/tests/test_securedrop_deb_package.py
+++ b/molecule/builder/tests/test_securedrop_deb_package.py
@@ -218,9 +218,10 @@ def test_deb_package_contains_no_generated_assets(File, Command, deb):
# no SASS files should exist; only the generated CSS files.
assert not re.search("^.*sass.*$", c.stdout, re.M)
- #no .map files should exist; only the generated CSS files.
+ # no .map files should exist; only the generated CSS files.
assert not re.search("^.*css.map$", c.stdout, re.M)
+
@pytest.mark.parametrize("deb", deb_packages)
def test_deb_package_contains_css(File, Command, deb):
"""
@@ -250,6 +251,7 @@ def test_deb_package_lintian(File, Command, deb, tag):
tag, deb_package.path))
assert len(c.stdout) == 0
+
@pytest.mark.parametrize("deb", deb_packages)
def test_deb_app_package_contains_https_validate_dir(host, deb):
"""
@@ -264,7 +266,8 @@ def test_deb_app_package_contains_https_validate_dir(host, deb):
c = host.run("dpkg-deb --contents {}".format(deb_package.path))
# static/gen/ directory should exist
assert re.search("^.*\./var/www/securedrop/"
- ".well-known/$", c.stdout, re.M)
+ ".well-known/$", c.stdout, re.M)
+
@pytest.mark.parametrize("deb", deb_packages)
def test_grsec_metapackage(host, deb):
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -2,9 +2,8 @@
"""Testing utilities related to setup and teardown of test environment.
"""
import os
-from os.path import abspath, dirname, exists, isdir, join, realpath
+from os.path import abspath, dirname, isdir, join, realpath
import shutil
-import subprocess
import threading
import gnupg
@@ -65,7 +64,8 @@ def teardown():
shutil.rmtree(config.TEMP_DIR)
try:
shutil.rmtree(config.SECUREDROP_DATA_ROOT)
- assert not os.path.exists(config.SECUREDROP_DATA_ROOT) # safeguard for #844
+ # safeguard for #844
+ assert not os.path.exists(config.SECUREDROP_DATA_ROOT)
except OSError as exc:
if 'No such file or directory' not in exc:
raise
| Lint all Python files with flake8
# Feature request
## Description
Our current strategy for linting Python files via flake8 uses a whitelist for file location, which is brittle. If someone adds a new Python file outside of the whitelisted directories, e.g. as in #2922, flake8 will not lint them. That can lead to problems. Specifically, the changes in #2922 did not pass linting and never shold have been merged.
We should update our flake8 linting logic to check _all_ Python files in the repository, which will also catch new additions during CI against PRs.
## User Stories
As a developer, I want CI to lint the code I write, not just its favorite code that it's comfortable with.
As a CI pipeline, I want to lint everyone's code, not just the same old stuff I'm used to.
| cf. the linting strategy used by `make shellcheck`, which leverages a blacklist rather than a whitelist. Let's implement that approach, but for Python files.
We can specify which files not to lint via `flake8 --exclude=path/to/files/to/not/lint/*` | 2018-01-27T04:43:07Z | [] | [] |
freedomofpress/securedrop | 2,965 | freedomofpress__securedrop-2965 | [
"2927",
"2800"
] | 0648361c77078cda255ccc3742aed5673ec84161 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -59,9 +59,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.5.1'
+version = '0.5.2'
# The full version, including alpha/beta/rc tags.
-release = '0.5.1'
+release = '0.5.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py b/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
deleted file mode 100755
--- a/install_files/ansible-base/roles/upgrade/files/0.3pre_upgrade.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/python2.7
-
-from datetime import datetime
-import os
-import shutil
-import sqlite3
-import subprocess
-import sys
-import tarfile
-import traceback
-
-
-def backup_app():
- tar_fn = 'backup-app-{}.tar.bz2'.format(datetime.now().strftime("%Y-%m-%d--%H-%M-%S"))
- with tarfile.open(tar_fn, 'w:bz2') as t:
- t.add('/var/lib/securedrop/')
- t.add('/var/lib/tor/services/')
- t.add('/var/www/securedrop/config.py')
- try:
- t.add('/var/www/securedrop/static/i/logo.png')
- except OSError:
- print "[!] Expected but non-essential file ('logo.png') not found. Continuing..."
- print "** Backed up system to {} before migrating.".format(tar_fn)
-
-
-def backup_mon():
- # The only thing we have to back up for the monitor server is the SSH ATHS cert.
- # All other required values are available in prod-specific.yml from the installation.
- tar_fn = 'backup-mon-{}.tar.bz2'.format(datetime.now().strftime("%Y-%m-%d--%H-%M-%S"))
- with tarfile.open(tar_fn, 'w:bz2') as t:
- t.add('/var/lib/tor/services/')
- print "** Backed up system to {} before migrating.".format(tar_fn)
-
-
-def secure_unlink(path):
- subprocess.check_call(['srm', '-r', path])
-
-
-def cleanup_deleted_sources(store_dir, c):
- """
- In 0.3pre and 0.3, there were two bugs that could potentially lead
- to the source directory failing to be deleted when a source was
- deleted from the Journalist Interface. We clean up these leftover
- directories as part of the migration.
-
- These sources can be identified because they have a source_dir in
- the store_dir, but no corresponding Source entry in the database.
-
- See https://github.com/freedomofpress/securedrop/pull/944 for context.
- """
- for source_dir in os.listdir(store_dir):
- try:
- source = c.execute("SELECT * FROM sources WHERE filesystem_id=?",
- (source_dir,)).fetchone()
- if not source:
- print "Deleting source with no db entry ('{}')...".format(source_dir)
- secure_unlink(os.path.join(store_dir, source_dir))
- except Exception as e:
- print "\n!! Error occurred cleaning up deleted sources for source {}".format(source_dir)
- print "Source had {} submissions".format(len(os.listdir(os.path.join(store_dir, source_dir))))
- print traceback.format_exc()
-
-
-def get_db_connection():
- db_path = "/var/lib/securedrop/db.sqlite"
- assert os.path.isfile(db_path)
- conn = sqlite3.connect(db_path)
- return conn, conn.cursor()
-
-
-def migrate_app_db():
- store_dir = "/var/lib/securedrop/store"
- conn, c = get_db_connection()
-
- # Before modifying the database, clean up any source directories that were
- # left on the filesystem after the sources were deleted.
- cleanup_deleted_sources(store_dir, c)
-
- # To get CREATE TABLE from SQLAlchemy:
- # >>> import db
- # >>> from sqlalchemy.schema import CreateTable
- # >>> print CreateTable(db.Journalist.__table__).compile(db.engine)
- # Or, add `echo=True` to the engine constructor.
- # CREATE TABLE replies
- c.execute("""
-CREATE TABLE replies (
- id INTEGER NOT NULL,
- journalist_id INTEGER,
- source_id INTEGER,
- filename VARCHAR(255) NOT NULL,
- size INTEGER NOT NULL,
- PRIMARY KEY (id),
- FOREIGN KEY(journalist_id) REFERENCES journalists (id),
- FOREIGN KEY(source_id) REFERENCES sources (id)
-)""")
-
- # Fill in replies from the replies in STORE_DIR at the time of the migration
- #
- # Caveats:
- #
- # 1. Before we added the `replies` table, we did not keep track of which
- # journalist wrote the reply. There is no way for us to reverse-engineer
- # that information, so the migration will default to saying they were all
- # created by the first journalist (arbitrarily). Since we do not surface
- # this in the UI yet anyway, it should not be a big deal.
- #
- # 2. We do not try to get the order of the (autoincrementing primary key)
- # reply_id to match the order in which the replies were created (which could
- # be inferred from the file timestamps, since we only normalize submission
- # timestamps and not reply timestamps) since this order is not used anywhere
- # in the code.
-
- # Copy from db.py to compute filesystem-safe journalist filenames
- def journalist_filename(s):
- valid_chars = 'abcdefghijklmnopqrstuvwxyz1234567890-_'
- return ''.join([c for c in s.lower().replace(' ', '_') if c in valid_chars])
-
- reply_id = 1
- for source_dir in os.listdir(store_dir):
- try:
- source_id, journalist_designation = c.execute(
- "SELECT id, journalist_designation FROM sources WHERE filesystem_id=?",
- (source_dir,)).fetchone()
- except sqlite3.Error as e:
- print "!!\tError occurred migrating replies for source {}".format(source_dir)
- print traceback.format_exc()
- continue
-
- for filename in os.listdir(os.path.join(store_dir, source_dir)):
- if "-reply.gpg" not in filename:
- continue
-
- # Rename the reply file from 0.3pre convention to 0.3 convention
- interaction_count = filename.split('-')[0]
- new_filename = "{}-{}-reply.gpg".format(interaction_count,
- journalist_filename(journalist_designation))
- os.rename(os.path.join(store_dir, source_dir, filename),
- os.path.join(store_dir, source_dir, new_filename))
-
- # need id, journalist_id, source_id, filename, size
- journalist_id = 1 # *shrug*
- full_path = os.path.join(store_dir, source_dir, new_filename)
- size = os.stat(full_path).st_size
- c.execute("INSERT INTO replies VALUES (?,?,?,?,?)",
- (reply_id, journalist_id, source_id, new_filename, size))
- reply_id += 1 # autoincrement for next reply
-
- # CREATE TABLE journalist_login_attempts
- c.execute("""
-CREATE TABLE journalist_login_attempt (
- id INTEGER NOT NULL,
- timestamp DATETIME,
- journalist_id INTEGER,
- PRIMARY KEY (id),
- FOREIGN KEY(journalist_id) REFERENCES journalists (id)
-)""")
-
- # ALTER TABLE journalists, add last_token column
- c.execute("""ALTER TABLE journalists ADD COLUMN last_token VARCHAR(6)""")
-
- # Save changes and close connection
- conn.commit()
- conn.close()
-
-
-def app_db_migrated():
- """To make the upgrade role idempotent, we need to skip migrating the
- database if it has already been modified. The best way to do this
- is to check whether the last sql command in `migrate_app_db`
- (ALTER TABLE to add the last_token column to the journalists
- table) succeeded. If so, we can assume the database app migration
- succeeded and can safely skip doing it again.
-
- """
- conn, c = get_db_connection()
- journalist_tables = c.execute('PRAGMA table_info(journalists)').fetchall()
- table_names = set([table[1] for table in journalist_tables])
- return 'last_token' in table_names
-
-
-def main():
- if len(sys.argv) <= 1:
- print "Usage: 0.3pre_upgrade.py app|mon"
- sys.exit(1)
-
- server_role = sys.argv[1]
- assert server_role in ("app", "mon")
-
- if server_role == "app":
- backup_app()
- if not app_db_migrated():
- migrate_app_db()
- else:
- backup_mon()
-
-if __name__ == "__main__":
- main()
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -5,7 +5,7 @@
import os
import subprocess
-from Crypto.Random import random
+from Cryptodome.Random import random
import gnupg
from gnupg._util import _is_stream, _make_binary_stream
import scrypt
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -4,9 +4,9 @@
from tempfile import _TemporaryFileWrapper
from gnupg._util import _STREAMLIKE_TYPES
-from Crypto.Cipher import AES
-from Crypto.Random import random
-from Crypto.Util import Counter
+from Cryptodome.Cipher import AES
+from Cryptodome.Random import random
+from Cryptodome.Util import Counter
class SecureTemporaryFile(_TemporaryFileWrapper, object):
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.5.1'
+__version__ = '0.5.2'
| diff --git a/docs/development/testing_continuous_integration.rst b/docs/development/testing_continuous_integration.rst
--- a/docs/development/testing_continuous_integration.rst
+++ b/docs/development/testing_continuous_integration.rst
@@ -3,23 +3,10 @@
Testing: CI
===========
-The SecureDrop project uses multiple automated third-party solutions
-for running automated test suites on code changes:
+The SecureDrop project uses CircleCI_ for running automated test suites on code changes:
- * Travis_
- * CircleCI_
-
-.. _Travis: https://travis-ci.org/freedomofpress/securedrop/
.. _CircleCI: http://circleci.com/gh/freedomofpress/securedrop/
-Travis tests
-------------
-
-The Travis_ test suite provisions the development VM and runs the application
-test suite against the latest version of the code. It also performs basic
-linting and validation, e.g. checking for mistakes in the Sphinx documentation
-(see :doc:`documentation_guidelines`).
-
CI test layout
--------------
@@ -29,10 +16,9 @@ The relevant files for configuring the CI tests are: ::
βββ devops
β βββ inventory <-- environment specific inventory
β βββ playbooks <-- playbooks to start CI boxes
- β βββ scripts <-- shell wrapper scripts
+ β βββ scripts <-- shell wrapper scripts
β βββ templates <-- contains templates for ansible tasks
β βββ vars <-- environment specific variables
- βββ .travis.yml <--- config for development tests on travis
βββ Makefile <-- defines make task shortcuts
The files under ``devops/`` are used to create a minimized staging environment
@@ -46,6 +32,9 @@ The staging environment tests will run automatically in CircleCI,
when changes are submitted by Freedom of the Press Foundation staff
(i.e. members of the ``freedomofpress`` GitHub organization).
+It also performs basic linting and validation, e.g. checking for mistakes in
+the Sphinx documentation.
+
.. tip:: You will need an Amazon Web Services EC2 account to proceed.
See the `AWS Getting Started Guide`_ for detailed instructions.
diff --git a/install_files/ansible-base/roles/ossec-server/files/test_admin_key.pub b/install_files/ansible-base/roles/ossec/files/test_admin_key.pub
similarity index 100%
rename from install_files/ansible-base/roles/ossec-server/files/test_admin_key.pub
rename to install_files/ansible-base/roles/ossec/files/test_admin_key.pub
diff --git a/install_files/ansible-base/roles/ossec-server/files/test_admin_key.sec b/install_files/ansible-base/roles/ossec/files/test_admin_key.sec
similarity index 100%
rename from install_files/ansible-base/roles/ossec-server/files/test_admin_key.sec
rename to install_files/ansible-base/roles/ossec/files/test_admin_key.sec
diff --git a/molecule/ansible-config/tests/test_max_fail_percentage.py b/molecule/ansible-config/tests/test_max_fail_percentage.py
new file mode 100644
--- /dev/null
+++ b/molecule/ansible-config/tests/test_max_fail_percentage.py
@@ -0,0 +1,80 @@
+import os
+
+import pytest
+import yaml
+
+
+# Lots of parent directories to dig out of the Molecule test dir.
+# Could also inspect the Molecule env vars and go from there.
+REPO_ROOT = os.path.abspath(os.path.join(__file__,
+ os.path.pardir,
+ os.path.pardir,
+ os.path.pardir,
+ os.path.pardir,
+ ))
+ANSIBLE_BASE = os.path.join(REPO_ROOT, 'install_files', 'ansible-base')
+
+
+def find_ansible_playbooks():
+ """
+ Test helper to generate list of filepaths for SecureDrop
+ Ansible playbooks. All files will be validated to contain the
+ max_fail option.
+ """
+ playbooks = []
+ # Not using os.walk since all SecureDrop playbooks are in top-level
+ # of the "ansible-base" directory, and *many* YAML files that are
+ # not playbooks reside in subdirectories.
+ for f in os.listdir(ANSIBLE_BASE):
+ # Assume all YAML files in directory are playbooks.
+ if f.endswith(".yml"):
+ # Ignore deprecated production vars file.
+ if f != "prod-specific.yml":
+ playbooks.append(os.path.join(ANSIBLE_BASE, f))
+ # Sanity checking to make sure list of playbooks is not empty.
+ assert len(playbooks) > 0
+ return playbooks
+
+
[email protected]('playbook', find_ansible_playbooks())
+def test_max_fail_percentage(host, playbook):
+ """
+ All SecureDrop playbooks should set `max_fail_percentage` to "0"
+ on each and every play. Doing so ensures that an error on a single
+ host constitutes a play failure.
+
+ In conjunction with the `any_errors_fatal` option, tested separately,
+ this will achieve a "fail fast" behavior from Ansible.
+
+ There's no ansible.cfg option to set for max_fail_percentage, which would
+ allow for a single DRY update that would apply automatically to all
+ invocations of `ansible-playbook`. Therefore this test, which will
+ search for the line present in all playbooks.
+
+ Technically it's only necessary that plays targeting multiple hosts use
+ the parameter, but we'll play it safe and require it everywhere,
+ to avoid mistakes down the road.
+ """
+ with open(playbook, 'r') as f:
+ playbook_yaml = yaml.safe_load(f)
+ # Descend into playbook list structure to validate play attributes.
+ for play in playbook_yaml:
+ assert 'max_fail_percentage' in play
+ assert play['max_fail_percentage'] == 0
+
+
[email protected]('playbook', find_ansible_playbooks())
+def test_any_errors_fatal(host, playbook):
+ """
+ All SecureDrop playbooks should set `any_errors_fatal` to "yes"
+ on each and every play. In conjunction with `max_fail_percentage` set
+ to "0", doing so ensures that any errors will cause an immediate failure
+ on the playbook.
+ """
+ with open(playbook, 'r') as f:
+ playbook_yaml = yaml.safe_load(f)
+ # Descend into playbook list structure to validate play attributes.
+ for play in playbook_yaml:
+ assert 'any_errors_fatal' in play
+ # Ansible coerces booleans, so bare assert is sufficient
+ assert play['any_errors_fatal']
diff --git a/molecule/aws/securedrop_test.pub b/molecule/aws/securedrop_test.pub
new file mode 100644
--- /dev/null
+++ b/molecule/aws/securedrop_test.pub
@@ -0,0 +1,30 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQENBFhPGZsBCACzn00s3+i5HdGIldDGYXxY2HKL9Qhk0DhiRrNPaQemhNijuFlC
+geCeKN/smDAUyM5mfEoxmWy3V7n8SEQUpqI4dIS2AohReLkyKEKiIpTuXW7F9kO3
+vcXHgrTka+8B4ZQxDuTHNFJLmBwJnP24LrL6BzkDIUNeQFwM0EFTDOJlW1QV6qkm
+9WGizo2sR0VBJJabfRWrTWd8llYOVcc+LptErVNADPaX6iqb+QnZVJ/nYmCTgABj
+lD3aZ4EPZ+ioVOcOxbgBkAX76COObUUw/XahBGwj4fJ5kyzvDSBCHHlRzN39LKpM
+Y+HfSc1scAOWN+Dd0N/joIa0j0U4SGHo1NdzABEBAAG0MVNlY3VyZURyb3AgVEVT
+VElORyBrZXkgPHNlY3VyZWRyb3BAZnJlZWRvbS5wcmVzcz6JAU4EEwEIADgWIQRO
+15zDNi19EoNwRgJKO+SpIhGwPAUCWE8ZmwIbAwULCQgHAgYVCAkKCwIEFgIDAQIe
+AQIXgAAKCRBKO+SpIhGwPCb9B/9SuVoxbe3nLlU0bHDQtoq5P7adyTZK+5gKIiAo
+mtAkc/EuiF6jYIDLo+DBB1GBJVjyD5igTt14XR3JpMe6nLtztD5zgGk47gYQk3y5
+6f5ydd7zRo9OxulRYDvU1mXMUc0EmqfzuSxY55HJy5KQvjeKIU0fTvwbPYXdhFCC
+42iyBIkp4e4/C5oO4lNrNY2DJEZ+a8H5LHasJ4g9A78f/D5q0HWO1HutzfDeiMvq
+WFwlGMD2OzTEQA2MGlVRIYvLHAG1aV9fXY8kjCFT8ri5hxlQeTkKISfbW3pFSq6s
+Ow4r975zWLTPJNm+WTbBpfIOFBVAW34EHkcb/QmntlvqkNM+uQENBFhPGZsBCAC4
+VEtCQEuZ3WzCNL/0yQFih1EjT/AsS3j3++xvSOYWF+c7AjR9X0MkJFTnUZBHs6MX
+PM33bbkWbBBE2ILdDCEF72Uc5HyyC2lW2DvPY9ZLVSGcMCUsKARv5rbeNdgiLVP5
+8AMkmG48q0Pxrr6UVX14M34Jm5G91c/dj9zHtVwkLg4RG/rcumQdlpQhNmMycB2X
+lat48atmEkutfLEQizXIlgiCdNEpgfUBy/jZZcCOjwr8PUPmSUWjKOVMv6CSLx8K
+z2cP4We7tyq4qhc0cWjJOWOmJpu5tbmi6XEEWGaIJyN+POhHEcb0tI1rTJ88nrMb
+DI/NF/35kuWIIkADOb2vABEBAAGJATYEGAEIACAWIQRO15zDNi19EoNwRgJKO+Sp
+IhGwPAUCWE8ZmwIbDAAKCRBKO+SpIhGwPC3fB/0TfuScS718FiEcVRI3F2wBbzTQ
+VARhGzEvPSU5Z3Cur/EB8ihpWvwi39tUMeg5HTheDl/8A7f1QCjIFSVEr1slGNLh
+YFF07XGWhy837z6kiihK2z6/w6Q9QJqjE+QVZCKr97aIPejvEoHoslZTU5pJ52qF
+J7KQd1hEvVs00DxY6VlyK0FzXqByKYq6Arl2tzlCZ6RPEHKXV2xSP06jLEagzgYe
+DylVo9Xahenj4n/Mtq7Am6tGgU9Vy9cGbWNBdUND/mFQEEZSh9RJabPeluH12sir
+5/tfsDr4DGHSz7ws+5M6Zbk6oNJEwQZ4cR+81qCfXE5X5LW1KlAL8wDl7dfS
+=fYUi
+-----END PGP PUBLIC KEY BLOCK-----
\ No newline at end of file
diff --git a/molecule/aws/tor_apt_test.yml b/molecule/aws/tor_apt_test.yml
new file mode 100644
--- /dev/null
+++ b/molecule/aws/tor_apt_test.yml
@@ -0,0 +1,39 @@
+---
+- name: Add apt SD test public key
+ apt_key:
+ data: "{{ lookup('file','securedrop_test.pub') }}"
+ state: present
+
+- name: Temporary fix for GH issue 2938
+ file:
+ state: absent
+ path: "/etc/apt/sources.list.d/tor_apt_freedom_press.list"
+
+- name: Switch apt repo URLs to staging.
+ replace:
+ dest: "/etc/apt/sources.list.d/tor.apt.freedom.press.list"
+ replace: "tor-apt-test.freedom.press"
+ regexp: '//tor-apt\.freedom\.press'
+ ignore_errors: "yes"
+ notify: update tor
+
+- name: Force possible tor update
+ meta: flush_handlers
+
+- name: Squash testinfra failure for packages needing update
+ apt:
+ upgrade: safe
+
+- name: Extract latest tor version
+ shell: |
+ apt-cache policy tor | sed -e 's/^\s*Installed:\ \(\S*\)/\1/g;tx;d;:x'
+ changed_when: false
+ register: extract_tor_version
+
+- name: Dump Tor version to file (for reporting)
+ copy:
+ dest: "{{ playbook_dir }}/../../.tor_version"
+ content: "{{ extract_tor_version.stdout }}"
+ delegate_to: localhost
+ run_once: true
+ become: "no"
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml
--- a/molecule/builder/tests/vars.yml
+++ b/molecule/builder/tests/vars.yml
@@ -1,5 +1,5 @@
---
-securedrop_version: "0.5.1"
+securedrop_version: "0.5.2"
ossec_version: "2.8.2"
keyring_version: "0.1.1"
config_version: "0.1.0"
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -12,7 +12,7 @@
import traceback
import requests
-from Crypto import Random
+from Cryptodome import Random
from selenium import webdriver
from selenium.common.exceptions import (WebDriverException,
NoAlertPresentException)
diff --git a/testinfra/common/test_tor_mirror.py b/testinfra/common/test_tor_mirror.py
--- a/testinfra/common/test_tor_mirror.py
+++ b/testinfra/common/test_tor_mirror.py
@@ -1,6 +1,10 @@
+import os
import pytest
[email protected](
+ os.environ.get('CIRCLE_BRANCH', 'na').startswith('release'),
+ reason="Release branches will use tor-apt-test repo")
def test_tor_mirror_present(host):
"""
Ensure the FPF mirror of the Tor apt repo, tor-apt.freedom.press,
diff --git a/testinfra/mon/test_ossec.py b/testinfra/mon/test_ossec.py
--- a/testinfra/mon/test_ossec.py
+++ b/testinfra/mon/test_ossec.py
@@ -20,7 +20,6 @@ def test_ossec_package(Package, package):
assert Package(package).is_installed
[email protected](strict=True)
def test_ossec_connectivity(Command, Sudo):
"""
Ensure ossec-server machine has active connection to the ossec-agent.
| Temporarily disable safety check
## Description
We'll need to temporarily disable safety in order to merge until #2926 is resolved (and we'll need to cherry pick the disabling of safety into the 0.5.2 release branch).
## User Stories
As a SecureDrop maintainer, I don't want to merge with failing CI.
CI test failures obscure debugging output
# Feature request
## Description
The CI run obscures results from the various test suites, making certain failures difficult to understand. See for example [Circle CI build 5914](https://circleci.com/gh/freedomofpress/securedrop/5914), which shows:
> TASK [fail] ******************************************************************** Friday 05 January 2018 19:42:25 +0000 (0:03:20.042) 0:03:24.094 ******** fatal: [app-staging]: FAILED! => {"changed": false, "failed": true, "msg": "Failed as requested from task"}
The relevant task in `molecule/aws/side_effect.yml` is:
```
- fail:
when: app_test_register|failed or testinfra_results|failed
```
It's not immediately clear from the error output _which_ specific test failed, or even which test suite contained the error. We should break that single untitled task into two named tasks, with separate conditionals. Consider using `assert` rather than `fail` because then we can supply a custom `fail_msg` with informative output for developers.
## User Stories
As a developer, if tests fail I want to know exactly what failed so I can fix it. I don't want to cajole answers out of a stubborn and inscrutable oracle _before_ asking team members for review.
|
I intially tried to fix this in PR #2796 by adding an annotation to the junit file . If that works correctly.. it will show a notification at the top on the Circle UI. Unfortunately... its not picking up for whatever reason... GRRR .. so i can circle back to this but for now `3591fc7` doesnt fix the issue as described but it did add more artifacts for dev to search raw test execution output. | 2018-02-03T15:31:07Z | [] | [] |
freedomofpress/securedrop | 2,979 | freedomofpress__securedrop-2979 | [
"2969"
] | e5cc9a0f629d6fe4c16c33f31fbd78341895853c | diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -1,64 +1,219 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-from base64 import b32encode
+import gnupg
import os
+import scrypt
import subprocess
+from base64 import b32encode
from Cryptodome.Random import random
-import gnupg
+from flask import current_app
from gnupg._util import _is_stream, _make_binary_stream
-import scrypt
from typing import Dict, List, Text # noqa: F401
-import config
-import store
-
# to fix gpg error #78 on production
os.environ['USERNAME'] = 'www-data'
-GPG_KEY_TYPE = "RSA"
-if os.environ.get('SECUREDROP_ENV') == 'test':
- # Optimize crypto to speed up tests (at the expense of security - DO NOT
- # use these settings in production)
- GPG_KEY_LENGTH = 1024
- SCRYPT_PARAMS = dict(N=2**1, r=1, p=1)
-else: # pragma: no cover
- GPG_KEY_LENGTH = 4096
- SCRYPT_PARAMS = config.SCRYPT_PARAMS
-
-SCRYPT_ID_PEPPER = config.SCRYPT_ID_PEPPER
-SCRYPT_GPG_PEPPER = config.SCRYPT_GPG_PEPPER
-
-DEFAULT_WORDS_IN_RANDOM_ID = 8
-
-
-# Make sure these pass before the app can run
-# TODO: Add more tests
-def do_runtime_tests():
- if config.SCRYPT_ID_PEPPER == config.SCRYPT_GPG_PEPPER:
- raise AssertionError('SCRYPT_ID_PEPPER == SCRYPT_GPG_PEPPER')
- # crash if we don't have srm:
- try:
- subprocess.check_call(['srm'], stdout=subprocess.PIPE)
- except subprocess.CalledProcessError:
- pass
-
-
-do_runtime_tests()
-
-gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
-
-# map code for a given language to a localized wordlist
-language2words = {} # type: Dict[Text,List[str]]
-nouns = open(config.NOUNS).read().rstrip('\n').split('\n')
-adjectives = open(config.ADJECTIVES).read().rstrip('\n').split('\n')
-
class CryptoException(Exception):
pass
+class CryptoUtil:
+
+ GPG_KEY_TYPE = "RSA"
+ DEFAULT_WORDS_IN_RANDOM_ID = 8
+
+ def __init__(self,
+ scrypt_params,
+ scrypt_id_pepper,
+ scrypt_gpg_pepper,
+ securedrop_root,
+ word_list,
+ nouns_file,
+ adjectives_file,
+ gpg_key_dir):
+ self.__securedrop_root = securedrop_root
+ self.__word_list = word_list
+
+ if os.environ.get('SECUREDROP_ENV') == 'test':
+ # Optimize crypto to speed up tests (at the expense of security
+ # DO NOT use these settings in production)
+ self.__gpg_key_length = 1024
+ self.scrypt_params = dict(N=2**1, r=1, p=1)
+ else: # pragma: no cover
+ self.__gpg_key_length = 4096
+ self.scrypt_params = scrypt_params
+
+ self.scrypt_id_pepper = scrypt_id_pepper
+ self.scrypt_gpg_pepper = scrypt_gpg_pepper
+
+ self.do_runtime_tests()
+
+ self.gpg = gnupg.GPG(binary='gpg2', homedir=gpg_key_dir)
+
+ # map code for a given language to a localized wordlist
+ self.__language2words = {} # type: Dict[Text, List[str]]
+
+ with open(nouns_file) as f:
+ self.nouns = f.read().splitlines()
+
+ with open(adjectives_file) as f:
+ self.adjectives = f.read().splitlines()
+
+ # Make sure these pass before the app can run
+ # TODO: Add more tests
+ def do_runtime_tests(self):
+ if self.scrypt_id_pepper == self.scrypt_gpg_pepper:
+ raise AssertionError('scrypt_id_pepper == scrypt_gpg_pepper')
+ # crash if we don't have srm:
+ try:
+ subprocess.check_call(['srm'], stdout=subprocess.PIPE)
+ except subprocess.CalledProcessError:
+ pass
+
+ def get_wordlist(self, locale):
+ # type: (Text) -> List[str]
+ """" Ensure the wordlist for the desired locale is read and available
+ in the words global variable. If there is no wordlist for the
+ desired local, fallback to the default english wordlist.
+
+ The localized wordlist are read from wordlists/{locale}.txt but
+ for backward compatibility purposes the english wordlist is read
+ from the config.WORD_LIST file.
+ """
+
+ if locale not in self.__language2words:
+ if locale != 'en':
+ path = os.path.join(self.__securedrop_root,
+ 'wordlists',
+ locale + '.txt')
+ if os.path.exists(path):
+ wordlist_path = path
+ else:
+ wordlist_path = self.__word_list
+ else:
+ wordlist_path = self.__word_list
+
+ with open(wordlist_path) as f:
+ content = f.read().splitlines()
+ self.__language2words[locale] = content
+
+ return self.__language2words[locale]
+
+ def genrandomid(self,
+ words_in_random_id=None,
+ locale='en'):
+ if words_in_random_id is None:
+ words_in_random_id = self.DEFAULT_WORDS_IN_RANDOM_ID
+ return ' '.join(random.choice(self.get_wordlist(locale))
+ for x in range(words_in_random_id))
+
+ def display_id(self):
+ return ' '.join([random.choice(self.adjectives),
+ random.choice(self.nouns)])
+
+ def hash_codename(self, codename, salt=None):
+ """Salts and hashes a codename using scrypt.
+
+ :param str codename: A source's codename.
+ :param str salt: The salt to mix with the codename when hashing.
+ :returns: A base32 encoded string; the salted codename hash.
+ """
+ if salt is None:
+ salt = self.scrypt_id_pepper
+ return b32encode(scrypt.hash(clean(codename),
+ salt,
+ **self.scrypt_params))
+
+ def genkeypair(self, name, secret):
+ """Generate a GPG key through batch file key generation. A source's
+ codename is salted with SCRYPT_GPG_PEPPER and hashed with scrypt to
+ provide the passphrase used to encrypt their private key. Their name
+ should be their filesystem id.
+
+ >>> if not gpg.list_keys(hash_codename('randomid')):
+ ... genkeypair(hash_codename('randomid'), 'randomid').type
+ ... else:
+ ... u'P'
+ u'P'
+
+ :param str name: The source's filesystem id (their codename, salted
+ with SCRYPT_ID_PEPPER, and hashed with scrypt).
+ :param str secret: The source's codename.
+ :returns: a :class:`GenKey <gnupg._parser.GenKey>` object, on which
+ the ``__str__()`` method may be called to return the
+ generated key's fingeprint.
+
+ """
+ name = clean(name)
+ secret = self.hash_codename(secret, salt=self.scrypt_gpg_pepper)
+ return self.gpg.gen_key(self.gpg.gen_key_input(
+ key_type=self.GPG_KEY_TYPE,
+ key_length=self.__gpg_key_length,
+ passphrase=secret,
+ name_email=name
+ ))
+
+ def delete_reply_keypair(self, source_filesystem_id):
+ key = self.getkey(source_filesystem_id)
+ # If this source was never flagged for review, they won't have a reply
+ # keypair
+ if not key:
+ return
+ # The private key needs to be deleted before the public key can be
+ # deleted. http://pythonhosted.org/python-gnupg/#deleting-keys
+ self.gpg.delete_keys(key, True) # private key
+ self.gpg.delete_keys(key) # public key
+ # TODO: srm?
+
+ def getkey(self, name):
+ for key in self.gpg.list_keys():
+ for uid in key['uids']:
+ if name in uid:
+ return key['fingerprint']
+ return None
+
+ def encrypt(self, plaintext, fingerprints, output=None):
+ # Verify the output path
+ if output:
+ current_app.storage.verify(output)
+
+ if not isinstance(fingerprints, (list, tuple)):
+ fingerprints = [fingerprints, ]
+ # Remove any spaces from provided fingerprints GPG outputs fingerprints
+ # with spaces for readability, but requires the spaces to be removed
+ # when using fingerprints to specify recipients.
+ fingerprints = [fpr.replace(' ', '') for fpr in fingerprints]
+
+ if not _is_stream(plaintext):
+ plaintext = _make_binary_stream(plaintext, "utf_8")
+
+ out = self.gpg.encrypt(plaintext,
+ *fingerprints,
+ output=output,
+ always_trust=True,
+ armor=False)
+ if out.ok:
+ return out.data
+ else:
+ raise CryptoException(out.stderr)
+
+ def decrypt(self, secret, ciphertext):
+ """
+ >>> crypto = current_app.crypto_util
+ >>> key = crypto.genkeypair('randomid', 'randomid')
+ >>> message = u'Buenos dΓas, mundo hermoso!'
+ >>> ciphertext = crypto.encrypt(message, str(key))
+ >>> crypto.decrypt('randomid', ciphertext) == message.encode('utf-8')
+ True
+ """
+ hashed_codename = self.hash_codename(secret,
+ salt=self.scrypt_gpg_pepper)
+ return self.gpg.decrypt(ciphertext, passphrase=hashed_codename).data
+
+
def clean(s, also=''):
"""
>>> clean("[]")
@@ -78,145 +233,3 @@ def clean(s, also=''):
# scrypt.hash requires input of type str. Since the wordlist is all ASCII
# characters, this conversion is not problematic
return str(s)
-
-
-def _get_wordlist(locale):
- # type: (Text) -> List[str]
- """" Ensure the wordlist for the desired locale is read and available
- in the words global variable. If there is no wordlist for the
- desired local, fallback to the default english wordlist.
-
- The localized wordlist are read from wordlists/{locale}.txt but
- for backward compatibility purposes the english wordlist is read
- from the config.WORD_LIST file.
-
- """
-
- if locale not in language2words:
- if locale != 'en':
- path = os.path.join(config.SECUREDROP_ROOT,
- 'wordlists',
- locale + '.txt')
- if os.path.exists(path):
- wordlist_path = path
- else:
- wordlist_path = config.WORD_LIST
- else:
- wordlist_path = config.WORD_LIST
-
- language2words[locale] = open(
- wordlist_path).read().rstrip('\n').split('\n')
-
- return language2words[locale]
-
-
-def genrandomid(words_in_random_id=DEFAULT_WORDS_IN_RANDOM_ID, locale='en'):
- return ' '.join(random.choice(_get_wordlist(locale))
- for x in range(words_in_random_id))
-
-
-def display_id():
- return ' '.join([random.choice(adjectives), random.choice(nouns)])
-
-
-def hash_codename(codename, salt=SCRYPT_ID_PEPPER):
- """Salts and hashes a codename using scrypt.
-
- :param str codename: A source's codename.
- :param str salt: The salt to mix with the codename when hashing.
- :returns: A base32 encoded string; the salted codename hash.
- """
- return b32encode(scrypt.hash(clean(codename), salt, **SCRYPT_PARAMS))
-
-
-def genkeypair(name, secret):
- """Generate a GPG key through batch file key generation. A source's
- codename is salted with SCRYPT_GPG_PEPPER and hashed with scrypt to
- provide the passphrase used to encrypt their private key. Their name
- should be their filesystem id.
-
- >>> if not gpg.list_keys(hash_codename('randomid')):
- ... genkeypair(hash_codename('randomid'), 'randomid').type
- ... else:
- ... u'P'
- u'P'
-
- :param str name: The source's filesystem id (their codename, salted
- with SCRYPT_ID_PEPPER, and hashed with scrypt).
- :param str secret: The source's codename.
- :returns: a :class:`GenKey <gnupg._parser.GenKey>` object, on which
- the ``__str__()`` method may be called to return the
- generated key's fingeprint.
-
- """
- name = clean(name)
- secret = hash_codename(secret, salt=SCRYPT_GPG_PEPPER)
- return gpg.gen_key(gpg.gen_key_input(
- key_type=GPG_KEY_TYPE, key_length=GPG_KEY_LENGTH,
- passphrase=secret,
- name_email=name
- ))
-
-
-def delete_reply_keypair(source_filesystem_id):
- key = getkey(source_filesystem_id)
- # If this source was never flagged for review, they won't have a reply
- # keypair
- if not key:
- return
- # The private key needs to be deleted before the public key can be deleted
- # http://pythonhosted.org/python-gnupg/#deleting-keys
- gpg.delete_keys(key, True) # private key
- gpg.delete_keys(key) # public key
- # TODO: srm?
-
-
-def getkey(name):
- for key in gpg.list_keys():
- for uid in key['uids']:
- if name in uid:
- return key['fingerprint']
- return None
-
-
-def encrypt(plaintext, fingerprints, output=None):
- # Verify the output path
- if output:
- store.verify(output)
-
- if not isinstance(fingerprints, (list, tuple)):
- fingerprints = [fingerprints, ]
- # Remove any spaces from provided fingerprints GPG outputs fingerprints
- # with spaces for readability, but requires the spaces to be removed when
- # using fingerprints to specify recipients.
- fingerprints = [fpr.replace(' ', '') for fpr in fingerprints]
-
- if not _is_stream(plaintext):
- plaintext = _make_binary_stream(plaintext, "utf_8")
-
- out = gpg.encrypt(plaintext,
- *fingerprints,
- output=output,
- always_trust=True,
- armor=False)
- if out.ok:
- return out.data
- else:
- raise CryptoException(out.stderr)
-
-
-def decrypt(secret, ciphertext):
- """
- >>> key = genkeypair('randomid', 'randomid')
- >>> message = u'Buenos dΓas, mundo hermoso!'
- >>> ciphertext = encrypt(message, str(key))
- >>> decrypt('randomid', ciphertext) == message.encode('utf-8')
- True
- """
- hashed_codename = hash_codename(secret, salt=SCRYPT_GPG_PEPPER)
- return gpg.decrypt(ciphertext, passphrase=hashed_codename).data
-
-
-if __name__ == "__main__": # pragma: no cover
- import doctest
- doctest.testmod()
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -11,10 +11,12 @@
import template_filters
import version
+from crypto_util import CryptoUtil
from db import db
-from models import Journalist
from journalist_app import account, admin, main, col
from journalist_app.utils import get_source, logged_in
+from models import Journalist
+from store import Storage
_insecure_views = ['main.login', 'static']
@@ -44,6 +46,21 @@ def create_app(config):
app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
db.init_app(app)
+ app.storage = Storage(config.STORE_DIR,
+ config.TEMP_DIR,
+ config.JOURNALIST_KEY)
+
+ app.crypto_util = CryptoUtil(
+ scrypt_params=config.SCRYPT_PARAMS,
+ scrypt_id_pepper=config.SCRYPT_ID_PEPPER,
+ scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER,
+ securedrop_root=config.SECUREDROP_ROOT,
+ word_list=config.WORD_LIST,
+ nouns_file=config.NOUNS,
+ adjectives_file=config.ADJECTIVES,
+ gpg_key_dir=config.GPG_KEY_DIR,
+ )
+
@app.errorhandler(CSRFError)
def handle_csrf_error(e):
# render the message first to ensure it's localized.
diff --git a/securedrop/journalist_app/col.py b/securedrop/journalist_app/col.py
--- a/securedrop/journalist_app/col.py
+++ b/securedrop/journalist_app/col.py
@@ -5,9 +5,6 @@
from flask_babel import gettext
from sqlalchemy.orm.exc import NoResultFound
-import crypto_util
-import store
-
from db import db
from models import Submission
from journalist_app.forms import ReplyForm
@@ -36,7 +33,7 @@ def remove_star(filesystem_id):
def col(filesystem_id):
form = ReplyForm()
source = get_source(filesystem_id)
- source.has_key = crypto_util.getkey(filesystem_id)
+ source.has_key = current_app.crypto_util.getkey(filesystem_id)
return render_template("col.html", filesystem_id=filesystem_id,
source=source, form=form)
@@ -83,7 +80,7 @@ def download_single_submission(filesystem_id, fn):
current_app.logger.error(
"Could not mark " + fn + " as downloaded: %s" % (e,))
- return send_file(store.path(filesystem_id, fn),
+ return send_file(current_app.storage.path(filesystem_id, fn),
mimetype="application/pgp-encrypted")
return view
diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py
--- a/securedrop/journalist_app/main.py
+++ b/securedrop/journalist_app/main.py
@@ -6,9 +6,6 @@
from flask_babel import gettext
from sqlalchemy.sql.expression import false
-import crypto_util
-import store
-
from db import db
from models import Source, SourceStar, Submission, Reply
from journalist_app.forms import ReplyForm
@@ -95,10 +92,12 @@ def reply():
g.source.interaction_count += 1
filename = "{0}-{1}-reply.gpg".format(g.source.interaction_count,
g.source.journalist_filename)
- crypto_util.encrypt(form.message.data,
- [crypto_util.getkey(g.filesystem_id),
- config.JOURNALIST_KEY],
- output=store.path(g.filesystem_id, filename))
+ current_app.crypto_util.encrypt(
+ form.message.data,
+ [current_app.crypto_util.getkey(g.filesystem_id),
+ config.JOURNALIST_KEY],
+ output=current_app.storage.path(g.filesystem_id, filename),
+ )
reply = Reply(g.user, g.source, filename)
try:
@@ -157,10 +156,10 @@ def bulk():
@view.route('/regenerate-code', methods=('POST',))
def regenerate_code():
original_journalist_designation = g.source.journalist_designation
- g.source.journalist_designation = crypto_util.display_id()
+ g.source.journalist_designation = current_app.crypto_util.display_id()
for item in g.source.collection:
- item.filename = store.rename_submission(
+ item.filename = current_app.storage.rename_submission(
g.filesystem_id,
item.filename,
g.source.journalist_filename)
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -6,9 +6,7 @@
from flask_babel import gettext, ngettext
from sqlalchemy.sql.expression import false
-import crypto_util
import i18n
-import store
import worker
from db import db
@@ -115,8 +113,8 @@ def download(zip_basename, submissions):
:param list submissions: A list of :class:`models.Submission`s to
include in the ZIP-file.
"""
- zf = store.get_bulk_archive(submissions,
- zip_directory=zip_basename)
+ zf = current_app.storage.get_bulk_archive(submissions,
+ zip_directory=zip_basename)
attachment_filename = "{}--{}.zip".format(
zip_basename, datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S"))
@@ -132,7 +130,7 @@ def download(zip_basename, submissions):
def bulk_delete(filesystem_id, items_selected):
for item in items_selected:
- item_path = store.path(filesystem_id, item.filename)
+ item_path = current_app.storage.path(filesystem_id, item.filename)
worker.enqueue(srm, item_path)
db.session.delete(item)
db.session.commit()
@@ -202,7 +200,9 @@ def col_delete(cols_selected):
def make_password(config):
while True:
- password = crypto_util.genrandomid(7, i18n.get_language(config))
+ password = current_app.crypto_util.genrandomid(
+ 7,
+ i18n.get_language(config))
try:
Journalist.check_password_acceptable(password)
return password
@@ -212,10 +212,10 @@ def make_password(config):
def delete_collection(filesystem_id):
# Delete the source's collection of submissions
- job = worker.enqueue(srm, store.path(filesystem_id))
+ job = worker.enqueue(srm, current_app.storage.path(filesystem_id))
# Delete the source's reply keypair
- crypto_util.delete_reply_keypair(filesystem_id)
+ current_app.crypto_util.delete_reply_keypair(filesystem_id)
# Delete their entry in the db
source = get_source(filesystem_id)
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -5,8 +5,8 @@
import codecs
import logging
import os
-from os.path import dirname, join, realpath
import pwd
+import qrcode
import shutil
import signal
import subprocess
@@ -15,14 +15,15 @@
import traceback
import version
-import qrcode
+from flask import current_app
+from os.path import dirname, join, realpath
from sqlalchemy import text
from sqlalchemy.orm.exc import NoResultFound
os.environ['SECUREDROP_ENV'] = 'dev' # noqa
import config
-import crypto_util
import journalist_app
+
from db import db
from models import Journalist, PasswordError, InvalidUsernameException
from management.run import run
@@ -143,7 +144,7 @@ def _get_yubikey_usage():
def _make_password():
while True:
- password = crypto_util.genrandomid(7)
+ password = current_app.crypto_util.genrandomid(7)
try:
Journalist.check_password_acceptable(password)
return password
diff --git a/securedrop/models.py b/securedrop/models.py
--- a/securedrop/models.py
+++ b/securedrop/models.py
@@ -1,7 +1,13 @@
-import os
+# -*- coding: utf-8 -*-
+import binascii
import datetime
import base64
-import binascii
+import os
+import scrypt
+import pyotp
+import qrcode
+# Using svg because it doesn't require additional dependencies
+import qrcode.image.svg
# Find the best implementation available on this platform
try:
@@ -9,21 +15,14 @@
except ImportError:
from StringIO import StringIO # type: ignore
+from flask import current_app
+from jinja2 import Markup
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Binary
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
-from jinja2 import Markup
-
-import scrypt
-import pyotp
-
-import qrcode
-# Using svg because it doesn't require additional dependencies
-import qrcode.image.svg
from db import db
-import store
LOGIN_HARDENING = True
@@ -120,7 +119,8 @@ class Submission(db.Model):
def __init__(self, source, filename):
self.source_id = source.id
self.filename = filename
- self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
+ self.size = os.stat(current_app.storage.path(source.filesystem_id,
+ filename)).st_size
def __repr__(self):
return '<Submission %r>' % (self.filename)
@@ -150,7 +150,8 @@ def __init__(self, journalist, source, filename):
self.journalist_id = journalist.id
self.source_id = source.id
self.filename = filename
- self.size = os.stat(store.path(source.filesystem_id, filename)).st_size
+ self.size = os.stat(current_app.storage.path(source.filesystem_id,
+ filename)).st_size
def __repr__(self):
return '<Reply %r>' % (self.filename)
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -8,18 +8,18 @@
from os import path
from sqlalchemy.orm.exc import NoResultFound
-import crypto_util
import i18n
-import store
import template_filters
import version
+from crypto_util import CryptoUtil
from db import db
from models import Source
from request_that_secures_file_uploads import RequestThatSecuresFileUploads
from source_app import main, info, api
from source_app.decorators import ignore_static
from source_app.utils import logged_in
+from store import Storage
def create_app(config):
@@ -49,17 +49,20 @@ def create_app(config):
app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
db.init_app(app)
- if config.DATABASE_ENGINE == "sqlite":
- db_uri = (config.DATABASE_ENGINE + ":///" +
- config.DATABASE_FILE)
- else: # pragma: no cover
- db_uri = (
- config.DATABASE_ENGINE + '://' +
- config.DATABASE_USERNAME + ':' +
- config.DATABASE_PASSWORD + '@' +
- config.DATABASE_HOST + '/' +
- config.DATABASE_NAME
- )
+ app.storage = Storage(config.STORE_DIR,
+ config.TEMP_DIR,
+ config.JOURNALIST_KEY)
+
+ app.crypto_util = CryptoUtil(
+ scrypt_params=config.SCRYPT_PARAMS,
+ scrypt_id_pepper=config.SCRYPT_ID_PEPPER,
+ scrypt_gpg_pepper=config.SCRYPT_GPG_PEPPER,
+ securedrop_root=config.SECUREDROP_ROOT,
+ word_list=config.WORD_LIST,
+ nouns_file=config.NOUNS,
+ adjectives_file=config.ADJECTIVES,
+ gpg_key_dir=config.GPG_KEY_DIR,
+ )
@app.errorhandler(CSRFError)
def handle_csrf_error(e):
@@ -134,7 +137,7 @@ def setup_g():
# these common values.
if logged_in():
g.codename = session['codename']
- g.filesystem_id = crypto_util.hash_codename(g.codename)
+ g.filesystem_id = app.crypto_util.hash_codename(g.codename)
try:
g.source = Source.query \
.filter(Source.filesystem_id == g.filesystem_id) \
@@ -146,7 +149,7 @@ def setup_g():
del session['logged_in']
del session['codename']
return redirect(url_for('main.index'))
- g.loc = store.path(g.filesystem_id)
+ g.loc = app.storage.path(g.filesystem_id)
@app.errorhandler(404)
def page_not_found(error):
diff --git a/securedrop/source_app/info.py b/securedrop/source_app/info.py
--- a/securedrop/source_app/info.py
+++ b/securedrop/source_app/info.py
@@ -1,7 +1,7 @@
-from cStringIO import StringIO
-from flask import Blueprint, render_template, send_file
+# -*- coding: utf-8 -*-
-import crypto_util
+from cStringIO import StringIO
+from flask import Blueprint, render_template, send_file, current_app
def make_blueprint(config):
@@ -17,7 +17,8 @@ def recommend_tor_browser():
@view.route('/journalist-key')
def download_journalist_pubkey():
- journalist_pubkey = crypto_util.gpg.export_keys(config.JOURNALIST_KEY)
+ journalist_pubkey = current_app.crypto_util.gpg.export_keys(
+ config.JOURNALIST_KEY)
return send_file(StringIO(journalist_pubkey),
mimetype="application/pgp-keys",
attachment_filename=config.JOURNALIST_KEY + ".asc",
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -7,9 +7,6 @@
from flask_babel import gettext
from sqlalchemy.exc import IntegrityError
-import crypto_util
-import store
-
from db import db
from models import Source, Submission, Reply, get_one_or_else
from rm import srm
@@ -44,9 +41,10 @@ def generate():
@view.route('/create', methods=['POST'])
def create():
- filesystem_id = crypto_util.hash_codename(session['codename'])
+ filesystem_id = current_app.crypto_util.hash_codename(
+ session['codename'])
- source = Source(filesystem_id, crypto_util.display_id())
+ source = Source(filesystem_id, current_app.crypto_util.display_id())
db.session.add(source)
try:
db.session.commit()
@@ -60,7 +58,7 @@ def create():
del session['codename']
abort(500)
else:
- os.mkdir(store.path(filesystem_id))
+ os.mkdir(current_app.storage.path(filesystem_id))
session['logged_in'] = True
return redirect(url_for('.lookup'))
@@ -70,9 +68,12 @@ def create():
def lookup():
replies = []
for reply in g.source.replies:
- reply_path = store.path(g.filesystem_id, reply.filename)
+ reply_path = current_app.storage.path(
+ g.filesystem_id,
+ reply.filename,
+ )
try:
- reply.decrypted = crypto_util.decrypt(
+ reply.decrypted = current_app.crypto_util.decrypt(
g.codename,
open(reply_path).read()).decode('utf-8')
except UnicodeDecodeError:
@@ -89,9 +90,13 @@ def lookup():
# Generate a keypair to encrypt replies from the journalist
# Only do this if the journalist has flagged the source as one
# that they would like to reply to. (Issue #140.)
- if not crypto_util.getkey(g.filesystem_id) and g.source.flagged:
+ if not current_app.crypto_util.getkey(g.filesystem_id) and \
+ g.source.flagged:
db_uri = current_app.config['SQLALCHEMY_DATABASE_URI']
- async_genkey(db_uri, g.filesystem_id, g.codename)
+ async_genkey(current_app.crypto_util,
+ db_uri,
+ g.filesystem_id,
+ g.codename)
return render_template(
'lookup.html',
@@ -99,7 +104,7 @@ def lookup():
replies=replies,
flagged=g.source.flagged,
new_user=session.get('new_user', None),
- haskey=crypto_util.getkey(
+ haskey=current_app.crypto_util.getkey(
g.filesystem_id))
@view.route('/submit', methods=('POST',))
@@ -122,7 +127,7 @@ def submit():
if msg:
g.source.interaction_count += 1
fnames.append(
- store.save_message_submission(
+ current_app.storage.save_message_submission(
g.filesystem_id,
g.source.interaction_count,
journalist_filename,
@@ -130,7 +135,7 @@ def submit():
if fh:
g.source.interaction_count += 1
fnames.append(
- store.save_file_submission(
+ current_app.storage.save_file_submission(
g.filesystem_id,
g.source.interaction_count,
journalist_filename,
@@ -166,7 +171,11 @@ def submit():
entropy_avail = get_entropy_estimate()
if entropy_avail >= 2400:
db_uri = current_app.config['SQLALCHEMY_DATABASE_URI']
- async_genkey(db_uri, g.filesystem_id, g.codename)
+
+ async_genkey(current_app.crypto_util,
+ db_uri,
+ g.filesystem_id,
+ g.codename)
current_app.logger.info("generating key, entropy: {}".format(
entropy_avail))
else:
@@ -186,7 +195,7 @@ def delete():
query = Reply.query.filter(
Reply.filename == request.form['reply_filename'])
reply = get_one_or_else(query, current_app.logger, abort)
- srm(store.path(g.filesystem_id, reply.filename))
+ srm(current_app.storage.path(g.filesystem_id, reply.filename))
db.session.delete(reply)
db.session.commit()
@@ -203,7 +212,7 @@ def batch_delete():
return redirect(url_for('.lookup'))
for reply in replies:
- srm(store.path(g.filesystem_id, reply.filename))
+ srm(current_app.storage.path(g.filesystem_id, reply.filename))
db.session.delete(reply)
db.session.commit()
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
--- a/securedrop/source_app/utils.py
+++ b/securedrop/source_app/utils.py
@@ -7,10 +7,9 @@
from sqlalchemy.orm import sessionmaker
from threading import Thread
-import crypto_util
import i18n
-import store
+from crypto_util import CryptoException
from models import Source
@@ -20,8 +19,8 @@ def logged_in():
def valid_codename(codename):
try:
- filesystem_id = crypto_util.hash_codename(codename)
- except crypto_util.CryptoException as e:
+ filesystem_id = current_app.crypto_util.hash_codename(codename)
+ except CryptoException as e:
current_app.logger.info(
"Could not compute filesystem ID for codename '{}': {}".format(
codename, e))
@@ -34,8 +33,9 @@ def valid_codename(codename):
def generate_unique_codename(config):
"""Generate random codenames until we get an unused one"""
while True:
- codename = crypto_util.genrandomid(Source.NUM_WORDS,
- i18n.get_language(config))
+ codename = current_app.crypto_util.genrandomid(
+ Source.NUM_WORDS,
+ i18n.get_language(config))
# The maximum length of a word in the wordlist is 9 letters and the
# codename length is 7 words, so it is currently impossible to
@@ -50,7 +50,9 @@ def generate_unique_codename(config):
"(Codename='{}')".format(codename))
continue
- filesystem_id = crypto_util.hash_codename(codename) # scrypt (slow)
+ # scrypt (slow)
+ filesystem_id = current_app.crypto_util.hash_codename(codename)
+
matching_sources = Source.query.filter(
Source.filesystem_id == filesystem_id).all()
if len(matching_sources) == 0:
@@ -69,8 +71,11 @@ def wrapper(*args, **kwargs):
@async
-def async_genkey(db_uri, filesystem_id, codename):
- crypto_util.genkeypair(filesystem_id, codename)
+def async_genkey(crypto_util_, db_uri, filesystem_id, codename):
+ # We pass in the `crypto_util_` so we don't have to reference `current_app`
+ # here. The app might not have a pushed context during testing which would
+ # cause this async function to break.
+ crypto_util_.genkeypair(filesystem_id, codename)
# Register key generation as update to the source, so sources will
# filter to the top of the list in the journalist interface if a
@@ -94,7 +99,7 @@ def normalize_timestamps(filesystem_id):
the latest submission. This minimizes metadata that could be useful to
investigators. See #301.
"""
- sub_paths = [store.path(filesystem_id, submission.filename)
+ sub_paths = [current_app.storage.path(filesystem_id, submission.filename)
for submission in g.source.submissions]
if len(sub_paths) > 1:
args = ["touch"]
diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -1,17 +1,15 @@
# -*- coding: utf-8 -*-
+import gzip
import os
import re
-import config
-import zipfile
-import crypto_util
import tempfile
-import gzip
+import zipfile
+
+from flask import current_app
from werkzeug.utils import secure_filename
from secure_tempfile import SecureTemporaryFile
-import logging
-log = logging.getLogger(__name__)
VALIDATE_FILENAME = re.compile(
"^(?P<index>\d+)\-[a-z0-9-_]*"
@@ -26,136 +24,151 @@ class PathException(Exception):
pass
-def verify(p):
- """Assert that the path is absolute, normalized, inside `config.STORE_DIR`, and
- matches the filename format.
- """
- if not os.path.isabs(config.STORE_DIR):
- raise PathException("config.STORE_DIR(%s) is not absolute" % (
- config.STORE_DIR, ))
-
- # os.path.abspath makes the path absolute and normalizes
- # '/foo/../bar' to '/bar', etc. We have to check that the path is
- # normalized before checking that it starts with the
- # `config.STORE_DIR` or else a malicious actor could append a
- # bunch of '../../..' to access files outside of the store.
- if not p == os.path.abspath(p):
- raise PathException("The path is not absolute and/or normalized")
-
- # Check that the path p is in config.STORE_DIR
- if os.path.relpath(p, config.STORE_DIR).startswith('..'):
- raise PathException("Invalid directory %s" % (p, ))
-
- if os.path.isfile(p):
- filename = os.path.basename(p)
- ext = os.path.splitext(filename)[-1]
- if filename == '_FLAG':
- return True
- if ext != '.gpg':
- # if there's an extension, verify it's a GPG
- raise PathException("Invalid file extension %s" % (ext, ))
- if not VALIDATE_FILENAME(filename):
- raise PathException("Invalid filename %s" % (filename, ))
-
-
-def path(*s):
- """Get the normalized, absolute file path, within `config.STORE_DIR`."""
- joined = os.path.join(os.path.abspath(config.STORE_DIR), *s)
- absolute = os.path.abspath(joined)
- verify(absolute)
- return absolute
-
-
-def get_bulk_archive(selected_submissions, zip_directory=''):
- """Generate a zip file from the selected submissions"""
- zip_file = tempfile.NamedTemporaryFile(prefix='tmp_securedrop_bulk_dl_',
- dir=config.TEMP_DIR,
- delete=False)
- sources = set([i.source.journalist_designation
- for i in selected_submissions])
- # The below nested for-loops are there to create a more usable
- # folder structure per #383
- with zipfile.ZipFile(zip_file, 'w') as zip:
- for source in sources:
- fname = ""
- submissions = [s for s in selected_submissions
- if s.source.journalist_designation == source]
- for submission in submissions:
- filename = path(submission.source.filesystem_id,
- submission.filename)
- verify(filename)
- document_number = submission.filename.split('-')[0]
- if zip_directory == submission.source.journalist_filename:
- fname = zip_directory
+class Storage:
+
+ def __init__(self, storage_path, temp_dir, gpg_key):
+ if not os.path.isabs(storage_path):
+ raise PathException("storage_path {} is not absolute".format(
+ storage_path))
+ self.__storage_path = storage_path
+
+ if not os.path.isabs(temp_dir):
+ raise PathException("temp_dir {} is not absolute".format(
+ temp_dir))
+ self.__temp_dir = temp_dir
+
+ self.__gpg_key = gpg_key
+
+ def verify(self, p):
+ """Assert that the path is absolute, normalized, inside
+ `self.__storage_path`, and matches the filename format.
+ """
+
+ # os.path.abspath makes the path absolute and normalizes
+ # '/foo/../bar' to '/bar', etc. We have to check that the path is
+ # normalized before checking that it starts with the
+ # `self.__storage_path` or else a malicious actor could append a
+ # bunch of '../../..' to access files outside of the store.
+ if not p == os.path.abspath(p):
+ raise PathException("The path is not absolute and/or normalized")
+
+ # Check that the path p is in self.__storage_path
+ if os.path.relpath(p, self.__storage_path).startswith('..'):
+ raise PathException("Invalid directory %s" % (p, ))
+
+ if os.path.isfile(p):
+ filename = os.path.basename(p)
+ ext = os.path.splitext(filename)[-1]
+ if filename == '_FLAG':
+ return True
+ if ext != '.gpg':
+ # if there's an extension, verify it's a GPG
+ raise PathException("Invalid file extension %s" % (ext, ))
+ if not VALIDATE_FILENAME(filename):
+ raise PathException("Invalid filename %s" % (filename, ))
+
+ def path(self, *s):
+ """Get the normalized, absolute file path, within
+ `self.__storage_path`.
+ """
+ joined = os.path.join(os.path.abspath(self.__storage_path), *s)
+ absolute = os.path.abspath(joined)
+ self.verify(absolute)
+ return absolute
+
+ def get_bulk_archive(self, selected_submissions, zip_directory=''):
+ """Generate a zip file from the selected submissions"""
+ zip_file = tempfile.NamedTemporaryFile(
+ prefix='tmp_securedrop_bulk_dl_',
+ dir=self.__temp_dir,
+ delete=False)
+ sources = set([i.source.journalist_designation
+ for i in selected_submissions])
+ # The below nested for-loops are there to create a more usable
+ # folder structure per #383
+ with zipfile.ZipFile(zip_file, 'w') as zip:
+ for source in sources:
+ fname = ""
+ submissions = [s for s in selected_submissions
+ if s.source.journalist_designation == source]
+ for submission in submissions:
+ filename = self.path(submission.source.filesystem_id,
+ submission.filename)
+ self.verify(filename)
+ document_number = submission.filename.split('-')[0]
+ if zip_directory == submission.source.journalist_filename:
+ fname = zip_directory
+ else:
+ fname = os.path.join(zip_directory, source)
+ zip.write(filename, arcname=os.path.join(
+ fname,
+ "%s_%s" % (document_number,
+ submission.source.last_updated.date()),
+ os.path.basename(filename)
+ ))
+ return zip_file
+
+ def save_file_submission(self, filesystem_id, count, journalist_filename,
+ filename, stream):
+ sanitized_filename = secure_filename(filename)
+
+ # We store file submissions in a .gz file for two reasons:
+ #
+ # 1. Downloading large files over Tor is very slow. If we can
+ # compress the file, we can speed up future downloads.
+ #
+ # 2. We want to record the original filename because it might be
+ # useful, either for context about the content of the submission
+ # or for figuring out which application should be used to open
+ # it. However, we'd like to encrypt that info and have the
+ # decrypted file automatically have the name of the original
+ # file. Given various usability constraints in GPG and Tails, this
+ # is the most user-friendly way we have found to do this.
+
+ encrypted_file_name = "{0}-{1}-doc.gz.gpg".format(
+ count,
+ journalist_filename)
+ encrypted_file_path = self.path(filesystem_id, encrypted_file_name)
+ with SecureTemporaryFile("/tmp") as stf:
+ with gzip.GzipFile(filename=sanitized_filename,
+ mode='wb', fileobj=stf) as gzf:
+ # Buffer the stream into the gzip file to avoid excessive
+ # memory consumption
+ while True:
+ buf = stream.read(1024 * 8)
+ if not buf:
+ break
+ gzf.write(buf)
+
+ current_app.crypto_util.encrypt(
+ stf, self.__gpg_key, encrypted_file_path)
+
+ return encrypted_file_name
+
+ def save_message_submission(self, filesystem_id, count,
+ journalist_filename, message):
+ filename = "{0}-{1}-msg.gpg".format(count, journalist_filename)
+ msg_loc = self.path(filesystem_id, filename)
+ current_app.crypto_util.encrypt(message, self.__gpg_key, msg_loc)
+ return filename
+
+ def rename_submission(self,
+ filesystem_id,
+ orig_filename,
+ journalist_filename):
+ check_submission_name = VALIDATE_FILENAME(orig_filename)
+ if check_submission_name:
+ parsed_filename = check_submission_name.groupdict()
+ if parsed_filename.get('file_type'):
+ new_filename = "{}-{}-{}.gpg".format(
+ parsed_filename['index'], journalist_filename,
+ parsed_filename['file_type'])
+ try:
+ os.rename(self.path(filesystem_id, orig_filename),
+ self.path(filesystem_id, new_filename))
+ except OSError:
+ pass
else:
- fname = os.path.join(zip_directory, source)
- zip.write(filename, arcname=os.path.join(
- fname,
- "%s_%s" % (document_number,
- submission.source.last_updated.date()),
- os.path.basename(filename)
- ))
- return zip_file
-
-
-def save_file_submission(filesystem_id, count, journalist_filename, filename,
- stream):
- sanitized_filename = secure_filename(filename)
-
- # We store file submissions in a .gz file for two reasons:
- #
- # 1. Downloading large files over Tor is very slow. If we can
- # compress the file, we can speed up future downloads.
- #
- # 2. We want to record the original filename because it might be
- # useful, either for context about the content of the submission
- # or for figuring out which application should be used to open
- # it. However, we'd like to encrypt that info and have the
- # decrypted file automatically have the name of the original
- # file. Given various usability constraints in GPG and Tails, this
- # is the most user-friendly way we have found to do this.
-
- encrypted_file_name = "{0}-{1}-doc.gz.gpg".format(
- count,
- journalist_filename)
- encrypted_file_path = path(filesystem_id, encrypted_file_name)
- with SecureTemporaryFile("/tmp") as stf:
- with gzip.GzipFile(filename=sanitized_filename,
- mode='wb', fileobj=stf) as gzf:
- # Buffer the stream into the gzip file to avoid excessive
- # memory consumption
- while True:
- buf = stream.read(1024 * 8)
- if not buf:
- break
- gzf.write(buf)
-
- crypto_util.encrypt(stf, config.JOURNALIST_KEY, encrypted_file_path)
-
- return encrypted_file_name
-
-
-def save_message_submission(filesystem_id, count, journalist_filename,
- message):
- filename = "{0}-{1}-msg.gpg".format(count, journalist_filename)
- msg_loc = path(filesystem_id, filename)
- crypto_util.encrypt(message, config.JOURNALIST_KEY, msg_loc)
- return filename
-
-
-def rename_submission(filesystem_id, orig_filename, journalist_filename):
- check_submission_name = VALIDATE_FILENAME(orig_filename)
- if check_submission_name:
- parsed_filename = check_submission_name.groupdict()
- if parsed_filename.get('file_type'):
- new_filename = "{}-{}-{}.gpg".format(
- parsed_filename['index'], journalist_filename,
- parsed_filename['file_type'])
- try:
- os.rename(path(filesystem_id, orig_filename),
- path(filesystem_id, new_filename))
- except OSError:
- pass
- else:
- return new_filename # Only return new filename if successful
- return orig_filename
+ # Only return new filename if successful
+ return new_filename
+ return orig_filename
| diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -1,11 +1,8 @@
# -*- coding: utf-8 -*-
-from datetime import datetime
import errno
import mock
-from multiprocessing import Process
import os
-from os.path import abspath, dirname, join, realpath
import signal
import socket
import time
@@ -13,6 +10,9 @@
import requests
from Cryptodome import Random
+from datetime import datetime
+from multiprocessing import Process
+from os.path import abspath, dirname, join, realpath
from selenium import webdriver
from selenium.common.exceptions import (WebDriverException,
NoAlertPresentException)
@@ -24,10 +24,10 @@
import config
import journalist_app
import source_app
-from db import db
-import crypto_util
import tests.utils.env as env
+from db import db
+
LOG_DIR = abspath(join(dirname(realpath(__file__)), '..', 'log'))
@@ -109,7 +109,10 @@ def setup(self, session_expiration=30):
# Allow custom session expiration lengths
self.session_expiration = session_expiration
- def start_source_server():
+ self.source_app = source_app.create_app(config)
+ self.journalist_app = journalist_app.create_app(config)
+
+ def start_source_server(app):
# We call Random.atfork() here because we fork the source and
# journalist server from the main Python process we use to drive
# our browser with multiprocessing.Process() below. These child
@@ -120,22 +123,25 @@ def start_source_server():
config.SESSION_EXPIRATION_MINUTES = self.session_expiration
- source_app.create_app(config).run(
+ app.run(
port=source_port,
debug=True,
use_reloader=False,
threaded=True)
- def start_journalist_server():
+ def start_journalist_server(app):
Random.atfork()
- journalist_app.create_app(config).run(
+ app.run(
port=journalist_port,
debug=True,
use_reloader=False,
threaded=True)
- self.source_process = Process(target=start_source_server)
- self.journalist_process = Process(target=start_journalist_server)
+ self.source_process = Process(
+ target=lambda: start_source_server(self.source_app))
+
+ self.journalist_process = Process(
+ target=lambda: start_journalist_server(self.journalist_app))
self.source_process.start()
self.journalist_process.start()
@@ -176,10 +182,10 @@ def start_journalist_server():
'invasion of privacy.')
def wait_for_source_key(self, source_name):
- filesystem_id = crypto_util.hash_codename(source_name)
+ filesystem_id = self.source_app.crypto_util.hash_codename(source_name)
def key_available(filesystem_id):
- assert crypto_util.getkey(filesystem_id)
+ assert self.source_app.crypto_util.getkey(filesystem_id)
self.wait_for(
lambda: key_available(filesystem_id), timeout=60)
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -9,7 +9,6 @@
from selenium.webdriver.common.keys import Keys
import tests.utils.db_helper as db_helper
-import crypto_util
from models import Journalist
import config
@@ -699,8 +698,9 @@ def _journalist_confirm_delete_selected(self):
confirm_btn.click()
def _source_delete_key(self):
- filesystem_id = crypto_util.hash_codename(self.source_name)
- crypto_util.delete_reply_keypair(filesystem_id)
+ filesystem_id = self.source_app.crypto_util.hash_codename(
+ self.source_name)
+ self.source_app.crypto_util.delete_reply_keypair(filesystem_id)
def _journalist_continues_after_flagging(self):
self.driver.find_element_by_id('continue-to-list').click()
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
--- a/securedrop/tests/test_crypto_util.py
+++ b/securedrop/tests/test_crypto_util.py
@@ -2,14 +2,16 @@
import os
import unittest
+from flask import current_app
+
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import crypto_util
import journalist_app
import models
-import store
import utils
+from crypto_util import CryptoUtil, CryptoException
from db import db
@@ -27,9 +29,9 @@ def tearDown(self):
self.__context.pop()
def test_word_list_does_not_contain_empty_strings(self):
- self.assertNotIn('', (crypto_util._get_wordlist('en')
- + crypto_util.nouns
- + crypto_util.adjectives))
+ self.assertNotIn('', (current_app.crypto_util.get_wordlist('en')
+ + current_app.crypto_util.nouns
+ + current_app.crypto_util.adjectives))
def test_clean(self):
ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ'
@@ -38,20 +40,21 @@ def test_clean(self):
invalid_2 = 'bar baz~'
self.assertEqual(ok, crypto_util.clean(ok))
- with self.assertRaisesRegexp(crypto_util.CryptoException,
+ with self.assertRaisesRegexp(CryptoException,
'invalid input: {}'.format(invalid_1)):
crypto_util.clean(invalid_1)
- with self.assertRaisesRegexp(crypto_util.CryptoException,
+ with self.assertRaisesRegexp(CryptoException,
'invalid input: {}'.format(invalid_2)):
crypto_util.clean(invalid_2)
def test_encrypt_success(self):
source, _ = utils.db_helper.init_source()
message = str(os.urandom(1))
- ciphertext = crypto_util.encrypt(
+ ciphertext = current_app.crypto_util.encrypt(
message,
- [crypto_util.getkey(source.filesystem_id), config.JOURNALIST_KEY],
- store.path(source.filesystem_id, 'somefile.gpg'))
+ [current_app.crypto_util.getkey(source.filesystem_id),
+ config.JOURNALIST_KEY],
+ current_app.storage.path(source.filesystem_id, 'somefile.gpg'))
self.assertIsInstance(ciphertext, str)
self.assertNotEqual(ciphertext, message)
@@ -59,12 +62,12 @@ def test_encrypt_success(self):
def test_encrypt_failure(self):
source, _ = utils.db_helper.init_source()
- with self.assertRaisesRegexp(crypto_util.CryptoException,
+ with self.assertRaisesRegexp(CryptoException,
'no terminal at all requested'):
- crypto_util.encrypt(
+ current_app.crypto_util.encrypt(
str(os.urandom(1)),
[],
- store.path(source.filesystem_id, 'other.gpg'))
+ current_app.storage.path(source.filesystem_id, 'other.gpg'))
def test_encrypt_without_output(self):
"""We simply do not specify the option output keyword argument
@@ -73,10 +76,11 @@ def test_encrypt_without_output(self):
"""
source, codename = utils.db_helper.init_source()
message = str(os.urandom(1))
- ciphertext = crypto_util.encrypt(
+ ciphertext = current_app.crypto_util.encrypt(
message,
- [crypto_util.getkey(source.filesystem_id), config.JOURNALIST_KEY])
- plaintext = crypto_util.decrypt(codename, ciphertext)
+ [current_app.crypto_util.getkey(source.filesystem_id),
+ config.JOURNALIST_KEY])
+ plaintext = current_app.crypto_util.decrypt(codename, ciphertext)
self.assertEqual(message, plaintext)
@@ -95,12 +99,12 @@ def test_encrypt_binary_stream(self):
"""
source, codename = utils.db_helper.init_source()
with open(os.path.realpath(__file__)) as fh:
- ciphertext = crypto_util.encrypt(
+ ciphertext = current_app.crypto_util.encrypt(
fh,
- [crypto_util.getkey(source.filesystem_id),
+ [current_app.crypto_util.getkey(source.filesystem_id),
config.JOURNALIST_KEY],
- store.path(source.filesystem_id, 'somefile.gpg'))
- plaintext = crypto_util.decrypt(codename, ciphertext)
+ current_app.storage.path(source.filesystem_id, 'somefile.gpg'))
+ plaintext = current_app.crypto_util.decrypt(codename, ciphertext)
with open(os.path.realpath(__file__)) as fh:
self.assertEqual(fh.read(), plaintext)
@@ -111,42 +115,42 @@ def test_encrypt_fingerprints_not_a_list_or_tuple(self):
decryption should work as intended."""
source, codename = utils.db_helper.init_source()
message = str(os.urandom(1))
- ciphertext = crypto_util.encrypt(
+ ciphertext = current_app.crypto_util.encrypt(
message,
- crypto_util.getkey(source.filesystem_id),
- store.path(source.filesystem_id, 'somefile.gpg'))
- plaintext = crypto_util.decrypt(codename, ciphertext)
+ current_app.crypto_util.getkey(source.filesystem_id),
+ current_app.storage.path(source.filesystem_id, 'somefile.gpg'))
+ plaintext = current_app.crypto_util.decrypt(codename, ciphertext)
self.assertEqual(message, plaintext)
def test_basic_encrypt_then_decrypt_multiple_recipients(self):
source, codename = utils.db_helper.init_source()
message = str(os.urandom(1))
- ciphertext = crypto_util.encrypt(
+ ciphertext = current_app.crypto_util.encrypt(
message,
- [crypto_util.getkey(source.filesystem_id),
+ [current_app.crypto_util.getkey(source.filesystem_id),
config.JOURNALIST_KEY],
- store.path(source.filesystem_id, 'somefile.gpg'))
- plaintext = crypto_util.decrypt(codename, ciphertext)
+ current_app.storage.path(source.filesystem_id, 'somefile.gpg'))
+ plaintext = current_app.crypto_util.decrypt(codename, ciphertext)
self.assertEqual(message, plaintext)
# Since there's no way to specify which key to use for
# decryption to python-gnupg, we delete the `source`'s key and
# ensure we can decrypt with the `config.JOURNALIST_KEY`.
- crypto_util.delete_reply_keypair(source.filesystem_id)
- plaintext_ = crypto_util.gpg.decrypt(ciphertext).data
+ current_app.crypto_util.delete_reply_keypair(source.filesystem_id)
+ plaintext_ = current_app.crypto_util.gpg.decrypt(ciphertext).data
self.assertEqual(message, plaintext_)
def verify_genrandomid(self, locale):
- id = crypto_util.genrandomid(locale=locale)
+ id = current_app.crypto_util.genrandomid(locale=locale)
id_words = id.split()
self.assertEqual(id, crypto_util.clean(id))
- self.assertEqual(len(id_words), crypto_util.DEFAULT_WORDS_IN_RANDOM_ID)
+ self.assertEqual(len(id_words), CryptoUtil.DEFAULT_WORDS_IN_RANDOM_ID)
for word in id_words:
- self.assertIn(word, crypto_util._get_wordlist(locale))
+ self.assertIn(word, current_app.crypto_util.get_wordlist(locale))
def test_genrandomid_default_locale_is_en(self):
self.verify_genrandomid('en')
@@ -157,50 +161,53 @@ def test_get_wordlist(self):
for f in os.listdir(wordlists_path):
if f.endswith('.txt') and f != 'en.txt':
locales.append(f.split('.')[0])
- wordlist_en = crypto_util._get_wordlist('en')
+ wordlist_en = current_app.crypto_util.get_wordlist('en')
for locale in locales:
- self.assertNotEqual(wordlist_en, crypto_util._get_wordlist(locale))
+ self.assertNotEqual(wordlist_en,
+ current_app.crypto_util.get_wordlist(locale))
self.verify_genrandomid(locale)
- self.assertEqual(wordlist_en, crypto_util._get_wordlist('unknown'))
+ self.assertEqual(wordlist_en,
+ current_app.crypto_util.get_wordlist('unknown'))
def test_display_id(self):
- id = crypto_util.display_id()
+ id = current_app.crypto_util.display_id()
id_words = id.split()
self.assertEqual(len(id_words), 2)
- self.assertIn(id_words[0], crypto_util.adjectives)
- self.assertIn(id_words[1], crypto_util.nouns)
+ self.assertIn(id_words[0], current_app.crypto_util.adjectives)
+ self.assertIn(id_words[1], current_app.crypto_util.nouns)
def test_hash_codename(self):
- codename = crypto_util.genrandomid()
- hashed_codename = crypto_util.hash_codename(codename)
+ codename = current_app.crypto_util.genrandomid()
+ hashed_codename = current_app.crypto_util.hash_codename(codename)
self.assertRegexpMatches(hashed_codename, '^[2-7A-Z]{103}=$')
def test_genkeypair(self):
- codename = crypto_util.genrandomid()
- filesystem_id = crypto_util.hash_codename(codename)
- journalist_filename = crypto_util.display_id()
+ codename = current_app.crypto_util.genrandomid()
+ filesystem_id = current_app.crypto_util.hash_codename(codename)
+ journalist_filename = current_app.crypto_util.display_id()
source = models.Source(filesystem_id, journalist_filename)
db.session.add(source)
db.session.commit()
- crypto_util.genkeypair(source.filesystem_id, codename)
+ current_app.crypto_util.genkeypair(source.filesystem_id, codename)
- self.assertIsNotNone(crypto_util.getkey(filesystem_id))
+ self.assertIsNotNone(current_app.crypto_util.getkey(filesystem_id))
def test_delete_reply_keypair(self):
source, _ = utils.db_helper.init_source()
- crypto_util.delete_reply_keypair(source.filesystem_id)
+ current_app.crypto_util.delete_reply_keypair(source.filesystem_id)
- self.assertIsNone(crypto_util.getkey(source.filesystem_id))
+ self.assertIsNone(current_app.crypto_util.getkey(source.filesystem_id))
def test_delete_reply_keypair_no_key(self):
"""No exceptions should be raised when provided a filesystem id that
does not exist.
"""
- crypto_util.delete_reply_keypair('Reality Winner')
+ current_app.crypto_util.delete_reply_keypair('Reality Winner')
def test_getkey(self):
source, _ = utils.db_helper.init_source()
- self.assertIsNotNone(crypto_util.getkey(source.filesystem_id))
+ self.assertIsNotNone(
+ current_app.crypto_util.getkey(source.filesystem_id))
diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-from cStringIO import StringIO
+import gnupg
import gzip
import mock
import os
@@ -12,20 +12,19 @@
import zipfile
from bs4 import BeautifulSoup
-from flask import session, g, escape
+from cStringIO import StringIO
+from flask import session, g, escape, current_app
from mock import patch
-import gnupg
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-import crypto_util
-from db import db
-from models import Journalist
import journalist_app
import source_app
-import store
import utils
+from db import db
+from models import Journalist
+
# Seed the RNG for deterministic testing
random.seed('ΰ² _ΰ² ')
@@ -47,7 +46,7 @@ def setUp(self):
self.__context.push()
utils.env.setup()
- self.gpg = gnupg.GPG(homedir=config.GPG_KEY_DIR)
+ self.gpg = self.journalist_app.crypto_util.gpg
# Patch the two-factor verification to avoid intermittent errors
patcher = mock.patch('models.Journalist.verify_token')
@@ -56,8 +55,8 @@ def setUp(self):
self.mock_journalist_verify_token.return_value = True
# Add a test user to the journalist interface
- self.user_pw = "corret horse battery staple haha cultural reference"
- self.username = crypto_util.genrandomid()
+ self.user_pw = "correct horse battery staple haha cultural reference"
+ self.username = self.journalist_app.crypto_util.genrandomid()
user = Journalist(username=self.username, password=self.user_pw)
db.session.add(user)
db.session.commit()
@@ -154,7 +153,8 @@ def test_submit_message(self):
# needs to wait for the worker to get the job and execute it
utils.async.wait_for_assertion(
lambda: self.assertFalse(
- os.path.exists(store.path(filesystem_id, doc_name))
+ os.path.exists(current_app.storage.path(filesystem_id,
+ doc_name))
)
)
@@ -248,12 +248,15 @@ def test_submit_file(self):
# needs to wait for the worker to get the job and execute it
utils.async.wait_for_assertion(
lambda: self.assertFalse(
- os.path.exists(store.path(filesystem_id, doc_name))
+ os.path.exists(current_app.storage.path(filesystem_id,
+ doc_name))
)
)
def test_reply_normal(self):
+ self.__context.push()
self.helper_test_reply("This is a test reply.", True)
+ self.__context.pop()
def test_unicode_reply_with_ansi_env(self):
# This makes python-gnupg handle encoding equivalent to if we were
@@ -264,12 +267,14 @@ def test_unicode_reply_with_ansi_env(self):
# _encoding attribute it would have had it been initialized in a "C"
# environment. See
# https://github.com/freedomofpress/securedrop/issues/1360 for context.
- old_encoding = crypto_util.gpg._encoding
- crypto_util.gpg._encoding = "ansi_x3.4_1968"
+ self.__context.push()
+ old_encoding = current_app.crypto_util.gpg._encoding
+ current_app.crypto_util.gpg._encoding = "ansi_x3.4_1968"
try:
self.helper_test_reply("α αα»α«αα¦α¦α«α α±α©α α’α±α«α αα±αͺα«α·αα»αΉα¦αα³α’α", True)
finally:
- crypto_util.gpg._encoding = old_encoding
+ current_app.crypto_util.gpg._encoding = old_encoding
+ self.__context.pop()
def _can_decrypt_with_key(self, msg, key_fpr, passphrase=None):
"""
@@ -293,9 +298,9 @@ def _can_decrypt_with_key(self, msg, key_fpr, passphrase=None):
# Attempt decryption with the given key
if passphrase:
- passphrase = crypto_util.hash_codename(
+ passphrase = current_app.crypto_util.hash_codename(
passphrase,
- salt=crypto_util.SCRYPT_GPG_PEPPER)
+ salt=current_app.crypto_util.scrypt_gpg_pepper)
decrypted_data = gpg.decrypt(msg, passphrase=passphrase)
self.assertTrue(
decrypted_data.ok,
@@ -357,8 +362,9 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Block up to 15s for the reply keypair, so we can test sending a reply
utils.async.wait_for_assertion(
- lambda: self.assertNotEqual(crypto_util.getkey(filesystem_id),
- None),
+ lambda: self.assertNotEqual(
+ current_app.crypto_util.getkey(filesystem_id),
+ None),
15)
# Create 2 replies to test deleting on journalist and source interface
@@ -397,8 +403,10 @@ def helper_test_reply(self, test_reply, expected_success=True):
zf = zipfile.ZipFile(StringIO(resp.data), 'r')
data = zf.read(zf.namelist()[0])
self._can_decrypt_with_key(data, config.JOURNALIST_KEY)
- self._can_decrypt_with_key(data, crypto_util.getkey(filesystem_id),
- codename)
+ self._can_decrypt_with_key(
+ data,
+ current_app.crypto_util.getkey(filesystem_id),
+ codename)
# Test deleting reply on the journalist interface
last_reply_number = len(
@@ -433,7 +441,7 @@ def helper_test_reply(self, test_reply, expected_success=True):
# Make sure the reply is deleted from the filesystem
utils.async.wait_for_assertion(
lambda: self.assertFalse(os.path.exists(
- store.path(filesystem_id, msgid))))
+ current_app.storage.path(filesystem_id, msgid))))
app.get('/logout')
@@ -479,7 +487,7 @@ def test_delete_collection(self, async_genkey):
# Make sure the collection is deleted from the filesystem
utils.async.wait_for_assertion(
lambda: self.assertFalse(
- os.path.exists(store.path(filesystem_id)))
+ os.path.exists(current_app.storage.path(filesystem_id)))
)
@patch('source_app.main.async_genkey')
@@ -516,7 +524,7 @@ def test_delete_collections(self, async_genkey):
# Make sure the collections are deleted from the filesystem
utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(filesystem_id))
+ any([os.path.exists(current_app.storage.path(filesystem_id))
for filesystem_id in checkbox_values])))
def test_filenames(self):
@@ -680,5 +688,6 @@ def helper_filenames_delete(self, app, soup, i):
# Make sure the files were deleted from the filesystem
utils.async.wait_for_assertion(lambda: self.assertFalse(
- any([os.path.exists(store.path(filesystem_id, doc_name))
+ any([os.path.exists(current_app.storage.path(filesystem_id,
+ doc_name))
for doc_name in checkbox_values])))
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -5,7 +5,7 @@
import unittest
import zipfile
-from flask import url_for, escape, session
+from flask import url_for, escape, session, current_app
from flask_testing import TestCase
from mock import patch
from sqlalchemy.orm.exc import StaleDataError
@@ -13,7 +13,7 @@
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-import crypto_util
+
from db import db
from models import (InvalidPasswordLength, Journalist, Reply, Source,
Submission)
@@ -50,7 +50,8 @@ def setUp(self):
def tearDown(self):
utils.env.teardown()
- @patch('crypto_util.genrandomid', side_effect=['bad', VALID_PASSWORD])
+ @patch('crypto_util.CryptoUtil.genrandomid',
+ side_effect=['bad', VALID_PASSWORD])
def test_make_password(self, mocked_pw_gen):
class fake_config:
pass
@@ -943,13 +944,13 @@ def test_delete_source_deletes_source_key(self):
self._delete_collection_setup()
# Source key exists
- source_key = crypto_util.getkey(self.source.filesystem_id)
+ source_key = current_app.crypto_util.getkey(self.source.filesystem_id)
self.assertNotEqual(source_key, None)
journalist_app.utils.delete_collection(self.source.filesystem_id)
# Source key no longer exists
- source_key = crypto_util.getkey(self.source.filesystem_id)
+ source_key = current_app.crypto_util.getkey(self.source.filesystem_id)
self.assertEqual(source_key, None)
def test_delete_source_deletes_docs_on_disk(self):
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -1,20 +1,20 @@
# -*- coding: utf-8 -*-
-from cStringIO import StringIO
import gzip
-from mock import patch, ANY
+import json
import re
-from flask import session, escape, url_for
+from cStringIO import StringIO
+from flask import session, escape, url_for, current_app
from flask_testing import TestCase
+from mock import patch, ANY
-import crypto_util
-from db import db
-from models import Source
+import config
import source
-import version
import utils
-import json
-import config
+import version
+
+from db import db
+from models import Source
from utils.db_helper import new_codename
overly_long_codename = 'a' * (Source.MAX_CODENAME_LEN + 1)
@@ -49,7 +49,7 @@ def test_all_words_in_wordlist_validate(self):
validation. Otherwise a source will have a codename and be unable to
return."""
- wordlist_en = crypto_util._get_wordlist('en')
+ wordlist_en = current_app.crypto_util.get_wordlist('en')
# chunk the words to cut down on the number of requets we make
# otherwise this test is *slow*
@@ -111,7 +111,7 @@ def test_create_new_source(self):
self.assertIn("Submit Materials", resp.data)
@patch('source.app.logger.warning')
- @patch('crypto_util.genrandomid',
+ @patch('crypto_util.CryptoUtil.genrandomid',
side_effect=[overly_long_codename, 'short codename'])
def test_generate_too_long_codename(self, genrandomid, logger):
"""Generate a codename that exceeds the maximum codename length"""
@@ -407,7 +407,7 @@ def test_metadata_route(self):
self.assertEqual(json.loads(resp.data.decode('utf-8')).get(
'sd_version'), version.__version__)
- @patch('crypto_util.hash_codename')
+ @patch('crypto_util.CryptoUtil.hash_codename')
def test_login_with_overly_long_codename(self, mock_hash_codename):
"""Attempting to login with an overly long codename should result in
an error, and scrypt should not be called to avoid DoS."""
@@ -457,8 +457,8 @@ def test_source_is_deleted_while_logged_in(self, logger):
follow_redirects=True)
# Now the journalist deletes the source
- filesystem_id = crypto_util.hash_codename(codename)
- crypto_util.delete_reply_keypair(filesystem_id)
+ filesystem_id = current_app.crypto_util.hash_codename(codename)
+ current_app.crypto_util.delete_reply_keypair(filesystem_id)
source = Source.query.filter_by(filesystem_id=filesystem_id).one()
db.session.delete(source)
db.session.commit()
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -1,15 +1,21 @@
# -*- coding: utf-8 -*-
import os
+import pytest
+import re
import shutil
+import store
import unittest
import zipfile
+from flask import current_app
+
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
import journalist_app
-import store
import utils
+from store import Storage
+
class TestStore(unittest.TestCase):
@@ -37,20 +43,23 @@ def create_file_in_source_dir(self, filesystem_id, filename):
return source_directory, file_path
def test_path_returns_filename_of_folder(self):
- """store.path is called in this way in journalist.delete_collection"""
+ """`Storage.path` is called in this way in
+ journalist.delete_collection
+ """
filesystem_id = 'example'
- generated_absolute_path = store.path(filesystem_id)
+ generated_absolute_path = current_app.storage.path(filesystem_id)
expected_absolute_path = os.path.join(config.STORE_DIR, filesystem_id)
self.assertEquals(generated_absolute_path, expected_absolute_path)
def test_path_returns_filename_of_items_within_folder(self):
- """store.path is called in this way in journalist.bulk_delete"""
+ """`Storage.path` is called in this way in journalist.bulk_delete"""
filesystem_id = 'example'
item_filename = '1-quintuple_cant-msg.gpg'
- generated_absolute_path = store.path(filesystem_id, item_filename)
+ generated_absolute_path = current_app.storage.path(filesystem_id,
+ item_filename)
expected_absolute_path = os.path.join(config.STORE_DIR,
filesystem_id, item_filename)
@@ -58,29 +67,39 @@ def test_path_returns_filename_of_items_within_folder(self):
def test_verify_path_not_absolute(self):
with self.assertRaises(store.PathException):
- store.verify(os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))
+ current_app.storage.verify(
+ os.path.join(config.STORE_DIR, '..', 'etc', 'passwd'))
def test_verify_in_store_dir(self):
with self.assertRaisesRegexp(store.PathException, 'Invalid directory'):
- store.verify(config.STORE_DIR + "_backup")
+ current_app.storage.verify(config.STORE_DIR + "_backup")
+
+ def test_verify_store_path_not_absolute(self):
+ with pytest.raises(store.PathException) as exc_info:
+ current_app.storage.verify('..')
+
+ assert 'The path is not absolute and/or normalized' in str(exc_info)
def test_verify_store_dir_not_absolute(self):
- STORE_DIR = config.STORE_DIR
- try:
- with self.assertRaisesRegexp(
- store.PathException,
- 'config.STORE_DIR\(\S*\) is not absolute'):
- config.STORE_DIR = '.'
- store.verify('something')
- finally:
- config.STORE_DIR = STORE_DIR
+ with pytest.raises(store.PathException) as exc_info:
+ Storage('..', '/', '<not a gpg key>')
+
+ msg = str(exc_info.value)
+ assert re.compile('storage_path.*is not absolute').match(msg)
+
+ def test_verify_store_temp_dir_not_absolute(self):
+ with pytest.raises(store.PathException) as exc_info:
+ Storage('/', '..', '<not a gpg key>')
+
+ msg = str(exc_info.value)
+ assert re.compile('temp_dir.*is not absolute').match(msg)
def test_verify_flagged_file_in_sourcedir_returns_true(self):
source_directory, file_path = self.create_file_in_source_dir(
'example-filesystem-id', '_FLAG'
)
- self.assertTrue(store.verify(file_path))
+ self.assertTrue(current_app.storage.verify(file_path))
shutil.rmtree(source_directory) # Clean up created files
@@ -92,7 +111,7 @@ def test_verify_invalid_file_extension_in_sourcedir_raises_exception(self):
with self.assertRaisesRegexp(
store.PathException,
'Invalid file extension .txt'):
- store.verify(file_path)
+ current_app.storage.verify(file_path)
shutil.rmtree(source_directory) # Clean up created files
@@ -104,7 +123,7 @@ def test_verify_invalid_filename_in_sourcedir_raises_exception(self):
with self.assertRaisesRegexp(
store.PathException,
'Invalid filename NOTVALID.gpg'):
- store.verify(file_path)
+ current_app.storage.verify(file_path)
shutil.rmtree(source_directory) # Clean up created files
@@ -116,7 +135,8 @@ def test_get_zip(self):
submission.filename)
for submission in submissions]
- archive = zipfile.ZipFile(store.get_bulk_archive(submissions))
+ archive = zipfile.ZipFile(
+ current_app.storage.get_bulk_archive(submissions))
archivefile_contents = archive.namelist()
for archived_file, actual_file in zip(archivefile_contents, filenames):
@@ -131,14 +151,14 @@ def test_rename_valid_submission(self):
new_journalist_filename = 'nestor_makhno'
expected_filename = old_filename.replace(old_journalist_filename,
new_journalist_filename)
- actual_filename = store.rename_submission(
+ actual_filename = current_app.storage.rename_submission(
source.filesystem_id, old_filename,
new_journalist_filename)
self.assertEquals(actual_filename, expected_filename)
def test_rename_submission_with_invalid_filename(self):
original_filename = '1-quintuple_cant-msg.gpg'
- returned_filename = store.rename_submission(
+ returned_filename = current_app.storage.rename_submission(
'example-filesystem-id', original_filename,
'this-new-filename-should-not-be-returned')
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
--- a/securedrop/tests/utils/db_helper.py
+++ b/securedrop/tests/utils/db_helper.py
@@ -5,11 +5,11 @@
import mock
import os
+from flask import current_app
+
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-import crypto_util
import models
-import store
from db import db
@@ -26,8 +26,8 @@ def init_journalist(is_admin=False):
corresponding to the row just added to the database. The
second, their password string.
"""
- username = crypto_util.genrandomid()
- user_pw = crypto_util.genrandomid()
+ username = current_app.crypto_util.genrandomid()
+ user_pw = current_app.crypto_util.genrandomid()
user = models.Journalist(username, user_pw, is_admin)
db.session.add(user)
db.session.commit()
@@ -53,12 +53,12 @@ def reply(journalist, source, num_replies):
source.interaction_count += 1
fname = "{}-{}-reply.gpg".format(source.interaction_count,
source.journalist_filename)
- crypto_util.encrypt(str(os.urandom(1)),
- [
- crypto_util.getkey(source.filesystem_id),
- config.JOURNALIST_KEY
- ],
- store.path(source.filesystem_id, fname))
+ current_app.crypto_util.encrypt(
+ str(os.urandom(1)),
+ [current_app.crypto_util.getkey(source.filesystem_id),
+ config.JOURNALIST_KEY],
+ current_app.storage.path(source.filesystem_id, fname))
+
reply = models.Reply(journalist, source, fname)
replies.append(reply)
db.session.add(reply)
@@ -102,14 +102,14 @@ def init_source_without_keypair():
initialized. The second, their codename string.
"""
# Create source identity and database record
- codename = crypto_util.genrandomid()
- filesystem_id = crypto_util.hash_codename(codename)
- journalist_filename = crypto_util.display_id()
+ codename = current_app.crypto_util.genrandomid()
+ filesystem_id = current_app.crypto_util.hash_codename(codename)
+ journalist_filename = current_app.crypto_util.display_id()
source = models.Source(filesystem_id, journalist_filename)
db.session.add(source)
db.session.commit()
# Create the directory to store their submissions and replies
- os.mkdir(store.path(source.filesystem_id))
+ os.mkdir(current_app.storage.path(source.filesystem_id))
return source, codename
@@ -124,7 +124,7 @@ def init_source():
initialized. The second, their codename string.
"""
source, codename = init_source_without_keypair()
- crypto_util.genkeypair(source.filesystem_id, codename)
+ current_app.crypto_util.genkeypair(source.filesystem_id, codename)
return source, codename
@@ -146,10 +146,12 @@ def submit(source, num_submissions):
submissions = []
for _ in range(num_submissions):
source.interaction_count += 1
- fpath = store.save_message_submission(source.filesystem_id,
- source.interaction_count,
- source.journalist_filename,
- str(os.urandom(1)))
+ fpath = current_app.storage.save_message_submission(
+ source.filesystem_id,
+ source.interaction_count,
+ source.journalist_filename,
+ str(os.urandom(1))
+ )
submission = models.Submission(source, fpath)
submissions.append(submission)
db.session.add(submission)
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -1,16 +1,16 @@
# -*- coding: utf-8 -*-
"""Testing utilities related to setup and teardown of test environment.
"""
+import gnupg
import os
-from os.path import abspath, dirname, isdir, join, realpath
import shutil
import threading
-import gnupg
+from os.path import abspath, dirname, isdir, join, realpath
os.environ['SECUREDROP_ENV'] = 'test' # noqa
import config
-import crypto_util
+
from db import db
FILES_DIR = abspath(join(dirname(realpath(__file__)), '..', 'files'))
@@ -49,8 +49,6 @@ def setup():
create_directories()
init_gpg()
db.create_all()
- # Do tests that should always run on app startup
- crypto_util.do_runtime_tests()
def teardown():
| Replace `import config` with dependency injection
# Feature request
## Description
We still do `import config` everywhere in the app which breaks dependency injection and leaves us with a bit of spaghetti code. This negatively impacts tests in that it makes it quite hard to properly use fixtures in testing.
## User Stories
As a dev, I want to be able to dependency injection during tests.
| 2018-02-07T11:45:35Z | [] | [] |
|
freedomofpress/securedrop | 2,989 | freedomofpress__securedrop-2989 | [
"2482"
] | 2d46e37c08a30188ef6284ebd3425b8929522b4b | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -266,7 +266,7 @@ def __init__(self, args):
int],
['sasl_domain', "gmail.com", str,
u'SASL domain for sending OSSEC alerts',
- SiteConfig.ValidateNotEmpty(),
+ None,
None],
['sasl_username', '', str,
u'SASL username for sending OSSEC alerts',
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -367,13 +367,19 @@ def verify_prompt_fingerprint(self, site_config, desc):
clean_fpr = site_config.sanitize_fingerprint(fpr)
assert site_config.user_prompt_config_one(desc, fpr) == clean_fpr
+ def verify_desc_consistency_allow_empty(self, site_config, desc):
+ (var, default, etype, prompt, validator, transform) = desc
+ # verify the default passes validation
+ assert site_config.user_prompt_config_one(desc, None) == default
+ assert type(default) == etype
+
verify_prompt_securedrop_app_gpg_fingerprint = verify_prompt_fingerprint
verify_prompt_ossec_alert_gpg_public_key = verify_desc_consistency
verify_prompt_ossec_gpg_fpr = verify_prompt_fingerprint
verify_prompt_ossec_alert_email = verify_prompt_not_empty
verify_prompt_smtp_relay = verify_prompt_not_empty
verify_prompt_smtp_relay_port = verify_desc_consistency
- verify_prompt_sasl_domain = verify_desc_consistency
+ verify_prompt_sasl_domain = verify_desc_consistency_allow_empty
verify_prompt_sasl_username = verify_prompt_not_empty
verify_prompt_sasl_password = verify_prompt_not_empty
| Installation does not like having no sasl_domain
# Bug
## Description
Our mail server auths with just a username, not a username and a domain.
## Steps to Reproduce
set sasl_domain to "" in the site-specific vars
run `./securedrop-admin install`
## Expected Behavior
- pass validation
- installs a config at /etc/postfix/sasl_password that looks like
```
[$smtp_relay]:$smtp_port $sasl_username:$sasl_password
```
## Actual Behavior
- Fails validation by trying to nslookup empty string
- writes out a config with an `@` after the sasl_username
## Comments
I think the `@` is ok, it just looks super weird. It seems to be working though.
| Thanks, @icco! During QA we typically only test with 1 or 2 mail setups, and that's clearly not enough. When we have a patch to address the problem you're describing, we'll ping you for review to make sure it works for your setup, as well as what we're already testing against. | 2018-02-09T18:54:47Z | [] | [] |
freedomofpress/securedrop | 2,990 | freedomofpress__securedrop-2990 | [
"2902"
] | e5cc9a0f629d6fe4c16c33f31fbd78341895853c | diff --git a/securedrop/create-demo-user.py b/securedrop/create-demo-user.py
new file mode 100755
--- /dev/null
+++ b/securedrop/create-demo-user.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+from sqlalchemy.exc import IntegrityError
+
+os.environ["SECUREDROP_ENV"] = "dev" # noqa
+import journalist_app
+import config
+from db import db
+from models import Journalist
+
+
+def add_test_user(username, password, otp_secret, is_admin=False):
+ context = journalist_app.create_app(config).app_context()
+ context.push()
+ valid_password = "correct horse battery staple profanity oil chewy"
+
+ try:
+ user = Journalist(username=username,
+ password=valid_password,
+ is_admin=is_admin)
+ user.otp_secret = otp_secret
+ user.pw_salt = user._gen_salt()
+ user.pw_hash = user._scrypt_hash(password, user.pw_salt)
+ db.session.add(user)
+ db.session.commit()
+ print('Test user successfully added: '
+ 'username={}, password={}, otp_secret={}'
+ ''.format(username, password, otp_secret))
+ except IntegrityError:
+ print("Test user already added")
+ db.session.rollback()
+
+ context.pop()
+
+
+if __name__ == "__main__": # pragma: no cover
+ add_test_user("journalist",
+ "WEjwn8ZyczDhQSK24YKM8C9a",
+ "JHCOGO7VCER3EJ4L")
| diff --git a/docs/development/testing_application_tests.rst b/docs/development/testing_application_tests.rst
--- a/docs/development/testing_application_tests.rst
+++ b/docs/development/testing_application_tests.rst
@@ -39,9 +39,7 @@ The tests can be run inside the development VM:
.. code:: sh
- vagrant ssh development
- cd /vagrant/securedrop
- pytest -v tests
+ make -C securedrop test
Or the app-staging VM:
@@ -64,18 +62,15 @@ If you just want to run the functional tests, you can use:
.. code:: sh
- pytest -v tests/functional/
+ securedrop/bin/dev-shell bin/run-test -v tests/functional
Similarly, if you want to run a single test, you can specify it through the
file, class, and test name:
.. code:: sh
- pytest tests/test_journalist.py::TestJournalistApp::test_invalid_credentials
-
-Some Selenium tests are decorated to produce before and after screenshots to aid
-in debugging. This behavior is enabled with the ``SCREENSHOTS_ENABLED`` environment
-variable. Output PNG files will be placed in the ``tests/log/`` directory.
+ securedrop/bin/dev-shell bin/run-test \
+ tests/test_journalist.py::TestJournalistApp::test_invalid_credentials
The `gnupg
<https://pythonhosted.org/python-gnupg>`_ library can be quite verbose in its
@@ -84,10 +79,6 @@ be controlled via the ``GNUPG_LOG_LEVEL`` environment variable. It can have valu
such as ``INFO`` or ``DEBUG`` if some particular test case or test run needs
greater verbosity.
-.. code:: sh
-
- SCREENSHOTS_ENABLED=1 pytest tests/functional/
-
Page Layout Tests
~~~~~~~~~~~~~~~~~
@@ -101,7 +92,7 @@ option:
.. code:: sh
- pytest tests/ --page-layout
+ securedrop/bin/dev-shell bin/run-test --page-layout tests
Updating the application tests
diff --git a/docs/development/testing_configuration_tests.rst b/docs/development/testing_configuration_tests.rst
--- a/docs/development/testing_configuration_tests.rst
+++ b/docs/development/testing_configuration_tests.rst
@@ -20,11 +20,7 @@ Running the config tests
------------------------
In order to run the tests, first create and provision the VM you intend
-to test. For the development VM:
-
-.. code:: sh
-
- vagrant up development
+to test.
For the staging VMs:
@@ -39,7 +35,6 @@ machines for faster testing:
.. code:: sh
- ./testinfra/test.py development
./testinfra/test.py app-staging
./testinfra/test.py mon-staging
@@ -65,7 +60,6 @@ than the Ansible playbooks: ::
βββ app-prod.yml
βββ app-staging.yml
βββ build.yml
- βββ development.yml
βββ mon-prod.yml
βββ mon-staging.yml
@@ -102,7 +96,6 @@ Vagrantfile: ::
βββ app-code
βββ build
βββ common
- βββ development
βββ mon
Ideally the config tests would be broken up according to roles,
diff --git a/securedrop/bin/test b/securedrop/bin/run-test
similarity index 81%
rename from securedrop/bin/test
rename to securedrop/bin/run-test
--- a/securedrop/bin/test
+++ b/securedrop/bin/run-test
@@ -1,13 +1,20 @@
#!/bin/bash
+# shellcheck disable=SC1090
+
set -euo pipefail
-# shellcheck source=./dev-deps
-. "${BASH_SOURCE%/*}/dev-deps"
+
+source "${BASH_SOURCE%/*}/dev-deps"
+
+run_xvfb &
+run_redis &
+urandom
+run_sass --update
+maybe_create_config_py
if [ -n "${CIRCLE_BRANCH:-}" ] ; then
touch tests/log/firefox.log
function finish {
cp tests/log/firefox.log /tmp/test-results/logs/
- cp /tmp/securedrop*log /tmp/test-results/logs/
bash <(curl -s https://codecov.io/bash)
}
trap finish EXIT
diff --git a/testinfra/development/test_development_application_settings.py b/testinfra/development/test_development_application_settings.py
deleted file mode 100644
--- a/testinfra/development/test_development_application_settings.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import pytest
-import os
-
-hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
-
-sd_test_vars = pytest.securedrop_test_vars
-
-
[email protected]('package', [
- "securedrop-app-code",
- "apache2-mpm-worker",
- "libapache2-mod-wsgi",
- "libapache2-mod-xsendfile",
-])
-def test_development_lacks_deb_packages(Command, package):
- """
- The development machine does not use Apache, but rather the Flask runner,
- for standing up dev-friendly servers inside the VM. Therefore the
- app-code-related deb packages should be absent.
- """
- # The TestInfra `Package` module doesn't offer state=absent checks,
- # so let's call `dpkg -l` and inspect that output.
- c = Command("dpkg -l {}".format(package))
- assert c.rc == 1
- assert c.stdout == ""
- stderr = c.stderr.rstrip()
- assert stderr == "dpkg-query: no packages found matching {}".format(
- package)
-
-
-def test_development_apparmor_no_complain_mode(Command, Sudo):
- """
- Ensure that AppArmor profiles are not set to complain mode in development.
- The app-staging host sets profiles to complain, viz.
-
- * usr.sbin.apache2
- * usr.sbin.tor
-
- but those changes should not land on the development machine.
- """
-
- with Sudo():
- c = Command("aa-status")
- if hostenv == "travis":
- assert c.rc == 3
- assert 'apparmor filesystem is not mounted' in c.stderr
- else:
- assert c.rc == 0
- assert '0 profiles are in complain mode.' in c.stdout
-
-
[email protected]('unwanted_file', [
- "/var/www/html",
- "/var/www/source.wsgi",
- "/var/www/document.wsgi",
-])
-def test_development_apache_docroot_absent(File, unwanted_file):
- """
- Ensure the default HTML document root is missing.
- Development environment does not serve out of /var/www,
- since it uses the Flask dev server, not Apache.
- """
- f = File(unwanted_file)
- assert not f.exists
-
-
[email protected]('data_dir', [
- "/var/lib/securedrop",
- "/var/lib/securedrop/keys",
- "/var/lib/securedrop/tmp",
- "/var/lib/securedrop/store",
-])
-def test_development_data_directories_exist(File, data_dir):
- """
- Ensure that application code directories are created
- under /vagrant for the development environment, rather than
- /var/www as in staging and prod.
- """
- f = File(data_dir)
- assert f.is_directory
- assert f.user == sd_test_vars.securedrop_user
- assert f.group == sd_test_vars.securedrop_user
- assert oct(f.mode) == "0700"
-
-
-def test_development_app_directories_exist(File):
- """
- Ensure that application code directories are created
- under /vagrant for the development environment, rather than
- /var/www as in staging and prod.
-
- Using a separate check from the data directories because /vagrant
- will be mounted with different mode.
- """
- f = File(sd_test_vars.securedrop_code)
- assert f.is_directory
- assert f.user == sd_test_vars.securedrop_user
- assert f.group == sd_test_vars.securedrop_user
-
-
-def test_development_clean_tmp_cron_job(Command, Sudo):
- """
- Ensure cron job for cleaning the temporary directory for the app code
- exists. Also, ensure that the older format for the cron job is absent,
- since we updated manage.py subcommands to use hyphens instead of
- underscores (e.g. `clean_tmp` -> `clean-tmp`).
- """
-
- with Sudo():
- c = Command.check_output('crontab -l')
- assert "@daily {}/manage.py clean-tmp".format(
- sd_test_vars.securedrop_code) in c
- assert "@daily {}/manage.py clean_tmp".format(
- sd_test_vars.securedrop_code) not in c
- assert "clean_tmp".format(sd_test_vars.securedrop_code) not in c
- # Make sure that the only cron lines are a comment and the actual job.
- # We don't want any duplicates.
- assert len(c.split("\n")) == 2
-
-
-def test_development_default_logo_exists(File):
- """
- Checks for default SecureDrop logo file.
- """
-
- f = File("{}/static/i/logo.png".format(sd_test_vars.securedrop_code))
- assert f.is_file
- assert f.user == sd_test_vars.securedrop_user
- assert f.group == sd_test_vars.securedrop_user
diff --git a/testinfra/development/test_development_environment.py b/testinfra/development/test_development_environment.py
deleted file mode 100644
--- a/testinfra/development/test_development_environment.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import pytest
-import getpass
-
-
-def test_development_app_dependencies(Package):
- """
- Ensure development apt dependencies are installed.
- """
- development_apt_dependencies = [
- 'libssl-dev',
- 'ntp',
- 'python-dev',
- 'python-pip',
- ]
- for dependency in development_apt_dependencies:
- p = Package(dependency)
- assert p.is_installed
-
-
[email protected]('pip_package,version', [
- ('Flask-Testing', '0.7.1'),
- ('Flask', '0.12.2'),
- ('Jinja2', '2.10'),
- ('MarkupSafe', '1.0'),
- ('Werkzeug', '0.12.2'),
- ('beautifulsoup4', '4.6.0'),
- ('click', '6.7'),
- ('coverage', '4.4.2'),
- ('first', '2.0.1'),
- ('funcsigs', '1.0.2'),
- ('itsdangerous', '0.24'),
- ('mock', '2.0.0'),
- ('pbr', '3.1.1'),
- ('pip-tools', '1.11.0'),
- ('py', '1.5.2'),
- ('pytest-cov', '2.5.1'),
- ('pytest', '3.3.2'),
- ('selenium', '2.53.6'),
- ('six', '1.11.0'),
-])
-def test_development_pip_dependencies(Command, Sudo, pip_package, version):
- """
- Declare SecureDrop app pip requirements. On the development VM,
- the pip dependencies should be installed directly via pip, rather
- than relying on the deb packages with pip-wheel inclusions.
- Versions here are intentionally hardcoded to track changes.
- """
- # Using elevated privileges to list the Python packages, since
- # the playbooks use sudo to install the pip packages system-wide.
- # In Travis, lack of sudo here hides a number of dependencies.
- with Sudo():
- c = Command('pip freeze')
- assert "{}=={}".format(pip_package, version) in c.stdout.rstrip()
-
-
[email protected](getpass.getuser() != 'vagrant',
- reason="vagrant bashrc checks dont make sense in CI")
-def test_development_securedrop_env_var(File):
- """
- Ensure that the SECUREDROP_ENV var is set to "dev".
-
-
- TODO: this isn't really checking that the env var is set,
- just that it's declared in the bashrc. spec_helper ignores
- env vars via ssh by default, so start there.
- """
-
- f = File('/home/vagrant/.bashrc')
- assert f.is_file
- assert f.user == 'vagrant'
- assert f.contains('^export SECUREDROP_ENV=dev$')
diff --git a/testinfra/development/test_development_networking.py b/testinfra/development/test_development_networking.py
deleted file mode 100644
--- a/testinfra/development/test_development_networking.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import pytest
-import os
-
-hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
-
-
[email protected](hostenv == 'travis',
- reason="Custom networking in Travis")
-def test_development_iptables_rules(Command, Sudo):
- """
- Declare desired iptables rules
- The 'development' machine doesn't have any custom
- iptables rules, so just check for the default chains.
- """
- desired_iptables_rules = [
- '-P INPUT ACCEPT',
- '-P FORWARD ACCEPT',
- '-P OUTPUT ACCEPT',
- ]
- with Sudo():
- c = Command.check_output('iptables -S')
- for rule in desired_iptables_rules:
- assert rule in c
-
- # If any iptables rules are ever added, this test will
- # fail, so tests can be written for the new rules.
- # Counting newlines in the output simply to avoid calling
- # `iptables -S` again and piping to `wc -l`.
- assert c.count("\n") == len(desired_iptables_rules) - 1
-
-
-def test_development_ssh_listening(Socket):
- """
- Check for ssh listening on all interfaces. In prod environment,
- SSH will be listening only on localhost, i.e. SSH over ATHS.
- """
- s = Socket("tcp://0.0.0.0:22")
- assert s.is_listening
-
-
-def test_development_redis_worker(Socket):
- """
- Ensure that Redis worker is listening on localhost.
- This worker is used to handle incoming submissions.
- """
-
- s = Socket("tcp://127.0.0.1:6379")
- assert s.is_listening
-
-# The Flask runners for the source and journalist interfaces
-# aren't configured to run by default, e.g. on boot. Nor
-# do the app tests cause them to be run. So, we shouldn't
-# really expected them to be running.
-# check for source interface flask port listening
-# describe port(8080) do
-# it { should be_listening.on('0.0.0.0').with('tcp') }
-# end
-#
-# check for journalist interface flask port listening
-# describe port(8081) do
-# it { should be_listening.on('0.0.0.0').with('tcp') }
-# end
diff --git a/testinfra/test.py b/testinfra/test.py
--- a/testinfra/test.py
+++ b/testinfra/test.py
@@ -9,11 +9,7 @@
import sys
import tempfile
-# By default let's assume we're testing against the development VM.
-try:
- target_host = sys.argv[1]
-except IndexError:
- target_host = "development"
+target_host = sys.argv[1]
# Set env var so that `testinfra/conftest.py` can read in a YAML vars file
# specific to the host being tested.
@@ -24,9 +20,7 @@ def get_target_roles(target_host):
"""
Assemble list of role tests to run. Hard-coded per host.
"""
- target_roles = {"development": ['testinfra/app-code',
- 'testinfra/development'],
- "app-staging": ['testinfra/app',
+ target_roles = {"app-staging": ['testinfra/app',
'testinfra/app-code',
'testinfra/common',
'testinfra/development/test_xvfb.py'],
@@ -84,13 +78,8 @@ def run_testinfra(target_host, verbose=True):
""".lstrip().rstrip()
elif os.environ.get("FPF_CI", 'false') == 'true':
- if os.environ.get("CI_SD_ENV", "development") == "development":
- os.environ['SECUREDROP_TESTINFRA_TARGET_HOST'] = "travis"
- ssh_config_path = ""
- testinfra_command_template = "testinfra -vv {target_roles}"
- else:
- ssh_config_path = os.environ["CI_SSH_CONFIG"]
- testinfra_command_template = """
+ ssh_config_path = os.environ["CI_SSH_CONFIG"]
+ testinfra_command_template = """
testinfra \
-vv \
-n 8 \
| Deprecate Ansible dev environment in favor of Docker dev environment
## Description
Currently we have two development environments: one provisioned with Ansible and one using Docker. Since the purpose of the development environment is to allow developers to iterate quickly on the application code, I think we should deprecate the Ansible-based environment in favor of the Docker environment once #2861 is merged.
## User Stories
As a SecureDrop maintainer, I want to maintain a single development environment.
As a SecureDrop developer, I want a development environment that spins up quickly with minimal dependencies so I can get to writing code as quickly as possible.
| 2018-02-09T23:14:27Z | [] | [] |
|
freedomofpress/securedrop | 2,997 | freedomofpress__securedrop-2997 | [
"2983"
] | eeb6ba3cf5cdd68ce02ff57ebf1907e3a983a348 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -154,7 +154,7 @@ def __init__(self, appdir):
os.path.join(appdir, 'translations'))
def get_translations(self):
- translations = set(['en', 'en_US'])
+ translations = set(['en_US'])
for dirname in os.listdir(self.translation_dir):
if dirname != 'messages.pot':
translations.add(dirname)
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -383,9 +383,9 @@ def verify_prompt_securedrop_supported_locales(self, site_config, desc):
assert site_config.user_prompt_config_one(desc, None) == default
assert type(default) == etype
assert site_config.user_prompt_config_one(
- desc, 'en en_US') == ['en', 'en_US']
+ desc, 'fr_FR en_US') == ['fr_FR', 'en_US']
assert site_config.user_prompt_config_one(
- desc, ['en', 'en_US']) == ['en', 'en_US']
+ desc, ['fr_FR', 'en_US']) == ['fr_FR', 'en_US']
assert site_config.user_prompt_config_one(desc, '') == []
with pytest.raises(ValidationError):
site_config.user_prompt_config_one(desc, 'wrong')
| Error 500 on app startup when language `en` is in `securedrop_supported_locales`
# Bug
## Description
Using `en` in `securedrop_supported_locales` results in app unable to startup.
Validation logic in both in `develop` (new sdconfig) and in `0.5.2` allow a user to specify `en`
## Steps to Reproduce
- `vagrant up /prod/ --no-provision`
- in tails, `securedrop_supported_locales` in `install_files/ansible-base/group-vars/all/site-specific`should contain `en` and `fr_FR`
- `securedrop-admin install`
## Expected Behavior
Once the installation completes, Source and Journalist interface web interface starts and responds to web requests.
## Actual Behavior
Installation completes successfully, but when attempting to connect to source or journalist interface results in an Internal Server Error:
from `/var/log/apache2/journalist-error.log`
```
[Fri Feb 09 00:14:53.874744 2018] [:error] [pid 1437:tid 2929872570112] [remote 127.0.0.1:48940] LocaleNotFound: config.py SUPPORTED_LOCALES contains [u'en'] which is not among the locales found in the /var/www/securedrop/translations directory: ['en_US', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE', 'nb_NO', 'pt_BR', 'es_ES', 'nl', 'zh_Hant', 'it_IT', 'tr', 'fr_FR', 'ar', 'de_DE']
[Fri Feb 09 00:14:53.874821 2018] [core:error] [pid 1441:tid 2929804687104] [client 127.0.0.1:48470] AH00124: Request exceeded the limit of 10 internal redirects due to probable configuration error. Use 'LimitInternalRecursion' to increase the limit if necessary. Use 'LogLevel debug' to get a backtrace.
```
| 2018-02-11T16:11:40Z | [] | [] |
|
freedomofpress/securedrop | 2,998 | freedomofpress__securedrop-2998 | [
"2955"
] | eeb6ba3cf5cdd68ce02ff57ebf1907e3a983a348 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -291,7 +291,7 @@ def load_and_update_config(self):
return self.update_config()
def update_config(self):
- self.config = self.user_prompt_config()
+ self.config.update(self.user_prompt_config())
self.save()
self.validate_gpg_keys()
return True
| diff --git a/admin/tests/files/site-specific b/admin/tests/files/site-specific
--- a/admin/tests/files/site-specific
+++ b/admin/tests/files/site-specific
@@ -17,3 +17,4 @@ securedrop_supported_locales:
smtp_relay: smtp.gmail.com
smtp_relay_port: 587
ssh_users: sd
+user_defined_variable: "must not be discarded"
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -297,6 +297,7 @@ def test_update_config(self, mock_save, mock_validate_input):
site_config = securedrop_admin.SiteConfig(args)
assert site_config.load_and_update_config()
+ assert 'user_defined_variable' in site_config.config
mock_save.assert_called_once()
mock_validate_input.assert_called()
| securedrop-admin sdconfig erases additional values in site-specific
# Bug
## Description
securedrop-admin sdconfig erases values in site-specific when they are not prompted for. `securedrop-admin sdconfig` should not erase entries in site-specific, which would help testing (e.g.: releases that are in development or perhaps alpha/beta features).
## Steps to Reproduce
* edit `/install_files/ansible-base/group-vars/all/site-specific` and add another value
* run ./securedrop-admin sdconfig
* open `/install_files/ansible-base/group-vars/all/site-specific` and observe your value has disappeared
## Expected Behavior
`securedrop-admin sdconfig` should not erase entries in `/install_files/ansible-base/group-vars/all/site-specific`.
## Actual Behavior
`ecuredrop-admin sdconfig` erases entries in `/install_files/ansible-base/group-vars/all/site-specific`.
| @emkll additional values that Ansible would use can be set in a (for instance) `install_files/ansible-base/group-vars/all/myvars` file and it will be loaded. This is indeed convenient for testing.
I think it is good that `site-config` is owned by `securedrop-admin sdconfig` (in the sense that there is no other way to edit it), but it is entirely possible that I missed a use case.
Is there a specific reason that I'm missing for user defined variables to be in the `site-specific` file ?
@dachary This came up when I was testing https://github.com/freedomofpress/securedrop/pull/2803, which required some journalist email specific variables in `site-specific`. This behavior may also come up during development of new features, and perhaps in production, where admins use extra variables to store potentially useful information (e.g. `smtp_relay_old`). I opened this issue because I think it's a regression compared to the behavior of the old `securedrop-admin sdconfig` , but I understand this is more of an edge-case scenario.
EDIT: checked out `0.5.1` branch and ran `./securedrop-admin sdconfig` and the extra variables in `site-specific` are *not* deleted after the run.
@emkll thanks a lot for filling this, it would have been problematic if discovered after the release. The regression is as follows:
* An admin stored extra variables in `site-config` (they should not and it is best served in another file but it is entirely possible that they did)
* The admin key is updated to 0.6
* The admin runs `securedrop-admin sdconfig` (for whatever motive)
* The extra variables in `site-specific` are silentely removed
* The admin runs `securedrop-admin install` and unexpectedly modifies the production instances because the variable is no longer in `site-specific`, leading to unknown regressions
I propose that it is fixed as follows:
* `securedrop-admin sdconfig` loads `site-specific`
* for each top level YAML variable it does not know a message is displayed (**WARNING: custom variable ABC must not be stored in `site-config`, it must be stored in the `custom-variables` file instead**)
* when `securedrop-admin sdconfig` runs it keeps the unknown variables instead of removing them
An alternative would be to write all unknown variables into another file `custom-variables`. But I'm not a fan of this option as it would likely confuse the admin expecting the variables to be in `site-specific`.
That approach makes sense to me @dachary. The alternative `custom-variables` option is not only confusing but may lead to more issues (`.gitignore`?).
Perhaps the error message could also be worded to suggest the user entered a valid (non-custom) but misspelled variable name in`site-specific`. | 2018-02-11T16:35:35Z | [] | [] |
freedomofpress/securedrop | 3,000 | freedomofpress__securedrop-3000 | [
"2937"
] | 0638515a90324b9697ec9f4f082c562ec7934640 | diff --git a/docs/conf.py b/docs/conf.py
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -57,9 +57,9 @@
# built documents.
#
# The short X.Y version.
-version = '0.5.1'
+version = '0.5.2'
# The full version, including alpha/beta/rc tags.
-release = '0.5.1'
+release = '0.5.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/securedrop/version.py b/securedrop/version.py
--- a/securedrop/version.py
+++ b/securedrop/version.py
@@ -1 +1 @@
-__version__ = '0.5.1'
+__version__ = '0.5.2'
| diff --git a/molecule/aws/securedrop_test.pub b/molecule/aws/securedrop_test.pub
new file mode 100644
--- /dev/null
+++ b/molecule/aws/securedrop_test.pub
@@ -0,0 +1,30 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQENBFhPGZsBCACzn00s3+i5HdGIldDGYXxY2HKL9Qhk0DhiRrNPaQemhNijuFlC
+geCeKN/smDAUyM5mfEoxmWy3V7n8SEQUpqI4dIS2AohReLkyKEKiIpTuXW7F9kO3
+vcXHgrTka+8B4ZQxDuTHNFJLmBwJnP24LrL6BzkDIUNeQFwM0EFTDOJlW1QV6qkm
+9WGizo2sR0VBJJabfRWrTWd8llYOVcc+LptErVNADPaX6iqb+QnZVJ/nYmCTgABj
+lD3aZ4EPZ+ioVOcOxbgBkAX76COObUUw/XahBGwj4fJ5kyzvDSBCHHlRzN39LKpM
+Y+HfSc1scAOWN+Dd0N/joIa0j0U4SGHo1NdzABEBAAG0MVNlY3VyZURyb3AgVEVT
+VElORyBrZXkgPHNlY3VyZWRyb3BAZnJlZWRvbS5wcmVzcz6JAU4EEwEIADgWIQRO
+15zDNi19EoNwRgJKO+SpIhGwPAUCWE8ZmwIbAwULCQgHAgYVCAkKCwIEFgIDAQIe
+AQIXgAAKCRBKO+SpIhGwPCb9B/9SuVoxbe3nLlU0bHDQtoq5P7adyTZK+5gKIiAo
+mtAkc/EuiF6jYIDLo+DBB1GBJVjyD5igTt14XR3JpMe6nLtztD5zgGk47gYQk3y5
+6f5ydd7zRo9OxulRYDvU1mXMUc0EmqfzuSxY55HJy5KQvjeKIU0fTvwbPYXdhFCC
+42iyBIkp4e4/C5oO4lNrNY2DJEZ+a8H5LHasJ4g9A78f/D5q0HWO1HutzfDeiMvq
+WFwlGMD2OzTEQA2MGlVRIYvLHAG1aV9fXY8kjCFT8ri5hxlQeTkKISfbW3pFSq6s
+Ow4r975zWLTPJNm+WTbBpfIOFBVAW34EHkcb/QmntlvqkNM+uQENBFhPGZsBCAC4
+VEtCQEuZ3WzCNL/0yQFih1EjT/AsS3j3++xvSOYWF+c7AjR9X0MkJFTnUZBHs6MX
+PM33bbkWbBBE2ILdDCEF72Uc5HyyC2lW2DvPY9ZLVSGcMCUsKARv5rbeNdgiLVP5
+8AMkmG48q0Pxrr6UVX14M34Jm5G91c/dj9zHtVwkLg4RG/rcumQdlpQhNmMycB2X
+lat48atmEkutfLEQizXIlgiCdNEpgfUBy/jZZcCOjwr8PUPmSUWjKOVMv6CSLx8K
+z2cP4We7tyq4qhc0cWjJOWOmJpu5tbmi6XEEWGaIJyN+POhHEcb0tI1rTJ88nrMb
+DI/NF/35kuWIIkADOb2vABEBAAGJATYEGAEIACAWIQRO15zDNi19EoNwRgJKO+Sp
+IhGwPAUCWE8ZmwIbDAAKCRBKO+SpIhGwPC3fB/0TfuScS718FiEcVRI3F2wBbzTQ
+VARhGzEvPSU5Z3Cur/EB8ihpWvwi39tUMeg5HTheDl/8A7f1QCjIFSVEr1slGNLh
+YFF07XGWhy837z6kiihK2z6/w6Q9QJqjE+QVZCKr97aIPejvEoHoslZTU5pJ52qF
+J7KQd1hEvVs00DxY6VlyK0FzXqByKYq6Arl2tzlCZ6RPEHKXV2xSP06jLEagzgYe
+DylVo9Xahenj4n/Mtq7Am6tGgU9Vy9cGbWNBdUND/mFQEEZSh9RJabPeluH12sir
+5/tfsDr4DGHSz7ws+5M6Zbk6oNJEwQZ4cR+81qCfXE5X5LW1KlAL8wDl7dfS
+=fYUi
+-----END PGP PUBLIC KEY BLOCK-----
\ No newline at end of file
diff --git a/molecule/aws/tor_apt_test.yml b/molecule/aws/tor_apt_test.yml
new file mode 100644
--- /dev/null
+++ b/molecule/aws/tor_apt_test.yml
@@ -0,0 +1,39 @@
+---
+- name: Add apt SD test public key
+ apt_key:
+ data: "{{ lookup('file','securedrop_test.pub') }}"
+ state: present
+
+- name: Temporary fix for GH issue 2938
+ file:
+ state: absent
+ path: "/etc/apt/sources.list.d/tor_apt_freedom_press.list"
+
+- name: Switch apt repo URLs to staging.
+ replace:
+ dest: "/etc/apt/sources.list.d/tor.apt.freedom.press.list"
+ replace: "tor-apt-test.freedom.press"
+ regexp: '//tor-apt\.freedom\.press'
+ ignore_errors: "yes"
+ notify: update tor
+
+- name: Force possible tor update
+ meta: flush_handlers
+
+- name: Squash testinfra failure for packages needing update
+ apt:
+ upgrade: safe
+
+- name: Extract latest tor version
+ shell: |
+ apt-cache policy tor | sed -e 's/^\s*Installed:\ \(\S*\)/\1/g;tx;d;:x'
+ changed_when: false
+ register: extract_tor_version
+
+- name: Dump Tor version to file (for reporting)
+ copy:
+ dest: "{{ playbook_dir }}/../../.tor_version"
+ content: "{{ extract_tor_version.stdout }}"
+ delegate_to: localhost
+ run_once: true
+ become: "no"
diff --git a/molecule/builder/tests/vars.yml b/molecule/builder/tests/vars.yml
--- a/molecule/builder/tests/vars.yml
+++ b/molecule/builder/tests/vars.yml
@@ -1,5 +1,5 @@
---
-securedrop_version: "0.5.1"
+securedrop_version: "0.5.2"
ossec_version: "2.8.2"
keyring_version: "0.1.1"
config_version: "0.1.0"
diff --git a/testinfra/common/test_tor_mirror.py b/testinfra/common/test_tor_mirror.py
--- a/testinfra/common/test_tor_mirror.py
+++ b/testinfra/common/test_tor_mirror.py
@@ -1,6 +1,10 @@
+import os
import pytest
[email protected](
+ os.environ.get('CIRCLE_BRANCH', 'na').startswith('release'),
+ reason="Release branches will use tor-apt-test repo")
def test_tor_mirror_present(host):
"""
Ensure the FPF mirror of the Tor apt repo, tor-apt.freedom.press,
| Release SecureDrop 0.5.2
This is a tracking issue for the upcoming release of SecureDrop 0.5.2 - tasks may get added or modified. Note that this is a hotfix release and translation updates will not be required.
_SecureDrop maintainers and testers:_ As you QA 0.5.2, please report back your testing results as comments on this ticket. File GitHub issues for any problems found, tag them "QA: Release", and associate them with the 0.5.2 milestone for tracking.
# Prepare first release candidate (0.5.2~rc1)
- [x] Branch `release/0.5.2` off `master`
- [x] Bring in changes for the release (done by cherry-picking changes into backported branches, then merging these branches into the release branch (except #2935)):
- [x] Add new languages to `sdconfig` prompt (#2894) - backported changes in branch `backport-pr-2894`
- [x] Replace Travis CI with Circle CI, use `xenial` version of `gettext` (#2796) - backported changes in `backport-pr-2796`
- [x] Replacement of `PyCrypto` (#2903) - backported changes in branch `backport-pr-2903`
- [x] Dynamically open firewall during OSSEC registration (#2748) - backported changes in `backport-pr-2748`
- [x] Force immediate Ansible exits (#2922) - backported changes in `backport-pr-2922`
- [x] Disabling of pip `safety` check in release branch due to use of Ansible <2.4 (#2928) - backported changes in `backport-pr-2928`
- [x] Followup to #2922, ensuring Ansible will fail fast for all plays (#2935) - cherry picked directly
- [x] Prepare `0.5.2~rc1` tag
- [x] Build debs and put up `0.5.2~rc1` on test apt server
# QA (0.5.2~rc1)
- [x] Test upgrade from 0.5.1 works on prod w/ test repo debs
- [x] Test fresh install (not upgrade) of 0.5.2 works on prod w/ test repo debs
# Release
- [x] Push signed tag
- [x] Build final Debian packages for 0.5.2
- [x] Pre-Flight: Test install (not upgrade) of 0.5.2 works w/ prod repo debs
- [x] Publish blog post about 0.5.2 Debian package release and instructions for admins
# Post-release
- [x] Merge release changes into master branch
- [ ] Merge release changes into development branch
| 2018-02-12T22:00:35Z | [] | [] |
|
freedomofpress/securedrop | 3,026 | freedomofpress__securedrop-3026 | [
"2976"
] | d49e865114a96a01256b16075d669af8f50a4914 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -461,6 +461,65 @@ def run_tails_config(args):
cwd=args.ansible_path)
+def check_for_updates(args):
+ """Check for SecureDrop updates"""
+ sdlog.info("Checking for SecureDrop updates...")
+
+ # Determine what branch we are on
+ current_tag = subprocess.check_output(['git', 'describe'], cwd=args.root)
+
+ # Fetch all branches
+ git_fetch_cmd = ['git', 'fetch', '--all']
+ subprocess.check_call(git_fetch_cmd, cwd=args.root)
+
+ # Get latest tag
+ git_all_tags = ["git", "tag"]
+ all_tags = subprocess.check_output(git_all_tags,
+ cwd=args.root).rstrip('\n').split('\n')
+
+ # Do not check out any release candidate tags
+ all_prod_tags = [x for x in all_tags if 'rc' not in x]
+
+ latest_tag = all_prod_tags[-1]
+
+ if current_tag != latest_tag:
+ sdlog.info("Update needed")
+ return True, latest_tag
+ sdlog.info("All updates applied")
+ return False, latest_tag
+
+
+def update(args):
+ """Verify, and apply latest SecureDrop workstation update"""
+ sdlog.info("Applying SecureDrop updates...")
+
+ update_status, latest_tag = check_for_updates(args)
+
+ if not update_status:
+ # Exit if we're up to date
+ return 0
+
+ git_checkout_cmd = ['git', 'checkout', latest_tag]
+ subprocess.check_call(git_checkout_cmd, cwd=args.root)
+
+ sdlog.info("Verifying signature on latest update...")
+ get_release_key = ['gpg', '--recv-key',
+ '22245C81E3BAEB4138B36061310F561200F4AD77']
+ subprocess.check_call(get_release_key, cwd=args.root)
+
+ git_verify_tag_cmd = ['git', 'tag', '-v', latest_tag]
+ sig_result = subprocess.check_output(git_verify_tag_cmd,
+ stderr=subprocess.STDOUT,
+ cwd=args.root)
+
+ if 'Good signature' not in sig_result:
+ sdlog.info("Signature verification failed.")
+ return 1
+ sdlog.info("Signature verification successful.")
+
+ sdlog.info("Updated to SecureDrop {}.".format(latest_tag))
+
+
def get_logs(args):
"""Get logs for forensics and debugging purposes"""
sdlog.info("Gathering logs for forensics and debugging")
@@ -528,6 +587,13 @@ class ArgParseFormatterCombo(argparse.ArgumentDefaultsHelpFormatter,
parse_restore.set_defaults(func=restore_securedrop)
parse_restore.add_argument("restore_file")
+ parse_update = subparsers.add_parser('update', help=update.__doc__)
+ parse_update.set_defaults(func=update)
+
+ parse_check_updates = subparsers.add_parser('check_for_updates',
+ help=update.__doc__)
+ parse_check_updates.set_defaults(func=check_for_updates)
+
parse_logs = subparsers.add_parser('logs',
help=get_logs.__doc__)
parse_logs.set_defaults(func=get_logs)
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -51,6 +51,92 @@ def test_not_verbose(self, capsys):
assert 'HIDDEN' not in out
assert 'VISIBLE' in out
+ def test_check_for_updates_update_needed(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+ current_tag = "0.6"
+ tags_available = "0.6\n0.6-rc1\n0.6.1\n"
+
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ side_effect=[current_tag, tags_available]):
+ update_status, tag = securedrop_admin.check_for_updates(args)
+ assert "Update needed" in caplog.text
+ assert update_status is True
+ assert tag == '0.6.1'
+
+ def test_check_for_updates_update_not_needed(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+ current_tag = "0.6.1"
+ tags_available = "0.6\n0.6-rc1\n0.6.1\n"
+
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ side_effect=[current_tag, tags_available]):
+ update_status, tag = securedrop_admin.check_for_updates(args)
+ assert "All updates applied" in caplog.text
+ assert update_status is False
+ assert tag == '0.6.1'
+
+ def test_check_for_updates_if_most_recent_tag_is_rc(self, tmpdir, caplog):
+ """During pre-release QA, the most recent tag ends in *-rc. Let's
+ verify that users will not accidentally check out this tag."""
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+ current_tag = "0.6.1"
+ tags_available = "0.6\n0.6-rc1\n0.6.1\n0.6.1-rc1\n"
+
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ side_effect=[current_tag, tags_available]):
+ update_status, tag = securedrop_admin.check_for_updates(args)
+ assert "All updates applied" in caplog.text
+ assert update_status is False
+ assert tag == '0.6.1'
+
+ def test_update_exits_if_not_needed(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(False, "0.6.1")):
+ securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+
+ def test_update_signature_verifies(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = 'Good signature from "SecureDrop Release Signing Key"'
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")):
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ return_value=git_output):
+ securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification successful." in caplog.text
+ assert "Updated to SecureDrop" in caplog.text
+
+ def test_update_signature_does_not_verify(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = 'Bad signature from "SecureDrop Release Signing Key"'
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")):
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ return_value=git_output):
+ securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+
class TestSiteConfig(object):
| Add "securedrop-admin upgrade" command
## Description
Every release, we ask admins to do the same commands to update `securedrop-admin` in their admin workstation:
```
cd ~/Persistent/securedrop
git fetch --tags
git checkout <latest_release_tag>
gpg --recv-key "2224 5C81 E3BA EB41 38B3 6061 310F 5612 00F4 AD77β
git tag -v <latest_release_tag> # Output should include βGood Signatureβ
```
Since there are several manual steps and they involve using `git`, they are confusing to admins. We could provide a `securedrop-admin upgrade` command that did these steps for them and would only bail out 1. there is no new release or 2. if the signature does not verify.
## User Stories
As a SecureDrop administrator, I want an easier way to upgrade my `securedrop-admin` so that I don't make a mistake.
| 2018-02-17T00:24:22Z | [] | [] |
|
freedomofpress/securedrop | 3,033 | freedomofpress__securedrop-3033 | [
"3031"
] | 0e2ceb91e373162d6b6f5162d510f21db148d206 | diff --git a/securedrop/create-demo-user.py b/securedrop/create-demo-user.py
--- a/securedrop/create-demo-user.py
+++ b/securedrop/create-demo-user.py
@@ -6,7 +6,7 @@
os.environ["SECUREDROP_ENV"] = "dev" # noqa
import journalist_app
-import config
+from sdconfig import config
from db import db
from models import Journalist
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -10,7 +10,15 @@
from Cryptodome.Random import random
from flask import current_app
from gnupg._util import _is_stream, _make_binary_stream
-from typing import Dict, List, Text # noqa: F401
+
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+ # flake8 can not understand type annotation yet.
+ # That is why all type annotation relative import
+ # statements has to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401stream
+ from typing import Dict, List, Text # noqa: F401
# to fix gpg error #78 on production
os.environ['USERNAME'] = 'www-data'
diff --git a/securedrop/journalist.py b/securedrop/journalist.py
--- a/securedrop/journalist.py
+++ b/securedrop/journalist.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-import config
+from sdconfig import config
from journalist_app import create_app
diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -18,15 +18,26 @@
from models import Journalist
from store import Storage
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+ # flake8 can not understand type annotation yet.
+ # That is why all type annotation relative import
+ # statements has to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
+ from sdconfig import SDConfig # noqa: F401
+
_insecure_views = ['main.login', 'static']
def create_app(config):
+ # type: (SDConfig) -> Flask
app = Flask(__name__,
template_folder=config.JOURNALIST_TEMPLATES_DIR,
static_folder=path.join(config.SECUREDROP_ROOT, 'static'))
app.config.from_object(config.JournalistInterfaceFlaskConfig)
+ app.sdconfig = config
CSRFProtect(app)
Environment(app)
@@ -75,7 +86,8 @@ def handle_csrf_error(e):
app.jinja_env.lstrip_blocks = True
app.jinja_env.globals['version'] = version.__version__
if hasattr(config, 'CUSTOM_HEADER_IMAGE'):
- app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
+ app.jinja_env.globals['header_image'] = \
+ config.CUSTOM_HEADER_IMAGE # type: ignore
app.jinja_env.globals['use_custom_header_image'] = True
else:
app.jinja_env.globals['header_image'] = 'logo.png'
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -16,6 +16,15 @@
PasswordError, Submission)
from rm import srm
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+ # flake8 can not understand type annotation yet.
+ # That is why all type annotation relative import
+ # statements has to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
+ from sdconfig import SDConfig # noqa: F401
+
def logged_in():
# type: () -> bool
@@ -199,6 +208,7 @@ def col_delete(cols_selected):
def make_password(config):
+ # type: (SDConfig) -> str
while True:
password = current_app.crypto_util.genrandomid(
7,
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -21,7 +21,7 @@
from sqlalchemy.orm.exc import NoResultFound
os.environ['SECUREDROP_ENV'] = 'dev' # noqa
-import config
+from sdconfig import config
import journalist_app
from db import db
diff --git a/securedrop/sdconfig.py b/securedrop/sdconfig.py
new file mode 100644
--- /dev/null
+++ b/securedrop/sdconfig.py
@@ -0,0 +1,164 @@
+# -*- coding: utf-8 -*-
+
+import config as _config
+
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+ # flake8 can not understand type annotation yet.
+ # That is why all type annotation relative import
+ # statements has to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
+ from typing import List, Dict # noqa: F401
+
+
+class SDConfig(object):
+ def __init__(self):
+ # type: () -> None
+ try:
+ self.JournalistInterfaceFlaskConfig = \
+ _config.JournalistInterfaceFlaskConfig # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SourceInterfaceFlaskConfig = \
+ _config.SourceInterfaceFlaskConfig # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DATABASE_FILE = _config.DATABASE_FILE # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DATABASE_USERNAME = _config.DATABASE_USERNAME # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DATABASE_PASSWORD = _config.DATABASE_PASSWORD # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DATABASE_HOST = _config.DATABASE_HOST # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DATABASE_NAME = _config.DATABASE_NAME # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.ADJECTIVES = _config.ADJECTIVES # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DATABASE_ENGINE = _config.DATABASE_ENGINE # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.DEFAULT_LOCALE = _config.DEFAULT_LOCALE # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.GPG_KEY_DIR = _config.GPG_KEY_DIR # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.JOURNALIST_KEY = _config.JOURNALIST_KEY # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.JOURNALIST_TEMPLATES_DIR = _config.JOURNALIST_TEMPLATES_DIR # type: ignore # noqa: E501
+ except AttributeError:
+ pass
+
+ try:
+ self.NOUNS = _config.NOUNS # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SCRYPT_GPG_PEPPER = _config.SCRYPT_GPG_PEPPER # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SCRYPT_ID_PEPPER = _config.SCRYPT_ID_PEPPER # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SCRYPT_PARAMS = _config.SCRYPT_PARAMS # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SECUREDROP_DATA_ROOT = _config.SECUREDROP_DATA_ROOT # type: ignore # noqa: E501
+ except AttributeError:
+ pass
+
+ try:
+ self.SECUREDROP_ROOT = _config.SECUREDROP_ROOT # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SESSION_EXPIRATION_MINUTES = \
+ _config.SESSION_EXPIRATION_MINUTES # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SOURCE_TEMPLATES_DIR = \
+ _config.SOURCE_TEMPLATES_DIR # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.STORE_DIR = _config.STORE_DIR # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.SUPPORTED_LOCALES = \
+ _config.SUPPORTED_LOCALES # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.TEMP_DIR = _config.TEMP_DIR # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.WORD_LIST = _config.WORD_LIST # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.WORKER_PIDFILE = _config.WORKER_PIDFILE # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.TRANSLATION_DIRS = _config.TRANSLATION_DIRS # type: ignore
+ except AttributeError:
+ pass
+
+ try:
+ self.env = _config.env # type: ignore
+ except AttributeError:
+ pass
+
+
+config = SDConfig() # type: SDConfig
diff --git a/securedrop/source.py b/securedrop/source.py
--- a/securedrop/source.py
+++ b/securedrop/source.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-import config
+from sdconfig import config
from source_app import create_app
diff --git a/securedrop/source_app/__init__.py b/securedrop/source_app/__init__.py
--- a/securedrop/source_app/__init__.py
+++ b/securedrop/source_app/__init__.py
@@ -21,13 +21,24 @@
from source_app.utils import logged_in
from store import Storage
+import typing
+# https://www.python.org/dev/peps/pep-0484/#runtime-or-type-checking
+if typing.TYPE_CHECKING:
+ # flake8 can not understand type annotation yet.
+ # That is why all type annotation relative import
+ # statements has to be marked as noqa.
+ # http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
+ from sdconfig import SDConfig # noqa: F401
+
def create_app(config):
+ # type: (SDConfig) -> Flask
app = Flask(__name__,
template_folder=config.SOURCE_TEMPLATES_DIR,
static_folder=path.join(config.SECUREDROP_ROOT, 'static'))
app.request_class = RequestThatSecuresFileUploads
app.config.from_object(config.SourceInterfaceFlaskConfig)
+ app.sdconfig = config
# The default CSRF token expiration is 1 hour. Since large uploads can
# take longer than an hour over Tor, we increase the valid window to 24h.
@@ -80,7 +91,8 @@ def handle_csrf_error(e):
app.jinja_env.lstrip_blocks = True
app.jinja_env.globals['version'] = version.__version__
if getattr(config, 'CUSTOM_HEADER_IMAGE', None):
- app.jinja_env.globals['header_image'] = config.CUSTOM_HEADER_IMAGE
+ app.jinja_env.globals['header_image'] = \
+ config.CUSTOM_HEADER_IMAGE # type: ignore
app.jinja_env.globals['use_custom_header_image'] = True
else:
app.jinja_env.globals['header_image'] = 'logo.png'
@@ -92,7 +104,7 @@ def handle_csrf_error(e):
app.jinja_env.filters['filesizeformat'] = template_filters.filesizeformat
for module in [main, info, api]:
- app.register_blueprint(module.make_blueprint(config))
+ app.register_blueprint(module.make_blueprint(config)) # type: ignore
@app.before_request
@ignore_static
| diff --git a/molecule/builder/tests/test_securedrop_deb_package.py b/molecule/builder/tests/test_securedrop_deb_package.py
--- a/molecule/builder/tests/test_securedrop_deb_package.py
+++ b/molecule/builder/tests/test_securedrop_deb_package.py
@@ -157,7 +157,7 @@ def test_deb_package_contains_no_config_file(File, Command, deb):
deb_package = File(deb.format(
securedrop_test_vars.securedrop_version))
c = Command("dpkg-deb --contents {}".format(deb_package.path))
- assert not re.search("^.*config\.py$", c.stdout, re.M)
+ assert not re.search("^.*/config\.py$", c.stdout, re.M)
@pytest.mark.parametrize("deb", deb_packages)
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
--- a/securedrop/tests/conftest.py
+++ b/securedrop/tests/conftest.py
@@ -10,7 +10,7 @@
import pytest
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
# TODO: the PID file for the redis worker is hard-coded below.
# Ideally this constant would be provided by a test harness.
diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -21,7 +21,7 @@
from selenium.webdriver.support import expected_conditions
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import journalist_app
import source_app
import tests.utils.env as env
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -10,7 +10,7 @@
import tests.utils.db_helper as db_helper
from models import Journalist
-import config
+from sdconfig import config
class JournalistNavigationStepsMixin():
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
--- a/securedrop/tests/test_crypto_util.py
+++ b/securedrop/tests/test_crypto_util.py
@@ -5,7 +5,7 @@
from flask import current_app
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import crypto_util
import journalist_app
import models
diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
--- a/securedrop/tests/test_i18n.py
+++ b/securedrop/tests/test_i18n.py
@@ -27,7 +27,7 @@
from werkzeug.datastructures import Headers
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import SDConfig, config
import i18n
import journalist_app
import manage
@@ -56,10 +56,7 @@ def tearDown(self):
self.__context.pop()
def get_fake_config(self):
- class Config:
- def __getattr__(self, name):
- return getattr(config, name)
- return Config()
+ return SDConfig()
def test_get_supported_locales(self):
locales = ['en_US', 'fr_FR']
diff --git a/securedrop/tests/test_integration.py b/securedrop/tests/test_integration.py
--- a/securedrop/tests/test_integration.py
+++ b/securedrop/tests/test_integration.py
@@ -17,7 +17,7 @@
from mock import patch
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import journalist_app
import source_app
import utils
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -12,7 +12,7 @@
from sqlalchemy.exc import IntegrityError
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import SDConfig, config
from db import db
from models import (InvalidPasswordLength, Journalist, Reply, Source,
@@ -53,8 +53,7 @@ def tearDown(self):
@patch('crypto_util.CryptoUtil.genrandomid',
side_effect=['bad', VALID_PASSWORD])
def test_make_password(self, mocked_pw_gen):
- class fake_config:
- pass
+ fake_config = SDConfig()
assert (journalist_app.utils.make_password(fake_config) ==
VALID_PASSWORD)
@@ -1315,10 +1314,7 @@ def tearDown(self):
utils.env.teardown()
def get_fake_config(self):
- class Config:
- def __getattr__(self, name):
- return getattr(config, name)
- return Config()
+ return SDConfig()
# A method required by flask_testing.TestCase
def create_app(self):
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -4,7 +4,7 @@
import os
from os.path import abspath, dirname, exists, getmtime, join, realpath
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import logging
import manage
import mock
diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py
--- a/securedrop/tests/test_secure_tempfile.py
+++ b/securedrop/tests/test_secure_tempfile.py
@@ -5,7 +5,7 @@
from gnupg._util import _is_stream
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import journalist_app
import secure_tempfile
import utils
diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -8,7 +8,7 @@
from flask_testing import TestCase
from mock import patch, ANY
-import config
+from sdconfig import config
import source
import utils
import version
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -10,7 +10,7 @@
from flask import current_app
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import journalist_app
import utils
diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
--- a/securedrop/tests/test_template_filters.py
+++ b/securedrop/tests/test_template_filters.py
@@ -7,7 +7,7 @@
from flask import session
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import SDConfig, config
import i18n
import journalist_app
import manage
@@ -19,10 +19,7 @@
class TestTemplateFilters(object):
def get_fake_config(self):
- class Config:
- def __getattr__(self, name):
- return getattr(config, name)
- return Config()
+ return SDConfig()
def verify_rel_datetime_format(self, app):
with app.test_client() as c:
diff --git a/securedrop/tests/utils/db_helper.py b/securedrop/tests/utils/db_helper.py
--- a/securedrop/tests/utils/db_helper.py
+++ b/securedrop/tests/utils/db_helper.py
@@ -8,7 +8,7 @@
from flask import current_app
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
import models
from db import db
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -9,7 +9,7 @@
from os.path import abspath, dirname, isdir, join, realpath
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import config
+from sdconfig import config
from db import db
| [RFE] Have type annotation for config object
# Feature request
## Description
The config object we pass around to create the Flask applications should be properly annotated.
## User Stories
Related to #2795
| 2018-02-19T15:22:44Z | [] | [] |
|
freedomofpress/securedrop | 3,057 | freedomofpress__securedrop-3057 | [
"3058",
"2801"
] | 46aa4c0e4f8e620f8a13e78a1694af1e5843d3c2 | diff --git a/securedrop/journalist_app/__init__.py b/securedrop/journalist_app/__init__.py
--- a/securedrop/journalist_app/__init__.py
+++ b/securedrop/journalist_app/__init__.py
@@ -27,7 +27,7 @@
# http://flake8.pycqa.org/en/latest/user/error-codes.html?highlight=f401
from sdconfig import SDConfig # noqa: F401
-_insecure_views = ['main.login', 'static']
+_insecure_views = ['main.login', 'main.select_logo', 'static']
def create_app(config):
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from PIL import Image
+
import os
from flask import (Blueprint, render_template, request, url_for, redirect, g,
@@ -31,11 +33,19 @@ def manage_config():
form = LogoForm()
if form.validate_on_submit():
f = form.logo.data
- static_filepath = os.path.join(config.SECUREDROP_ROOT,
- "static/i/logo.png")
- f.save(static_filepath)
- flash(gettext("Image updated."), "logo-success")
- return redirect(url_for("admin.manage_config"))
+ custom_logo_filepath = os.path.join(config.SECUREDROP_ROOT,
+ "static/i/custom_logo.png")
+ try:
+ with Image.open(f) as im:
+ im.thumbnail((500, 450), resample=3)
+ im.save(custom_logo_filepath, "PNG")
+ flash(gettext("Image updated."), "logo-success")
+ except Exception:
+ flash("Unable to process the image file."
+ " Try another one.", "logo-error")
+ finally:
+ return redirect(url_for("admin.manage_config"))
+
else:
for field, errors in form.errors.items():
for error in errors:
diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py
--- a/securedrop/journalist_app/forms.py
+++ b/securedrop/journalist_app/forms.py
@@ -59,5 +59,6 @@ class LogoForm(FlaskForm):
logo = FileField(validators=[
FileRequired(message=gettext('File required.')),
FileAllowed(['jpg', 'png', 'jpeg'],
- message=gettext('Upload images only.'))
+ message=gettext("You can only upload JPG/JPEG"
+ " or PNG image files."))
])
diff --git a/securedrop/journalist_app/main.py b/securedrop/journalist_app/main.py
--- a/securedrop/journalist_app/main.py
+++ b/securedrop/journalist_app/main.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+import os
+
from datetime import datetime
from flask import (Blueprint, request, current_app, session, url_for, redirect,
render_template, g, flash, abort)
@@ -43,6 +45,14 @@ def logout():
session.pop('expires', None)
return redirect(url_for('main.index'))
+ @view.route('/org-logo')
+ def select_logo():
+ if os.path.exists(os.path.join(current_app.static_folder, 'i',
+ 'custom_logo.png')):
+ return redirect(url_for('static', filename='i/custom_logo.png'))
+ else:
+ return redirect(url_for('static', filename='i/logo.png'))
+
@view.route('/')
def index():
unstarred = []
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -40,6 +40,14 @@ def generate():
session['new_user'] = True
return render_template('generate.html', codename=codename)
+ @view.route('/org-logo')
+ def select_logo():
+ if os.path.exists(os.path.join(current_app.static_folder, 'i',
+ 'custom_logo.png')):
+ return redirect(url_for('static', filename='i/custom_logo.png'))
+ else:
+ return redirect(url_for('static', filename='i/logo.png'))
+
@view.route('/create', methods=['POST'])
def create():
filesystem_id = current_app.crypto_util.hash_codename(
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -5,8 +5,10 @@
import random
import unittest
import zipfile
+import base64
from cStringIO import StringIO
+from io import BytesIO
from flask import url_for, escape, session, current_app, g
from flask_testing import TestCase
from mock import patch
@@ -954,9 +956,11 @@ def test_logo_upload_with_valid_image_succeeds(self):
try:
self._login_admin()
-
+ # Create 1px * 1px 'white' PNG file from its base64 string
form = journalist_app_module.forms.LogoForm(
- logo=(StringIO('imagedata'), 'test.png')
+ logo=(BytesIO(base64.decodestring
+ ("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQ"
+ "VR42mP8/x8AAwMCAO+ip1sAAAAASUVORK5CYII=")), 'test.png')
)
self.client.post(url_for('admin.manage_config'),
data=form.data,
@@ -977,8 +981,10 @@ def test_logo_upload_with_invalid_filetype_fails(self):
resp = self.client.post(url_for('admin.manage_config'),
data=form.data,
follow_redirects=True)
- self.assertMessageFlashed("Upload images only.", "logo-error")
- self.assertIn('Upload images only.', resp.data)
+ self.assertMessageFlashed("You can only upload JPG/JPEG"
+ " or PNG image files.", "logo-error")
+ self.assertIn("You can only upload JPG/JPEG"
+ " or PNG image files.", resp.data)
def test_logo_upload_with_empty_input_field_fails(self):
self._login_admin()
| Check for a valid image file when uploading logo
# Bug
## Description
In the journalist application, when the Admin uploads an image file for logo, no check is performed to determine if it is a valid image file, or a corrupted/unreadable one. The application currently only checks for a valid extension.
Thus if someone uploads a corrupt/unreadable or non-image file with an extension like JPG, JPEG or PNG, it gets updated and shows up like:
![screenshot from 2018-02-22 19-43-45](https://user-images.githubusercontent.com/29029116/36543078-b87d329c-1808-11e8-981e-d5878de7cc37.png)
## Steps to Reproduce
Upload a non-image file with `.png` extension.
## Expected Behavior
The image should be ignored and user should be prompted to upload again.
## Actual Behavior
The incorrect image file gets uploaded.
## Comments
As suggested by @heartsucker, I'll try to write a fix for this and integrate into #3057
Recommend dimensions for logo(s) uploads
# Recommend dimensions for logo(s) uploads
## Description
Currently logos vary in size dramatically between different [deployed instances](https://securedrop.org/directory). This makes it much more difficult to have a clean design for the top bar layout on mobile (in particular) and also restricts the design options on desktop.
This issue is an extension to #2769 to encourage journalists to upload a logo that will display well on desktop and mobile screens within the constraints of the design and allow for consistent responsive layout as per #2551. Journalists should be warned that logos which have different dimensions to these cannot be guaranteed to display optimally and shown an example guide of preparing the logo including desired clearance (whitespace).
For example:
* Upload desktop square logo (recommended size 150x150) (png)
* Upload square mobile logo (recommended size 50x50) (png) OR Upload landscape mobile logo (recommended size 100x50) (png)
The above sizes are a best guess at recommended dimensions based on the current design and assume clearance is included in the image and it is centered correctly within that space. My assumption is that it's best to avoid retina for now to prioritise download speed on the Tor network. Tor browser doesn't allow SVG in high security mode so PNG at 1x for now.
Would be useful to add a mockup to this issue to show layout of instructions to journalists and upload controls.
|
Not sure if we already have a mechanism to optimise uploaded images for web on the server-side (during runtime)? Something like [PyImageOptimizer](https://pypi.python.org/pypi/PyImageOptimizer/1.1)? - [example usage](https://pyimageoptimizer.readthedocs.io/en/latest/usage.html). @redshiftzero
Thanks for filing this @toast. We should definitely inform admins of the expected size of the images in the logo upload form - currently we have a recommendation of `500px x 450px`, but e.g. on the journalist interface, as you correctly point out we resize it to a wee 150px logo size. And we don't actually optimize uploaded images server-side unfortunately. Filed followup #2807 about that, because that is a great point. | 2018-02-21T23:40:28Z | [] | [] |
freedomofpress/securedrop | 3,064 | freedomofpress__securedrop-3064 | [
"3063"
] | 8c4a0d9e1dee133692ac58d81c5b0c81a193be0e | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -26,6 +26,7 @@
import argparse
import logging
import os
+import io
import re
import string
import subprocess
@@ -363,14 +364,14 @@ def exists(self):
return os.path.exists(self.args.site_config)
def save(self):
- with open(self.args.site_config, 'w') as site_config_file:
+ with io.open(self.args.site_config, 'w') as site_config_file:
yaml.safe_dump(self.config,
site_config_file,
default_flow_style=False)
def load(self):
try:
- with open(self.args.site_config) as site_config_file:
+ with io.open(self.args.site_config) as site_config_file:
return yaml.safe_load(site_config_file)
except IOError:
sdlog.error("Config file missing, re-run with sdconfig")
diff --git a/install_files/ansible-base/roles/backup/files/0.3_collect.py b/install_files/ansible-base/roles/backup/files/0.3_collect.py
--- a/install_files/ansible-base/roles/backup/files/0.3_collect.py
+++ b/install_files/ansible-base/roles/backup/files/0.3_collect.py
@@ -9,6 +9,7 @@
import sys
import os
+import io
import zipfile
from datetime import datetime
# Import the application config.py file
@@ -58,7 +59,7 @@ def encrypt_zip_file(zf_fn):
gpg = gnupg.GPG(binary='gpg2', homedir=config.GPG_KEY_DIR)
e_fn = '{}.gpg'.format(zf_fn)
- stream = open(zf_fn, "rb")
+ stream = io.open(zf_fn, "rb")
gpg.encrypt_file(stream, config.JOURNALIST_KEY, always_trust='True',
output=e_fn)
diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
--- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
@@ -2,6 +2,7 @@
import grp
import os
+import io
import pwd
import sys
import subprocess
@@ -20,28 +21,28 @@
# load torrc_additions
if os.path.isfile(path_torrc_additions):
- with open(path_torrc_additions) as f:
+ with io.open(path_torrc_additions) as f:
torrc_additions = f.read()
else:
sys.exit('Error opening {0} for reading'.format(path_torrc_additions))
# load torrc
if os.path.isfile(path_torrc_backup):
- with open(path_torrc_backup) as f:
+ with io.open(path_torrc_backup) as f:
torrc = f.read()
else:
if os.path.isfile(path_torrc):
- with open(path_torrc) as f:
+ with io.open(path_torrc) as f:
torrc = f.read()
else:
sys.exit('Error opening {0} for reading'.format(path_torrc))
# save a backup
- with open(path_torrc_backup, 'w') as f:
+ with io.open(path_torrc_backup, 'w') as f:
f.write(torrc)
# append the additions
-with open(path_torrc, 'w') as f:
+with io.open(path_torrc, 'w') as f:
f.write(torrc + torrc_additions)
# reload tor
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -3,6 +3,7 @@
import gnupg
import os
+import io
import scrypt
import subprocess
@@ -64,10 +65,10 @@ def __init__(self,
# map code for a given language to a localized wordlist
self.__language2words = {} # type: Dict[Text, List[str]]
- with open(nouns_file) as f:
+ with io.open(nouns_file) as f:
self.nouns = f.read().splitlines()
- with open(adjectives_file) as f:
+ with io.open(adjectives_file) as f:
self.adjectives = f.read().splitlines()
# Make sure these pass before the app can run
@@ -104,7 +105,7 @@ def get_wordlist(self, locale):
else:
wordlist_path = self.__word_list
- with open(wordlist_path) as f:
+ with io.open(wordlist_path) as f:
content = f.read().splitlines()
self.__language2words[locale] = content
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import base64
import os
+import io
from tempfile import _TemporaryFileWrapper
from gnupg._util import _STREAMLIKE_TYPES
@@ -47,7 +48,7 @@ def __init__(self, store_dir):
self.tmp_file_id = base64.urlsafe_b64encode(os.urandom(32)).strip('=')
self.filepath = os.path.join(store_dir,
'{}.aes'.format(self.tmp_file_id))
- self.file = open(self.filepath, 'w+b')
+ self.file = io.open(self.filepath, 'w+b')
super(SecureTemporaryFile, self).__init__(self.file, self.filepath)
def create_key(self):
diff --git a/securedrop/source_app/main.py b/securedrop/source_app/main.py
--- a/securedrop/source_app/main.py
+++ b/securedrop/source_app/main.py
@@ -1,5 +1,6 @@
import operator
import os
+import io
from datetime import datetime
from flask import (Blueprint, render_template, flash, redirect, url_for, g,
@@ -73,7 +74,7 @@ def lookup():
reply.filename,
)
try:
- with open(reply_path) as f:
+ with io.open(reply_path, "rb") as f:
contents = f.read()
reply.decrypted = current_app.crypto_util.decrypt(
g.codename,
diff --git a/securedrop/source_app/utils.py b/securedrop/source_app/utils.py
--- a/securedrop/source_app/utils.py
+++ b/securedrop/source_app/utils.py
@@ -1,3 +1,4 @@
+import io
import logging
import subprocess
@@ -60,7 +61,7 @@ def generate_unique_codename(config):
def get_entropy_estimate():
- with open('/proc/sys/kernel/random/entropy_avail') as f:
+ with io.open('/proc/sys/kernel/random/entropy_avail') as f:
return int(f.read())
diff --git a/testinfra/conftest.py b/testinfra/conftest.py
--- a/testinfra/conftest.py
+++ b/testinfra/conftest.py
@@ -6,6 +6,7 @@
Vars should be placed in `testinfra/vars/<hostname>.yml`.
"""
+import io
import os
import yaml
@@ -23,7 +24,7 @@ def securedrop_import_testinfra_vars(hostname, with_header=False):
Vars must be stored in `testinfra/vars/<hostname>.yml`.
"""
filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
- with open(filepath, 'r') as f:
+ with io.open(filepath, 'r') as f:
hostvars = yaml.safe_load(f)
if with_header:
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -17,6 +17,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
+import io
import argparse
from os.path import dirname, join, basename, exists
import mock
@@ -347,7 +348,7 @@ def test_save(self, tmpdir):
var1: val1
var2: val2
""")
- assert expected == open(site_config_path).read()
+ assert expected == io.open(site_config_path).read()
def test_validate_gpg_key(self, caplog):
args = argparse.Namespace(site_config='INVALID',
diff --git a/molecule/ansible-config/tests/test_max_fail_percentage.py b/molecule/ansible-config/tests/test_max_fail_percentage.py
--- a/molecule/ansible-config/tests/test_max_fail_percentage.py
+++ b/molecule/ansible-config/tests/test_max_fail_percentage.py
@@ -1,5 +1,5 @@
import os
-
+import io
import pytest
import yaml
@@ -55,7 +55,7 @@ def test_max_fail_percentage(host, playbook):
the parameter, but we'll play it safe and require it everywhere,
to avoid mistakes down the road.
"""
- with open(playbook, 'r') as f:
+ with io.open(playbook, 'r') as f:
playbook_yaml = yaml.safe_load(f)
# Descend into playbook list structure to validate play attributes.
for play in playbook_yaml:
@@ -71,7 +71,7 @@ def test_any_errors_fatal(host, playbook):
to "0", doing so ensures that any errors will cause an immediate failure
on the playbook.
"""
- with open(playbook, 'r') as f:
+ with io.open(playbook, 'r') as f:
playbook_yaml = yaml.safe_load(f)
# Descend into playbook list structure to validate play attributes.
for play in playbook_yaml:
diff --git a/molecule/aws/tests/test_tor_interfaces.py b/molecule/aws/tests/test_tor_interfaces.py
--- a/molecule/aws/tests/test_tor_interfaces.py
+++ b/molecule/aws/tests/test_tor_interfaces.py
@@ -1,3 +1,4 @@
+import io
import os
import re
import pytest
@@ -20,7 +21,7 @@ def test_www(host, site):
os.path.dirname(__file__),
"../../../install_files/ansible-base/{}".format(site['file'])
)
- onion_url_raw = open(onion_url_filepath, 'ro').read()
+ onion_url_raw = io.open(onion_url_filepath, 'ro').read()
onion_url = re.search("\w+\.onion", onion_url_raw).group()
# Fetch Onion URL via curl to confirm interface is rendered correctly.
diff --git a/molecule/builder/tests/conftest.py b/molecule/builder/tests/conftest.py
--- a/molecule/builder/tests/conftest.py
+++ b/molecule/builder/tests/conftest.py
@@ -3,6 +3,7 @@
"""
import os
+import io
import yaml
@@ -11,5 +12,5 @@ def pytest_namespace():
global namespace
"""
filepath = os.path.join(os.path.dirname(__file__), "vars.yml")
- with open(filepath, 'r') as f:
+ with io.open(filepath, 'r') as f:
return dict(securedrop_test_vars=yaml.safe_load(f))
diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
--- a/securedrop/tests/conftest.py
+++ b/securedrop/tests/conftest.py
@@ -3,6 +3,7 @@
import gnupg
import logging
import os
+import io
import psutil
import pytest
import shutil
@@ -71,9 +72,9 @@ def config(tmpdir):
sqlite = data.join('db.sqlite')
gpg = gnupg.GPG(homedir=str(keys))
- with open(path.join(path.dirname(__file__),
- 'files',
- 'test_journalist_key.pub')) as f:
+ with io.open(path.join(path.dirname(__file__),
+ 'files',
+ 'test_journalist_key.pub')) as f:
gpg.import_keys(f.read())
cnf.SECUREDROP_DATA_ROOT = str(data)
@@ -138,7 +139,7 @@ def test_source(journalist_app):
def _start_test_rqworker(config):
if not psutil.pid_exists(_get_pid_from_file(TEST_WORKER_PIDFILE)):
- tmp_logfile = open('/tmp/test_rqworker.log', 'w')
+ tmp_logfile = io.open('/tmp/test_rqworker.log', 'w')
subprocess.Popen(['rqworker', 'test',
'-P', config.SECUREDROP_ROOT,
'--pid', TEST_WORKER_PIDFILE],
@@ -158,7 +159,7 @@ def _stop_test_rqworker():
def _get_pid_from_file(pid_file_name):
try:
- return int(open(pid_file_name).read())
+ return int(io.open(pid_file_name).read())
except IOError:
return None
diff --git a/securedrop/tests/functional/test_submission_not_in_memory.py b/securedrop/tests/functional/test_submission_not_in_memory.py
--- a/securedrop/tests/functional/test_submission_not_in_memory.py
+++ b/securedrop/tests/functional/test_submission_not_in_memory.py
@@ -2,6 +2,7 @@
import subprocess
from source_navigation_steps import SourceNavigationStepsMixin
import os
+import io
import pytest
import getpass
import re
@@ -11,7 +12,7 @@ class TestSubmissionNotInMemory(FunctionalTest,
SourceNavigationStepsMixin):
def setup(self):
- self.devnull = open('/dev/null', 'r')
+ self.devnull = io.open('/dev/null', 'r')
FunctionalTest.setup(self)
def teardown(self):
@@ -26,7 +27,7 @@ def _memory_dump(self, pid):
stderr=self.devnull)
subprocess.call(["sudo", "chown", getpass.getuser(),
core_dump_file_name])
- with open(core_dump_file_name, 'r') as fp:
+ with io.open(core_dump_file_name, 'r') as fp:
return fp.read()
finally:
pass
diff --git a/securedrop/tests/pages-layout/functional_test.py b/securedrop/tests/pages-layout/functional_test.py
--- a/securedrop/tests/pages-layout/functional_test.py
+++ b/securedrop/tests/pages-layout/functional_test.py
@@ -16,6 +16,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
+import io
from os.path import abspath, dirname, realpath
import pytest
@@ -74,5 +75,5 @@ def _javascript_toggle(self):
actions.perform()
def _save_alert(self, filename):
- fd = open(os.path.join(self.log_dir, filename), 'wb')
+ fd = io.open(os.path.join(self.log_dir, filename), 'wb')
fd.write(self.driver.switch_to.alert.text.encode('utf-8'))
diff --git a/securedrop/tests/test_crypto_util.py b/securedrop/tests/test_crypto_util.py
--- a/securedrop/tests/test_crypto_util.py
+++ b/securedrop/tests/test_crypto_util.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import os
+import io
import unittest
from flask import current_app
@@ -98,7 +99,7 @@ def test_encrypt_binary_stream(self):
`gnupg._util._is_stream(plaintext)` returns `True`).
"""
source, codename = utils.db_helper.init_source()
- with open(os.path.realpath(__file__)) as fh:
+ with io.open(os.path.realpath(__file__)) as fh:
ciphertext = current_app.crypto_util.encrypt(
fh,
[current_app.crypto_util.getkey(source.filesystem_id),
@@ -106,7 +107,7 @@ def test_encrypt_binary_stream(self):
current_app.storage.path(source.filesystem_id, 'somefile.gpg'))
plaintext = current_app.crypto_util.decrypt(codename, ciphertext)
- with open(os.path.realpath(__file__)) as fh:
+ with io.open(os.path.realpath(__file__)) as fh:
self.assertEqual(fh.read(), plaintext)
def test_encrypt_fingerprints_not_a_list_or_tuple(self):
diff --git a/securedrop/tests/test_i18n_tool.py b/securedrop/tests/test_i18n_tool.py
--- a/securedrop/tests/test_i18n_tool.py
+++ b/securedrop/tests/test_i18n_tool.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import argparse
+import io
import os
from os.path import abspath, dirname, exists, getmtime, join, realpath
os.environ['SECUREDROP_ENV'] = 'test' # noqa
@@ -37,8 +38,9 @@ def test_translate_desktop_l10n(self, tmpdir):
i18n_tool.translate_desktop(args)
messages_file = join(str(tmpdir), 'desktop.pot')
assert exists(messages_file)
- pot = open(messages_file).read()
- assert 'SecureDrop Source Interfaces' in pot
+ with io.open(messages_file) as fobj:
+ pot = fobj.read()
+ assert 'SecureDrop Source Interfaces' in pot
# pretend this happened a few seconds ago
few_seconds_ago = time.time() - 60
os.utime(messages_file, (few_seconds_ago, few_seconds_ago))
@@ -81,11 +83,13 @@ def test_translate_desktop_l10n(self, tmpdir):
old_messages_mtime = current_messages_mtime
i18n_tool.translate_desktop(args)
assert old_messages_mtime == getmtime(messages_file)
- po = open(po_file).read()
- assert 'SecureDrop Source Interfaces' in po
- assert 'SecureDrop Journalist Interfaces' not in po
- i18n = open(i18n_file).read()
- assert 'SOURCE FR' in i18n
+ with io.open(po_file) as fobj:
+ po = fobj.read()
+ assert 'SecureDrop Source Interfaces' in po
+ assert 'SecureDrop Journalist Interfaces' not in po
+ with io.open(i18n_file) as fobj:
+ i18n = fobj.read()
+ assert 'SOURCE FR' in i18n
def test_translate_messages_l10n(self, tmpdir):
source = [
@@ -106,9 +110,10 @@ def test_translate_messages_l10n(self, tmpdir):
i18n_tool.translate_messages(args)
messages_file = join(str(tmpdir), 'messages.pot')
assert exists(messages_file)
- pot = open(messages_file).read()
- assert 'code hello i18n' in pot
- assert 'template hello i18n' in pot
+ with io.open(messages_file) as fobj:
+ pot = fobj.read()
+ assert 'code hello i18n' in pot
+ assert 'template hello i18n' in pot
locale = 'en_US'
locale_dir = join(str(tmpdir), locale)
@@ -121,9 +126,10 @@ def test_translate_messages_l10n(self, tmpdir):
assert not exists(mo_file)
i18n_tool.translate_messages(args)
assert exists(mo_file)
- mo = open(mo_file).read()
- assert 'code hello i18n' in mo
- assert 'template hello i18n' in mo
+ with io.open(mo_file) as fobj:
+ mo = fobj.read()
+ assert 'code hello i18n' in mo
+ assert 'template hello i18n' in mo
def test_translate_messages_compile_arg(self, tmpdir):
source = [
@@ -143,8 +149,9 @@ def test_translate_messages_compile_arg(self, tmpdir):
i18n_tool.translate_messages(args)
messages_file = join(str(tmpdir), 'messages.pot')
assert exists(messages_file)
- pot = open(messages_file).read()
- assert 'code hello i18n' in pot
+ with io.open(messages_file) as fobj:
+ pot = fobj.read()
+ assert 'code hello i18n' in pot
locale = 'en_US'
locale_dir = join(str(tmpdir), locale)
@@ -184,9 +191,10 @@ def test_translate_messages_compile_arg(self, tmpdir):
old_po_mtime = current_po_mtime
i18n_tool.translate_messages(args)
assert old_po_mtime == getmtime(po_file)
- mo = open(mo_file).read()
- assert 'code hello i18n' in mo
- assert 'template hello i18n' not in mo
+ with io.open(mo_file) as fobj:
+ mo = fobj.read()
+ assert 'code hello i18n' in mo
+ assert 'template hello i18n' not in mo
class TestSh(object):
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import os
import pytest
+import io
import random
import unittest
import zipfile
@@ -948,7 +949,7 @@ def test_logo_upload_with_valid_image_succeeds(self):
# Save original logo to restore after test run
logo_image_location = os.path.join(config.SECUREDROP_ROOT,
"static/i/logo.png")
- with open(logo_image_location) as logo_file:
+ with io.open(logo_image_location, 'rb') as logo_file:
original_image = logo_file.read()
try:
@@ -964,7 +965,7 @@ def test_logo_upload_with_valid_image_succeeds(self):
self.assertMessageFlashed("Image updated.", "logo-success")
finally:
# Restore original image to logo location for subsequent tests
- with open(logo_image_location, 'w') as logo_file:
+ with io.open(logo_image_location, 'wb') as logo_file:
logo_file.write(original_image)
def test_logo_upload_with_invalid_filetype_fails(self):
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import argparse
+import io
import logging
import os
import manage
@@ -202,7 +203,7 @@ def test_clean_tmp_too_young(config, caplog):
directory=config.TEMP_DIR,
verbose=logging.DEBUG)
# create a file
- open(os.path.join(config.TEMP_DIR, 'FILE'), 'a').close()
+ io.open(os.path.join(config.TEMP_DIR, 'FILE'), 'a').close()
manage.setup_verbosity(args)
manage.clean_tmp(args)
@@ -214,7 +215,7 @@ def test_clean_tmp_removed(config, caplog):
directory=config.TEMP_DIR,
verbose=logging.DEBUG)
fname = os.path.join(config.TEMP_DIR, 'FILE')
- with open(fname, 'a'):
+ with io.open(fname, 'a'):
old = time.time() - 24*60*60
os.utime(fname, (old, old))
manage.setup_verbosity(args)
diff --git a/securedrop/tests/test_secure_tempfile.py b/securedrop/tests/test_secure_tempfile.py
--- a/securedrop/tests/test_secure_tempfile.py
+++ b/securedrop/tests/test_secure_tempfile.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import os
+import io
import unittest
from gnupg._util import _is_stream
@@ -62,7 +63,7 @@ def test_read_write_unicode(self):
def test_file_seems_encrypted(self):
self.f.write(self.msg)
- with open(self.f.filepath, 'rb') as fh:
+ with io.open(self.f.filepath, 'rb') as fh:
contents = fh.read().decode()
self.assertNotIn(self.msg, contents)
diff --git a/securedrop/tests/test_store.py b/securedrop/tests/test_store.py
--- a/securedrop/tests/test_store.py
+++ b/securedrop/tests/test_store.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import os
+import io
import pytest
import re
import shutil
@@ -37,7 +38,7 @@ def create_file_in_source_dir(self, filesystem_id, filename):
os.makedirs(source_directory)
file_path = os.path.join(source_directory, filename)
- with open(file_path, 'a'):
+ with io.open(file_path, 'a'):
os.utime(file_path, None)
return source_directory, file_path
@@ -140,7 +141,7 @@ def test_get_zip(self):
archivefile_contents = archive.namelist()
for archived_file, actual_file in zip(archivefile_contents, filenames):
- actual_file_content = open(actual_file).read()
+ actual_file_content = io.open(actual_file, 'rb').read()
zipped_file_content = archive.read(archived_file)
self.assertEquals(zipped_file_content, actual_file_content)
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -3,6 +3,7 @@
"""
import gnupg
import os
+import io
import shutil
import threading
@@ -40,7 +41,7 @@ def init_gpg():
# Faster to import a pre-generated key than to gen a new one every time.
for keyfile in (join(FILES_DIR, "test_journalist_key.pub"),
join(FILES_DIR, "test_journalist_key.sec")):
- gpg.import_keys(open(keyfile).read())
+ gpg.import_keys(io.open(keyfile).read())
return gpg
diff --git a/testinfra/app/test_network.py b/testinfra/app/test_network.py
--- a/testinfra/app/test_network.py
+++ b/testinfra/app/test_network.py
@@ -1,4 +1,5 @@
import os
+import io
import difflib
import pytest
from jinja2 import Template
@@ -27,7 +28,7 @@ def test_app_iptables_rules(SystemInfo, Command, Sudo):
environment)
# template out a local iptables jinja file
- jinja_iptables = Template(open(iptables_file, 'r').read())
+ jinja_iptables = Template(io.open(iptables_file, 'r').read())
iptables_expected = jinja_iptables.render(**kwargs)
with Sudo():
diff --git a/testinfra/mon/test_network.py b/testinfra/mon/test_network.py
--- a/testinfra/mon/test_network.py
+++ b/testinfra/mon/test_network.py
@@ -1,3 +1,4 @@
+import io
import os
import difflib
import pytest
@@ -27,7 +28,7 @@ def test_mon_iptables_rules(SystemInfo, Command, Sudo):
environment)
# template out a local iptables jinja file
- jinja_iptables = Template(open(iptables_file, 'r').read())
+ jinja_iptables = Template(io.open(iptables_file, 'r').read())
iptables_expected = jinja_iptables.render(**kwargs)
with Sudo():
| [RFE] Use io.open instead of open function call in the codebase
# Feature request
## Description
We should start using [`io.open`](https://docs.python.org/2.7/library/io.html#io.open) instead of the builtin function `open` in Python2 so that we can move another step towards Python3.
In Python3 `open` function call is actually the same of `io.open`. But, that is not the same in Python2. That is why moving to `io.open` in Python2 is [step forward](https://docs.python.org/3/howto/pyporting.html#update-your-code) to Python3 land.
Always remember, [the clock is ticking](https://docs.python.org/3/howto/pyporting.html#update-your-code).
| Not saying we shouldn't, but if `open` "just works" now, is there a compelling reason to change it before we make the actual switch to `python3`? I feel like there's bigger fish to fry.
@heartsucker That is our official suggestion for projects trying to move into Python3. Please have a look at the https://docs.python.org/3/howto/pyporting.html#update-your-code
Fair enough. I've just been lazy and ported whole projects with `2to3` all at once. Ha. | 2018-02-23T10:53:14Z | [] | [] |
freedomofpress/securedrop | 3,082 | freedomofpress__securedrop-3082 | [
"1697"
] | 650ab5b57633e43449be8a6e580b6379293e627b | diff --git a/testinfra/conftest.py b/testinfra/conftest.py
--- a/testinfra/conftest.py
+++ b/testinfra/conftest.py
@@ -25,11 +25,6 @@ def securedrop_import_testinfra_vars(hostname, with_header=False):
filepath = os.path.join(os.path.dirname(__file__), "vars", hostname+".yml")
with open(filepath, 'r') as f:
hostvars = yaml.safe_load(f)
- # The directory Travis runs builds in varies by PR, so we cannot hardcode
- # it in the YAML testvars. Read it from env var and concatenate.
- if hostname.lower() == 'travis':
- build_env = os.environ["TRAVIS_BUILD_DIR"]
- hostvars['securedrop_code'] = build_env+"/securedrop"
if with_header:
hostvars = dict(securedrop_test_vars=hostvars)
| diff --git a/testinfra/app-code/test_securedrop_app_code.py b/testinfra/app-code/test_securedrop_app_code.py
--- a/testinfra/app-code/test_securedrop_app_code.py
+++ b/testinfra/app-code/test_securedrop_app_code.py
@@ -69,16 +69,10 @@ def test_securedrop_application_test_journalist_key(File, Sudo):
securedrop_test_vars.securedrop_code))
with Sudo():
assert securedrop_config.is_file
- # travis needs the config.py file ran owned by root not sure why
- # just saw this note in the travis.yml config
- if hostenv == "travis":
- assert securedrop_config.user == "root"
- assert securedrop_config.group == "root"
- else:
- assert securedrop_config.user == \
- securedrop_test_vars.securedrop_user
- assert securedrop_config.group == \
- securedrop_test_vars.securedrop_user
+ assert securedrop_config.user == \
+ securedrop_test_vars.securedrop_user
+ assert securedrop_config.group == \
+ securedrop_test_vars.securedrop_user
assert oct(securedrop_config.mode) == "0600"
assert securedrop_config.contains(
"^JOURNALIST_KEY = '65A1B5FF195B56353CC63DFFCC40EF1228271441'$")
diff --git a/testinfra/common/test_user_config.py b/testinfra/common/test_user_config.py
--- a/testinfra/common/test_user_config.py
+++ b/testinfra/common/test_user_config.py
@@ -1,5 +1,4 @@
import os
-import pytest
import re
hostenv = os.environ['SECUREDROP_TESTINFRA_TARGET_HOST']
@@ -65,8 +64,6 @@ def test_tmux_installed(Package):
assert Package("tmux").is_installed
[email protected](hostenv == 'travis',
- reason="Bashrc tests dont make sense on Travis")
def test_sudoers_tmux_env_deprecated(File):
"""
Previous version of the Ansible config set the tmux config
diff --git a/testinfra/development/test_xvfb.py b/testinfra/development/test_xvfb.py
--- a/testinfra/development/test_xvfb.py
+++ b/testinfra/development/test_xvfb.py
@@ -1,6 +1,3 @@
-import os
-
-
def test_xvfb_is_installed(Package):
"""
Ensure apt requirements for Xvfb are present.
@@ -13,17 +10,13 @@ def test_firefox_is_installed(Package, Command):
The app test suite requires a very specific version of Firefox, for
compatibility with Selenium. Make sure to check the explicit
version of Firefox, not just that any version of Firefox is installed.
-
- In Travis, the Firefox installation is handled via the travis.yml
- file, and so it won't show as installed via dpkg.
"""
- if "TRAVIS" not in os.environ:
- p = Package("firefox")
- assert p.is_installed
+ p = Package("firefox")
+ assert p.is_installed
c = Command("firefox --version")
# Reminder: the rstrip is only necessary for local-context actions,
- # e.g. in Travis, but it's a fine practice in all contexts.
+ # but it's a fine practice in all contexts.
assert c.stdout.rstrip() == "Mozilla Firefox 46.0.1"
| Delete setup.cfg
`setup.cfg` was used to set configuration options for `pytest` in the past, but since `pytest.ini` is now providing that configuration, it seems like `setup.cfg` should be deleted.
| 2018-03-01T03:10:15Z | [] | [] |
|
freedomofpress/securedrop | 3,128 | freedomofpress__securedrop-3128 | [
"3124"
] | c6ae6251bdbf1ce497bb62418660b643ea344258 | diff --git a/securedrop/journalist_app/account.py b/securedrop/journalist_app/account.py
--- a/securedrop/journalist_app/account.py
+++ b/securedrop/journalist_app/account.py
@@ -6,7 +6,7 @@
from db import db
from journalist_app.utils import (make_password, set_diceware_password,
- validate_user)
+ validate_user, validate_hotp_secret)
def make_blueprint(config):
@@ -60,6 +60,8 @@ def reset_two_factor_totp():
def reset_two_factor_hotp():
otp_secret = request.form.get('otp_secret', None)
if otp_secret:
+ if not validate_hotp_secret(g.user, otp_secret):
+ return render_template('account_edit_hotp_secret.html')
g.user.set_hotp_secret(otp_secret)
db.session.commit()
return redirect(url_for('account.new_two_factor'))
diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -12,7 +12,7 @@
from models import Journalist, InvalidUsernameException, PasswordError
from journalist_app.decorators import admin_required
from journalist_app.utils import (make_password, commit_account_changes,
- set_diceware_password)
+ set_diceware_password, validate_hotp_secret)
from journalist_app.forms import LogoForm, NewUserForm
@@ -131,30 +131,10 @@ def reset_two_factor_hotp():
otp_secret = request.form.get('otp_secret', None)
if otp_secret:
user = Journalist.query.get(uid)
- try:
- user.set_hotp_secret(otp_secret)
- except TypeError as e:
- if "Non-hexadecimal digit found" in str(e):
- flash(gettext(
- "Invalid secret format: "
- "please only submit letters A-F and numbers 0-9."),
- "error")
- elif "Odd-length string" in str(e):
- flash(gettext(
- "Invalid secret format: "
- "odd-length secret. Did you mistype the secret?"),
- "error")
- else:
- flash(gettext(
- "An unexpected error occurred! "
- "Please inform your administrator."), "error")
- current_app.logger.error(
- "set_hotp_secret '{}' (id {}) failed: {}".format(
- otp_secret, uid, e))
+ if not validate_hotp_secret(user, otp_secret):
return render_template('admin_edit_hotp_secret.html', uid=uid)
- else:
- db.session.commit()
- return redirect(url_for('admin.new_user_two_factor', uid=uid))
+ db.session.commit()
+ return redirect(url_for('admin.new_user_two_factor', uid=uid))
else:
return render_template('admin_edit_hotp_secret.html', uid=uid)
diff --git a/securedrop/journalist_app/utils.py b/securedrop/journalist_app/utils.py
--- a/securedrop/journalist_app/utils.py
+++ b/securedrop/journalist_app/utils.py
@@ -102,6 +102,39 @@ def validate_user(username, password, token, error_message=None):
return None
+def validate_hotp_secret(user, otp_secret):
+ """
+ Validates and sets the HOTP provided by a user
+ :param user: the change is for this instance of the User object
+ :param otp_secret: the new HOTP secret
+ :return: True if it validates, False if it does not
+ """
+ try:
+ user.set_hotp_secret(otp_secret)
+ except TypeError as e:
+ if "Non-hexadecimal digit found" in str(e):
+ flash(gettext(
+ "Invalid secret format: "
+ "please only submit letters A-F and numbers 0-9."),
+ "error")
+ return False
+ elif "Odd-length string" in str(e):
+ flash(gettext(
+ "Invalid secret format: "
+ "odd-length secret. Did you mistype the secret?"),
+ "error")
+ return False
+ else:
+ flash(gettext(
+ "An unexpected error occurred! "
+ "Please inform your administrator."), "error")
+ current_app.logger.error(
+ "set_hotp_secret '{}' (id {}) failed: {}".format(
+ otp_secret, user.id, e))
+ return False
+ return True
+
+
def download(zip_basename, submissions):
"""Send client contents of ZIP-file *zip_basename*-<timestamp>.zip
containing *submissions*. The ZIP-file, being a
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -544,6 +544,55 @@ def test_user_resets_hotp(self):
# should redirect to verification page
self.assertRedirects(resp, url_for('account.new_two_factor'))
+ def test_user_resets_user_hotp_format_odd(self):
+ self._login_user()
+ old_hotp = self.user.hotp.secret
+
+ self.client.post(url_for('account.reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret='123'))
+ new_hotp = self.user.hotp.secret
+
+ self.assertEqual(old_hotp, new_hotp)
+ self.assertMessageFlashed(
+ "Invalid secret format: "
+ "odd-length secret. Did you mistype the secret?", "error")
+
+ def test_user_resets_user_hotp_format_non_hexa(self):
+ self._login_user()
+ old_hotp = self.user.hotp.secret
+
+ self.client.post(url_for('account.reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret='ZZ'))
+ new_hotp = self.user.hotp.secret
+
+ self.assertEqual(old_hotp, new_hotp)
+ self.assertMessageFlashed(
+ "Invalid secret format: "
+ "please only submit letters A-F and numbers 0-9.", "error")
+
+ @patch('models.Journalist.set_hotp_secret')
+ @patch('journalist.app.logger.error')
+ def test_user_resets_user_hotp_error(self,
+ mocked_error_logger,
+ mock_set_hotp_secret):
+ self._login_user()
+ old_hotp = self.user.hotp.secret
+
+ error_message = 'SOMETHING WRONG!'
+ mock_set_hotp_secret.side_effect = TypeError(error_message)
+
+ otp_secret = '1234'
+ self.client.post(url_for('account.reset_two_factor_hotp'),
+ data=dict(uid=self.user.id, otp_secret=otp_secret))
+ new_hotp = self.user.hotp.secret
+
+ self.assertEqual(old_hotp, new_hotp)
+ self.assertMessageFlashed("An unexpected error occurred! "
+ "Please inform your administrator.", "error")
+ mocked_error_logger.assert_called_once_with(
+ "set_hotp_secret '{}' (id {}) failed: {}".format(
+ otp_secret, self.user.id, error_message))
+
def test_admin_resets_user_totp(self):
self._login_admin()
old_totp = self.user.totp
| /account/reset-2fa-hotp does not catch TypeError
# Bug
## Description
When modifying the HOTP as a journalist (not in the admin interface), TypeError are not caught and a stack trace shows when debug is activated. Otherwise it would bubble to whatever catches server errors.
## Steps to Reproduce
* login as a journalist
* go to /account/account
* click /account/reset-2fa-hotp
* enter three letters
* click save
## Expected Behavior
An error message similar to what happens with /admin/reset-2fa-hotp
## Comments
The /admin/reset-2fa-hotp error handling code should be shared with /account/reset-2fa-hotp
| Nice catch, I can reproduce this:
![screen shot 2018-03-08 at 12 12 03 pm](https://user-images.githubusercontent.com/7832803/37173956-fb929a9c-22c9-11e8-9aea-7a40770332c3.png) | 2018-03-08T23:06:08Z | [] | [] |
freedomofpress/securedrop | 3,172 | freedomofpress__securedrop-3172 | [
"3160",
"3160"
] | d272e85b79848ff83ad45bbcdf43a1df472308b7 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -51,6 +51,13 @@ def validate(self, document):
raise ValidationError(
message="Must not be an empty string")
+ class ValidateTime(Validator):
+ def validate(self, document):
+ if document.text.isdigit() and int(document.text) in range(0, 24):
+ return True
+ raise ValidationError(
+ message="Must be an integer between 0 and 23")
+
class ValidateUser(Validator):
def validate(self, document):
text = document.text
@@ -210,6 +217,10 @@ def __init__(self, args):
u'Username for SSH access to the servers',
SiteConfig.ValidateUser(),
None],
+ ['daily_reboot_time', 4, int,
+ u'Daily reboot time of the server (24-hour clock)',
+ SiteConfig.ValidateTime(),
+ int],
['app_ip', '10.20.2.2', str,
u'Local IPv4 address for the Application Server',
SiteConfig.ValidateIP(),
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -157,6 +157,21 @@ def test_validate_not_empty(self):
with pytest.raises(ValidationError):
validator.validate(Document(''))
+ def test_validate_time(self):
+ validator = securedrop_admin.SiteConfig.ValidateTime()
+
+ assert validator.validate(Document('4'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document(''))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('four'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('4.30'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('25'))
+ with pytest.raises(ValidationError):
+ validator.validate(Document('-4'))
+
def test_validate_ossec_username(self):
validator = securedrop_admin.SiteConfig.ValidateOSSECUsername()
@@ -484,6 +499,7 @@ def verify_desc_consistency_allow_empty(self, site_config, desc):
verify_prompt_ossec_alert_email = verify_prompt_not_empty
verify_prompt_smtp_relay = verify_prompt_not_empty
verify_prompt_smtp_relay_port = verify_desc_consistency
+ verify_prompt_daily_reboot_time = verify_desc_consistency
verify_prompt_sasl_domain = verify_desc_consistency_allow_empty
verify_prompt_sasl_username = verify_prompt_not_empty
verify_prompt_sasl_password = verify_prompt_not_empty
| Add `daily_reboot_time` option to securedrop-admin sdconfig and document it
## Description
This is a followup to issue #1515. In the corresponding PR (#1590), the ability to manually edit `prod_specific.yml` to customize the `daily_reboot_time` was added. This enabled admins to control when their SecureDrop instances will upgrade and reboot.
However, we now discourage direct edits to `prod_specific.yml` in favor of using `./securedrop-admin sdconfig`. Additionally, the ability to customize the daily reboot time via `daily_reboot_time` is currently not documented.
We should add the `daily_reboot_time` option to the `./securedrop-admin sdconfig` series of prompts and add some documentation to the Install and Administrator Guides explaining what `daily_reboot_time` does and how to enable it.
## User Stories
As a SecureDrop admin, I want to easily customize the reboot time so that I can schedule my SecureDrop upgrade while I am at work in case issues arise.
Add `daily_reboot_time` option to securedrop-admin sdconfig and document it
## Description
This is a followup to issue #1515. In the corresponding PR (#1590), the ability to manually edit `prod_specific.yml` to customize the `daily_reboot_time` was added. This enabled admins to control when their SecureDrop instances will upgrade and reboot.
However, we now discourage direct edits to `prod_specific.yml` in favor of using `./securedrop-admin sdconfig`. Additionally, the ability to customize the daily reboot time via `daily_reboot_time` is currently not documented.
We should add the `daily_reboot_time` option to the `./securedrop-admin sdconfig` series of prompts and add some documentation to the Install and Administrator Guides explaining what `daily_reboot_time` does and how to enable it.
## User Stories
As a SecureDrop admin, I want to easily customize the reboot time so that I can schedule my SecureDrop upgrade while I am at work in case issues arise.
| 2018-03-16T02:18:22Z | [] | [] |
|
freedomofpress/securedrop | 3,211 | freedomofpress__securedrop-3211 | [
"1419",
"3244"
] | 2cd07a16c843fb536aefb27035e1544edb453274 | diff --git a/securedrop/alembic/env.py b/securedrop/alembic/env.py
new file mode 100644
--- /dev/null
+++ b/securedrop/alembic/env.py
@@ -0,0 +1,81 @@
+from __future__ import with_statement
+
+import os
+import sys
+
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+from os import path
+
+config = context.config
+
+fileConfig(config.config_file_name)
+
+# needed to import local modules
+sys.path.insert(0, path.realpath(path.join(path.dirname(__file__), '..')))
+from db import db # noqa
+
+try:
+ # These imports are only needed for offline generation of automigrations.
+ # Importing them in a prod-like environment breaks things.
+ from journalist_app import create_app # noqa
+ from sdconfig import config as sdconfig # noqa
+
+ # App context is needed for autogenerated migrations
+ create_app(sdconfig).app_context().push()
+except Exception as e:
+ # Only reraise the exception in 'dev' where a developer actually cares
+ if os.environ.get('SECUREDROP_ENV') == 'dev':
+ raise
+
+
+target_metadata = db.Model.metadata
+
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url, target_metadata=target_metadata, literal_binds=True)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix='sqlalchemy.',
+ poolclass=pool.NullPool)
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/securedrop/alembic/versions/15ac9509fc68_init.py b/securedrop/alembic/versions/15ac9509fc68_init.py
new file mode 100644
--- /dev/null
+++ b/securedrop/alembic/versions/15ac9509fc68_init.py
@@ -0,0 +1,87 @@
+"""init
+
+Revision ID: 15ac9509fc68
+Revises:
+Create Date: 2018-03-30 21:20:58.280753
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '15ac9509fc68'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.create_table('journalists',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('username', sa.String(length=255), nullable=False),
+ sa.Column('pw_salt', sa.Binary(), nullable=True),
+ sa.Column('pw_hash', sa.Binary(), nullable=True),
+ sa.Column('is_admin', sa.Boolean(), nullable=True),
+ sa.Column('otp_secret', sa.String(length=16), nullable=True),
+ sa.Column('is_totp', sa.Boolean(), nullable=True),
+ sa.Column('hotp_counter', sa.Integer(), nullable=True),
+ sa.Column('last_token', sa.String(length=6), nullable=True),
+ sa.Column('created_on', sa.DateTime(), nullable=True),
+ sa.Column('last_access', sa.DateTime(), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('username')
+ )
+ op.create_table('sources',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('filesystem_id', sa.String(length=96), nullable=True),
+ sa.Column('journalist_designation', sa.String(length=255), nullable=False),
+ sa.Column('flagged', sa.Boolean(), nullable=True),
+ sa.Column('last_updated', sa.DateTime(), nullable=True),
+ sa.Column('pending', sa.Boolean(), nullable=True),
+ sa.Column('interaction_count', sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('filesystem_id')
+ )
+ op.create_table('journalist_login_attempt',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('timestamp', sa.DateTime(), nullable=True),
+ sa.Column('journalist_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('replies',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('journalist_id', sa.Integer(), nullable=True),
+ sa.Column('source_id', sa.Integer(), nullable=True),
+ sa.Column('filename', sa.String(length=255), nullable=False),
+ sa.Column('size', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['journalist_id'], ['journalists.id'], ),
+ sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('source_stars',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('source_id', sa.Integer(), nullable=True),
+ sa.Column('starred', sa.Boolean(), nullable=True),
+ sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('submissions',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('source_id', sa.Integer(), nullable=True),
+ sa.Column('filename', sa.String(length=255), nullable=False),
+ sa.Column('size', sa.Integer(), nullable=False),
+ sa.Column('downloaded', sa.Boolean(), nullable=True),
+ sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+
+
+def downgrade():
+ op.drop_table('submissions')
+ op.drop_table('source_stars')
+ op.drop_table('replies')
+ op.drop_table('journalist_login_attempt')
+ op.drop_table('sources')
+ op.drop_table('journalists')
diff --git a/securedrop/alembic/versions/faac8092c123_enable_security_pragmas.py b/securedrop/alembic/versions/faac8092c123_enable_security_pragmas.py
new file mode 100644
--- /dev/null
+++ b/securedrop/alembic/versions/faac8092c123_enable_security_pragmas.py
@@ -0,0 +1,26 @@
+"""enable security pragmas
+
+Revision ID: faac8092c123
+Revises: 15ac9509fc68
+Create Date: 2018-03-31 10:44:26.533395
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'faac8092c123'
+down_revision = '15ac9509fc68'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ conn = op.get_bind()
+ conn.execute(sa.text('PRAGMA secure_delete = ON'))
+ conn.execute(sa.text('PRAGMA auto_vacuum = FULL'))
+
+
+def downgrade():
+ pass
diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -28,6 +28,11 @@
# It supplies a CSPRNG but with an interface that supports methods like choice
random = SystemRandom()
+# safe characters for every possible word in the wordlist includes capital
+# letters because codename hashes are base32-encoded with capital letters
+DICEWARE_SAFE_CHARS = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzA'
+ 'BCDEFGHIJKLMNOPQRSTUVWXYZ')
+
class CryptoException(Exception):
pass
@@ -236,12 +241,8 @@ def clean(s, also=''):
>>> clean("Helloworld")
'Helloworld'
"""
- # safe characters for every possible word in the wordlist includes capital
- # letters because codename hashes are base32-encoded with capital letters
- ok = (' !#%$&)(+*-1032547698;:=?@acbedgfihkjmlonqpsrutwvyxzABCDEFGHIJ'
- 'KLMNOPQRSTUVWXYZ')
for c in s:
- if c not in ok and c not in also:
+ if c not in DICEWARE_SAFE_CHARS and c not in also:
raise CryptoException("invalid input: {0}".format(s))
# scrypt.hash requires input of type str. Since the wordlist is all ASCII
# characters, this conversion is not problematic
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -8,6 +8,7 @@
import os
import pwd
import qrcode
+import subprocess
import shutil
import signal
import sys
@@ -16,7 +17,7 @@
from contextlib import contextmanager
from flask import current_app
-from sqlalchemy import text, create_engine
+from sqlalchemy import create_engine
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import sessionmaker
@@ -43,14 +44,45 @@ def reset(args):
if not hasattr(config, 'DATABASE_FILE'):
raise Exception("TODO: ./manage.py doesn't know how to clear the db "
'if the backend is not sqlite')
+
+ # we need to save some data about the old DB file so we can recreate it
+ # with the same state
+ try:
+ stat_res = os.stat(config.DATABASE_FILE)
+ uid = stat_res.st_uid
+ gid = stat_res.st_gid
+ except OSError:
+ uid = os.getuid()
+ gid = os.getgid()
+
try:
os.remove(config.DATABASE_FILE)
except OSError:
pass
# Regenerate the database
- with app_context():
- db.create_all()
+ # 1. Create it
+ subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases'])
+ # 2. Set permissions on it
+ os.chown(config.DATABASE_FILE, uid, gid)
+ os.chmod(config.DATABASE_FILE, 0o0640)
+
+ if os.environ.get('SECUREDROP_ENV') == 'dev':
+ # 3. Create the DB from the metadata directly when in 'dev' so
+ # developers can test application changes without first writing
+ # alembic migration.
+ with journalist_app.create_app(config).app_context():
+ db.create_all()
+ else:
+ # We have to override the hardcoded .ini file because during testing
+ # the value in the .ini doesn't exist.
+ ini_dir = os.path.dirname(getattr(config,
+ 'TEST_ALEMBIC_INI',
+ 'alembic.ini'))
+
+ # 3. Migrate it to 'head'
+ subprocess.check_call('cd {} && alembic upgrade head'.format(ini_dir),
+ shell=True) # nosec
# Clear submission/reply storage
try:
@@ -242,14 +274,11 @@ def listdir_fullpath(d):
def init_db(args):
- with journalist_app.create_app(config).app_context():
- db.create_all()
- db.session.execute(text('PRAGMA secure_delete = ON'))
- db.session.execute(text('PRAGMA auto_vacuum = FULL'))
- db.session.commit()
-
user = pwd.getpwnam(args.user)
- os.chown('/var/lib/securedrop/db.sqlite', user.pw_uid, user.pw_gid)
+ subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases'])
+ os.chown(config.DATABASE_FILE, user.pw_uid, user.pw_gid)
+ os.chmod(config.DATABASE_FILE, 0o0640)
+ subprocess.check_call(['alembic', 'upgrade', 'head'])
def were_there_submissions_today(args):
diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py
new file mode 100755
--- /dev/null
+++ b/securedrop/qa_loader.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import math
+import os
+import random
+import string
+import sys
+
+from argparse import ArgumentParser
+from datetime import datetime
+from flask import current_app
+from os import path
+from sqlalchemy import text
+
+from crypto_util import DICEWARE_SAFE_CHARS
+from db import db
+from journalist_app import create_app
+from models import (Journalist, Source, Submission, SourceStar, Reply,
+ JournalistLoginAttempt)
+from sdconfig import config as sdconfig
+
+random.seed('~(=^β^)') # mrow?
+
+JOURNALIST_COUNT = 10
+SOURCE_COUNT = 50
+
+
+def random_bool():
+ return bool(random.getrandbits(1))
+
+
+def random_chars(len, nullable, chars=string.printable):
+ if nullable and random_bool():
+ return None
+ else:
+ return ''.join([random.choice(chars) for _ in range(len)])
+
+
+def bool_or_none():
+ return random.choice([True, False, None])
+
+
+def random_datetime(nullable):
+ if nullable and random_bool():
+ return None
+ else:
+ return datetime(
+ year=random.randint(1, 9999),
+ month=random.randint(1, 12),
+ day=random.randint(1, 28),
+ hour=random.randint(0, 23),
+ minute=random.randint(0, 59),
+ second=random.randint(0, 59),
+ microsecond=random.randint(0, 1000),
+ )
+
+
+def new_journalist():
+ # Make a diceware-like password
+ pw = ' '.join([random_chars(3, nullable=False, chars=DICEWARE_SAFE_CHARS)
+ for _ in range(7)])
+ journalist = Journalist(random_chars(random.randint(3, 32),
+ nullable=False),
+ pw,
+ random_bool())
+ journalist.is_admin = bool_or_none()
+
+ journalist.is_totp = bool_or_none()
+ journalist.hotp_counter = (random.randint(-1000, 1000)
+ if random_bool() else None)
+ journalist.created_on = random_datetime(nullable=True)
+ journalist.last_access = random_datetime(nullable=True)
+
+ db.session.add(journalist)
+
+
+def new_source():
+ fid_len = random.randint(4, 32)
+ designation_len = random.randint(4, 32)
+ source = Source(random_chars(fid_len, nullable=False,
+ chars=string.ascii_lowercase),
+ random_chars(designation_len, nullable=False))
+ source.flagged = bool_or_none()
+ source.last_updated = random_datetime(nullable=True)
+ source.pending = False
+
+ db.session.add(source)
+
+
+def new_submission(config, source_id):
+ source = Source.query.get(source_id)
+
+ # A source may have a null fid according to the DB, but this will
+ # break storage.path.
+ if source.filesystem_id is None:
+ return
+
+ filename = fake_file(config, source.filesystem_id)
+ submission = Submission(source, filename)
+
+ # For issue #1189
+ if random_bool():
+ submission.source_id = None
+
+ submission.downloaded = bool_or_none()
+
+ db.session.add(submission)
+
+
+def fake_file(config, source_fid):
+ source_dir = path.join(config.STORE_DIR, source_fid)
+ if not path.exists(source_dir):
+ os.mkdir(source_dir)
+
+ filename = random_chars(20, nullable=False, chars=string.ascii_lowercase)
+ num = random.randint(0, 100)
+ msg_type = 'msg' if random_bool() else 'doc.gz'
+ filename = '{}-{}-{}.gpg'.format(num, filename, msg_type)
+ f_len = int(math.floor(random.expovariate(100000) * 1024 * 1024 * 500))
+ sub_path = current_app.storage.path(source_fid, filename)
+ with open(sub_path, 'w') as f:
+ f.write('x' * f_len)
+
+ return filename
+
+
+def new_source_star(source_id):
+ source = Source.query.get(source_id)
+ star = SourceStar(source, bool_or_none())
+ db.session.add(star)
+
+
+def new_reply(config, journalist_id, source_id):
+ source = Source.query.get(source_id)
+
+ # A source may have a null fid according to the DB, but this will
+ # break storage.path.
+ if source.filesystem_id is None:
+ return
+
+ journalist = Journalist.query.get(journalist_id)
+ filename = fake_file(config, source.filesystem_id)
+ reply = Reply(journalist, source, filename)
+ db.session.add(reply)
+
+
+def new_journalist_login_attempt(journalist_id):
+ journalist = Journalist.query.get(journalist_id)
+ attempt = JournalistLoginAttempt(journalist)
+ attempt.timestamp = random_datetime(nullable=True)
+ db.session.add(attempt)
+
+
+def new_abandoned_submission(config, source_id):
+ '''For issue #1189'''
+
+ source = Source.query.filter(Source.filesystem_id.isnot(None)).all()[0]
+ filename = fake_file(config, source.filesystem_id)
+
+ # Use this as hack to create a real submission then swap out the source_id
+ submission = Submission(source, filename)
+ submission.source_id = source_id
+ db.session.add(submission)
+ db.session.commit()
+ delete_source(source_id)
+
+
+def delete_source(source_id):
+ '''For issue #1189'''
+ db.session.execute(text('DELETE FROM sources WHERE id = :source_id'),
+ {'source_id': source_id})
+
+
+def positive_int(s):
+ i = int(s)
+ if i < 1:
+ raise ValueError('{} is not >= 1'.format(s))
+ return i
+
+
+def load_data(config, multiplier):
+ app = create_app(config)
+
+ with app.app_context():
+ for _ in range(JOURNALIST_COUNT * multiplier):
+ new_journalist()
+ db.session.commit()
+
+ for _ in range(SOURCE_COUNT * multiplier):
+ new_source()
+ db.session.commit()
+
+ for sid in range(1, SOURCE_COUNT * multiplier, 5):
+ for _ in range(1, multiplier + 1):
+ new_submission(config, sid)
+ db.session.commit()
+
+ for sid in range(1, SOURCE_COUNT * multiplier, 5):
+ new_source_star(sid)
+ db.session.commit()
+
+ for jid in range(1, JOURNALIST_COUNT * multiplier, 10):
+ for sid in range(1, SOURCE_COUNT * multiplier, 10):
+ for _ in range(1, 3):
+ new_reply(config, jid, sid)
+ db.session.commit()
+
+ for jid in range(1, JOURNALIST_COUNT * multiplier, 10):
+ new_journalist_login_attempt(jid)
+ db.session.commit()
+
+ for sid in range(SOURCE_COUNT * multiplier,
+ SOURCE_COUNT * multiplier + multiplier):
+ new_abandoned_submission(config, sid)
+
+
+def arg_parser():
+ parser = ArgumentParser(
+ path.basename(__file__),
+ description='Loads data into the database for testing upgrades')
+ parser.add_argument('-m', '--multiplier', type=positive_int, default=100,
+ help=('Factor to multiply the loaded data by '
+ '(default 100)'))
+ return parser
+
+
+def main():
+ args = arg_parser().parse_args()
+ print('Loading data. This make take a while.')
+ load_data(sdconfig, args.multiplier)
+
+
+if __name__ == '__main__':
+ try:
+ main()
+ except KeyboardInterrupt:
+ print('') # for prompt on a newline
+ sys.exit(1)
| diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
--- a/securedrop/tests/conftest.py
+++ b/securedrop/tests/conftest.py
@@ -10,6 +10,8 @@
import signal
import subprocess
+from ConfigParser import SafeConfigParser
+
os.environ['SECUREDROP_ENV'] = 'test' # noqa
from sdconfig import SDConfig, config as original_config
@@ -85,9 +87,31 @@ def config(tmpdir):
cnf.TEMP_DIR = str(tmp)
cnf.DATABASE_FILE = str(sqlite)
+ # create the db file
+ subprocess.check_call(['sqlite3', cnf.DATABASE_FILE, '.databases'])
+
return cnf
[email protected](scope='function')
+def alembic_config(config):
+ base_dir = path.join(path.dirname(__file__), '..')
+ migrations_dir = path.join(base_dir, 'alembic')
+ ini = SafeConfigParser()
+ ini.read(path.join(base_dir, 'alembic.ini'))
+
+ ini.set('alembic', 'script_location', path.join(migrations_dir))
+ ini.set('alembic', 'sqlalchemy.url', 'sqlite:///' + config.DATABASE_FILE)
+
+ alembic_path = path.join(config.SECUREDROP_DATA_ROOT, 'alembic.ini')
+ config.TESTING_ALEMBIC_PATH = alembic_path
+
+ with open(alembic_path, 'w') as f:
+ ini.write(f)
+
+ return alembic_path
+
+
@pytest.fixture(scope='function')
def source_app(config):
app = create_source_app(config)
diff --git a/securedrop/tests/migrations/__init__.py b/securedrop/tests/migrations/__init__.py
new file mode 100644
diff --git a/securedrop/tests/migrations/helpers.py b/securedrop/tests/migrations/helpers.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/helpers.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+
+from datetime import datetime
+
+
+def random_bool():
+ return bool(random.getrandbits(1))
+
+
+def bool_or_none():
+ return random.choice([None, True, False])
+
+
+def random_bytes(min, max, nullable):
+ if nullable and random_bool():
+ return None
+ else:
+ # python2 just wants strings, fix this in python3
+ return random_chars(random.randint(min, max))
+
+
+def random_username():
+ len = random.randint(3, 64)
+ return random_chars(len)
+
+
+def random_chars(len, chars=string.printable):
+ return ''.join([random.choice(chars) for _ in range(len)])
+
+
+def random_datetime(nullable):
+ if nullable and random_bool():
+ return None
+ else:
+ return datetime(
+ year=random.randint(1, 9999),
+ month=random.randint(1, 12),
+ day=random.randint(1, 28),
+ hour=random.randint(0, 23),
+ minute=random.randint(0, 59),
+ second=random.randint(0, 59),
+ microsecond=random.randint(0, 1000),
+ )
diff --git a/securedrop/tests/migrations/migration_15ac9509fc68.py b/securedrop/tests/migrations/migration_15ac9509fc68.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_15ac9509fc68.py
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+
+from sqlalchemy import text
+
+from db import db
+from journalist_app import create_app
+from .helpers import (random_bool, random_chars, random_username, random_bytes,
+ random_datetime, bool_or_none)
+
+random.seed('α( α )α')
+
+
+class UpgradeTester():
+
+ '''This migration has no upgrade because there are no tables in the
+ database prior to running, so there is no data to load or test.
+ '''
+
+ def __init__(self, config):
+ pass
+
+ def load_data(self):
+ pass
+
+ def check_upgrade(self):
+ pass
+
+
+class DowngradeTester():
+
+ JOURNO_NUM = 200
+ SOURCE_NUM = 200
+
+ def __init__(self, config):
+ self.config = config
+ self.app = create_app(config)
+
+ def load_data(self):
+ with self.app.app_context():
+ for _ in range(self.JOURNO_NUM):
+ self.add_journalist()
+
+ for _ in range(self.SOURCE_NUM):
+ self.add_source()
+
+ for jid in range(1, self.JOURNO_NUM, 10):
+ for _ in range(random.randint(1, 3)):
+ self.add_journalist_login_attempt(jid)
+
+ for jid in range(1, self.JOURNO_NUM, 10):
+ for sid in range(1, self.SOURCE_NUM, 10):
+ self.add_reply(jid, sid)
+
+ for sid in range(1, self.SOURCE_NUM, 10):
+ self.add_source_star(sid)
+
+ for sid in range(1, self.SOURCE_NUM, 8):
+ for _ in range(random.randint(1, 3)):
+ self.add_submission(sid)
+
+ # create "abandoned" submissions (issue #1189)
+ for sid in range(self.SOURCE_NUM, self.SOURCE_NUM + 50):
+ self.add_submission(sid)
+
+ db.session.commit()
+
+ @staticmethod
+ def add_journalist():
+ if random_bool():
+ otp_secret = random_chars(16, string.ascii_uppercase + '234567')
+ else:
+ otp_secret = None
+
+ is_totp = random_bool()
+ if is_totp:
+ hotp_counter = 0 if random_bool() else None
+ else:
+ hotp_counter = random.randint(0, 10000) if random_bool() else None
+
+ last_token = random_chars(6, string.digits) if random_bool() else None
+
+ params = {
+ 'username': random_username(),
+ 'pw_salt': random_bytes(1, 64, nullable=True),
+ 'pw_hash': random_bytes(32, 64, nullable=True),
+ 'is_admin': bool_or_none(),
+ 'otp_secret': otp_secret,
+ 'is_totp': is_totp,
+ 'hotp_counter': hotp_counter,
+ 'last_token': last_token,
+ 'created_on': random_datetime(nullable=True),
+ 'last_access': random_datetime(nullable=True),
+ }
+ sql = '''INSERT INTO journalists (username, pw_salt, pw_hash,
+ is_admin, otp_secret, is_totp, hotp_counter, last_token,
+ created_on, last_access)
+ VALUES (:username, :pw_salt, :pw_hash, :is_admin,
+ :otp_secret, :is_totp, :hotp_counter, :last_token,
+ :created_on, :last_access);
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_source():
+ filesystem_id = random_chars(96) if random_bool() else None
+ params = {
+ 'filesystem_id': filesystem_id,
+ 'journalist_designation': random_chars(50),
+ 'flagged': bool_or_none(),
+ 'last_updated': random_datetime(nullable=True),
+ 'pending': bool_or_none(),
+ 'interaction_count': random.randint(0, 1000),
+ }
+ sql = '''INSERT INTO sources (filesystem_id, journalist_designation,
+ flagged, last_updated, pending, interaction_count)
+ VALUES (:filesystem_id, :journalist_designation, :flagged,
+ :last_updated, :pending, :interaction_count)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_journalist_login_attempt(journalist_id):
+ params = {
+ 'timestamp': random_datetime(nullable=True),
+ 'journalist_id': journalist_id,
+ }
+ sql = '''INSERT INTO journalist_login_attempt (timestamp,
+ journalist_id)
+ VALUES (:timestamp, :journalist_id)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_reply(journalist_id, source_id):
+ params = {
+ 'journalist_id': journalist_id,
+ 'source_id': source_id,
+ 'filename': random_chars(50),
+ 'size': random.randint(0, 1024 * 1024 * 500),
+ }
+ sql = '''INSERT INTO replies (journalist_id, source_id, filename,
+ size)
+ VALUES (:journalist_id, :source_id, :filename, :size)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_source_star(source_id):
+ params = {
+ 'source_id': source_id,
+ 'starred': bool_or_none(),
+ }
+ sql = '''INSERT INTO source_stars (source_id, starred)
+ VALUES (:source_id, :starred)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_submission(source_id):
+ params = {
+ 'source_id': source_id,
+ 'filename': random_chars(50),
+ 'size': random.randint(0, 1024 * 1024 * 500),
+ 'downloaded': bool_or_none(),
+ }
+ sql = '''INSERT INTO submissions (source_id, filename, size,
+ downloaded)
+ VALUES (:source_id, :filename, :size, :downloaded)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ def check_downgrade(self):
+ '''We don't need to check anything on this downgrade because the
+ migration drops all the tables. Thus, there is nothing to do.
+ '''
+ pass
diff --git a/securedrop/tests/migrations/migration_faac8092c123.py b/securedrop/tests/migrations/migration_faac8092c123.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_faac8092c123.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+
+
+class UpgradeTester():
+ '''This migration has no upgrade because it is only the enabling of
+ pragmas which do not affect database contents.
+ '''
+
+ def __init__(self, config):
+ pass
+
+ def load_data(self):
+ pass
+
+ def check_upgrade(self):
+ pass
+
+
+class DowngradeTester():
+ '''This migration has no downgrade because it is only the enabling of
+ pragmas, so we don't need to test the downgrade.
+ '''
+
+ def __init__(self, config):
+ pass
+
+ def load_data(self):
+ pass
+
+ def check_downgrade(self):
+ pass
diff --git a/securedrop/tests/test_alembic.py b/securedrop/tests/test_alembic.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_alembic.py
@@ -0,0 +1,176 @@
+# -*- coding: utf-8 -*-
+
+import os
+import pytest
+import subprocess
+
+from alembic.config import Config as AlembicConfig
+from alembic.script import ScriptDirectory
+from os import path
+from sqlalchemy import text
+
+import conftest
+
+from db import db
+from journalist_app import create_app
+
+MIGRATION_PATH = path.join(path.dirname(__file__), '..', 'alembic', 'versions')
+
+ALL_MIGRATIONS = [x.split('.')[0].split('_')[0]
+ for x in os.listdir(MIGRATION_PATH)
+ if x.endswith('.py')]
+
+
+def list_migrations(cfg_path, head):
+ cfg = AlembicConfig(cfg_path)
+ script = ScriptDirectory.from_config(cfg)
+ migrations = [x.revision
+ for x in script.walk_revisions(base='base', head=head)]
+ migrations.reverse()
+ return migrations
+
+
+def upgrade(alembic_config, migration):
+ subprocess.check_call(['alembic', 'upgrade', migration],
+ cwd=path.dirname(alembic_config))
+
+
+def downgrade(alembic_config, migration):
+ subprocess.check_call(['alembic', 'downgrade', migration],
+ cwd=path.dirname(alembic_config))
+
+
+def get_schema(app):
+ with app.app_context():
+ return list(db.engine.execute(text('''
+ SELECT type, name, tbl_name, sql
+ FROM sqlite_master
+ ORDER BY type, name, tbl_name
+ ''')))
+
+
+def test_alembic_head_matches_db_models(journalist_app,
+ alembic_config,
+ config):
+ '''This test is to make sure that our database models in `models.py` are
+ always in sync with the schema generated by `alembic upgrade head`.
+ '''
+ models_schema = get_schema(journalist_app)
+
+ config.DATABASE_FILE = config.DATABASE_FILE + '.new'
+ # Use the fixture to rewrite the config with the new URI
+ conftest.alembic_config(config)
+
+ # Create database file
+ subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases'])
+ upgrade(alembic_config, 'head')
+
+ # Recreate the app to get a new SQLALCHEMY_DATABASE_URI
+ app = create_app(config)
+ alembic_schema = get_schema(app)
+
+ # The initial migration creates the table 'alembic_version', but this is
+ # not present in the schema created by `db.create_all()`.
+ alembic_schema = list(filter(lambda x: x[2] != 'alembic_version',
+ alembic_schema))
+
+ assert alembic_schema == models_schema
+
+
[email protected]('migration', ALL_MIGRATIONS)
+def test_alembic_migration_upgrade(alembic_config, config, migration):
+ # run migrations in sequence from base -> head
+ for mig in list_migrations(alembic_config, migration):
+ upgrade(alembic_config, mig)
+
+
[email protected]('migration', ALL_MIGRATIONS)
+def test_alembic_migration_downgrade(alembic_config, config, migration):
+ # upgrade to the parameterized test case ("head")
+ upgrade(alembic_config, migration)
+
+ # run migrations in sequence from "head" -> base
+ migrations = list_migrations(alembic_config, migration)
+ migrations.reverse()
+
+ for mig in migrations:
+ downgrade(alembic_config, mig)
+
+
[email protected]('migration', ALL_MIGRATIONS)
+def test_schema_unchanged_after_up_then_downgrade(alembic_config,
+ config,
+ migration):
+ # Create the app here. Using a fixture will init the database.
+ app = create_app(config)
+
+ migrations = list_migrations(alembic_config, migration)
+
+ if len(migrations) > 1:
+ target = migrations[-2]
+ upgrade(alembic_config, target)
+ else:
+ # The first migration is the degenerate case where we don't need to
+ # get the database to some base state.
+ pass
+
+ original_schema = get_schema(app)
+
+ upgrade(alembic_config, '+1')
+ downgrade(alembic_config, '-1')
+
+ reverted_schema = get_schema(app)
+
+ # The initial migration is a degenerate case because it creates the table
+ # 'alembic_version', but rolling back the migration doesn't clear it.
+ if len(migrations) == 1:
+ reverted_schema = list(filter(lambda x: x[2] != 'alembic_version',
+ reverted_schema))
+
+ assert reverted_schema == original_schema
+
+
[email protected]('migration', ALL_MIGRATIONS)
+def test_upgrade_with_data(alembic_config, config, migration):
+ migrations = list_migrations(alembic_config, migration)
+ if len(migrations) == 1:
+ # Degenerate case where there is no data for the first migration
+ return
+
+ # Upgrade to one migration before the target
+ target = migrations[-1]
+ upgrade(alembic_config, target)
+
+ # Dynamic module import
+ mod_name = 'tests.migrations.migration_{}'.format(migration)
+ mod = __import__(mod_name, fromlist=['UpgradeTester'])
+
+ # Load the test data
+ upgrade_tester = mod.UpgradeTester(config=config)
+ upgrade_tester.load_data()
+
+ # Upgrade to the target
+ upgrade(alembic_config, migration)
+
+ # Make sure it applied "cleanly" for some definition of clean
+ upgrade_tester.check_upgrade()
+
+
[email protected]('migration', ALL_MIGRATIONS)
+def test_downgrade_with_data(alembic_config, config, migration):
+ # Upgrade to the target
+ upgrade(alembic_config, migration)
+
+ # Dynamic module import
+ mod_name = 'tests.migrations.migration_{}'.format(migration)
+ mod = __import__(mod_name, fromlist=['DowngradeTester'])
+
+ # Load the test data
+ downgrade_tester = mod.DowngradeTester(config=config)
+ downgrade_tester.load_data()
+
+ # Downgrade to previous migration
+ downgrade(alembic_config, '-1')
+
+ # Make sure it applied "cleanly" for some definition of clean
+ downgrade_tester.check_downgrade()
diff --git a/securedrop/tests/test_manage.py b/securedrop/tests/test_manage.py
--- a/securedrop/tests/test_manage.py
+++ b/securedrop/tests/test_manage.py
@@ -166,14 +166,18 @@ def test_get_username_to_delete(mocker):
assert return_value == 'test-user-12345'
-def test_reset(journalist_app, test_journo, config):
+def test_reset(journalist_app, test_journo, alembic_config, config):
original_config = manage.config
try:
# We need to override the config to point at the per-test DB
manage.config = config
+ # Override the hardcoded alembic.ini value
+ manage.config.TEST_ALEMBIC_INI = alembic_config
+
args = argparse.Namespace(store_dir=config.STORE_DIR)
return_value = manage.reset(args=args)
+
assert return_value == 0
assert os.path.exists(config.DATABASE_FILE)
assert os.path.exists(config.STORE_DIR)
diff --git a/securedrop/tests/test_qa_loader.py b/securedrop/tests/test_qa_loader.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/test_qa_loader.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+
+from qa_loader import load_data
+
+
+def test_load_data(journalist_app, config):
+ # Use the journalist_app fixture to init the DB
+ load_data(config, multiplier=1)
diff --git a/testinfra/app-code/test_securedrop_app_code.py b/testinfra/app-code/test_securedrop_app_code.py
--- a/testinfra/app-code/test_securedrop_app_code.py
+++ b/testinfra/app-code/test_securedrop_app_code.py
@@ -23,7 +23,7 @@ def test_apache_default_docroot_is_absent(File):
'python-pip',
'redis-server',
'secure-delete',
- 'sqlite',
+ 'sqlite3',
'supervisor',
])
def test_securedrop_application_apt_dependencies(Package, package):
@@ -90,4 +90,4 @@ def test_securedrop_application_sqlite_db(File, Sudo):
assert f.is_file
assert f.user == securedrop_test_vars.securedrop_user
assert f.group == securedrop_test_vars.securedrop_user
- assert oct(f.mode) == "0644"
+ assert oct(f.mode) == "0640"
| Schema migrations
Some new features (e.g. #1170) are going to require schema migrations. We should integrate e.g. [`sqlalchemy-migrate`](https://sqlalchemy-migrate.readthedocs.io/en/latest/) to make it easier for us to make changes to the databases on the production instances.
Add test harness to ensure alembic up/downgrades work as intended
# Feature request
## Description
Subtask of #1419
We need a test framework that ensures that every `alembic` database upgrade and downgrade applies cleanly.
As a note, whatever we do to load data into the database **cannot depend on** `models.py` because the database schema will always match `alembic`'s `head` revision, so using it to test an upgrade makes no sense since we need to use the out-of-date models. This means we have to hand craft code to do the inserts and selects that we want.
## Initial Proposal
Branch: [`migration-test-harness`](https://github.com/freedomofpress/securedrop/tree/migration-test-harness)
This proposal uses dynamic module loading and pytest parametrization to ensure that every new migration applies the tests in the same way. If a migration is not present, running `make test` will error out because it won't be able to find the correct module.
- Test harness: https://github.com/freedomofpress/securedrop/blob/cebd70fbccf866783b67406efba6023661f1b363/securedrop/tests/test_alembic.py#L109-L159
- Migration tester: https://github.com/freedomofpress/securedrop/blob/cebd70fbccf866783b67406efba6023661f1b363/securedrop/tests/migrations/migration_faac8092c123.py
## User Stories
As a dev, I want to know that my DB migration scripts work in a prod-like environment.
| Getting this done is blocking some new and frequently requested features (e.g. #1422 - the tagging of sources and submissions and #1359 - the assignment of particular users to address a given source), so I've been testing how to go about doing this in the feature branch `db-migrations`. In that branch, the first steps of integrating database migration support into SecureDrop using `SQLAlchemy-migrate` are implemented. Here I'll describe in more detail how this might work and I welcome feedback or suggestions.
## Requirements
- This will need to work for both new installs and databases already created in the production instances
- This should not require manual intervention by administrators for future migrations, so future versions of SecureDrop should check if a migration is needed and perform it if is necessary
- If a migration fails for some reason (more below) and the database cannot be upgraded, then we will need to keep the SD instance running on the existing database version using the existing app code.
## How it currently works
- A new directory `SECUREDROP_DATA_ROOT/db_repository` tracks database versioning data.
- In new installs, this versioning directory is set up at database initialization time - the code for this has been added to `init_db()` in `db.py`.
- In existing installs, the versioning directory should be set up at first run of the app after update. I was thinking that this could be done in a one-off Ansible task that is run by admins for the 0.4 release (something we are going to do anyway). A new Ansible task copies the `SECUREDROP_DATA_ROOT/db_repository` location into the existing `config.py` so `db.py` can find it.
- When a database migration needs to be performed, `migrate()` in `db.py` can be executed to perform a migration. First, the database is backed up, and then the migration is executed. This is something that pre-release can be tested to make sure that the migrations that `SQLAlchemy-migrate` generates work and no problems on the SQL side are encountered.
## Potential Issues
We should enumerate as many possible issues that could arise and handle them if we can:
- Insufficient disk space to backup the database
- In this situation, the instance should continue to run on the old version of the code, let the administrators know they need to resolve the disk space issue, and then a backup and upgrade can be attempted again in the future when the issue is resolved.
- Any other failure during the migration occurs
- Pre-release testing should be done in order to ensure that no problems with the generated migrations to minimize this possibility
- However, if it does happen, SD should copy the backup back, and continue to run on the old version of the code using the existing database.
## When to migrate
One thing I'm trying to figure out is when to best attempt to perform a migration. It could be done during `postinst`. Another option is to have the SD app code check the db models upon launch of the Flask app, see if they have changed, and if so do the migration during `app` initialization. I think the former is probably the better way to handle this but thoughts welcome on this.
Note: I just checked the diff between 0.3.11 and develop, and there are already merged changes to the database models. I'm fine pushing e.g. #1422 to post-0.4, but in order to remove #1419 from 0.4 we will need to back out these changes in develop
After discussion, we are going to push this until the next release.
(Again, this requires the backing out of changes from `develop`)
I would like to add a second proposition instead of using `sqlalchemy-migrate`. One thing about it that feels awkward is that you have to duplicate the models to both the `models.py` in the main application and the migration scripts. [The docs even say to do this](https://sqlalchemy-migrate.readthedocs.io/en/latest/versioning.html):
> To avoid the above problem, you should use SQLAlchemy schema reflection as shown above or copy-paste your table definition into each change script rather than importing parts of your application.
Also, since it's possible we might only ever support Postgres (#2225), using the SQLAlchemy automagic isn't even necessary.
For example, the Rust ORM (`diesel`) takes the lazy approch. There's just a sorted dir with `{up,down}.sql` that are run against the DB. I like this better because I want full control over the generated SQL, and I don't want to have have to print each generated migration and read over it to ensure it does what I want.
```
migrations/
βββ 0001-init.sql
...
```
And the DB is versioned by the numeric prefix. I actually wrote this once for an app because we kept having problems with `sqlalchemy-migrate` and `alembic`.
```python
import os
from flask_sqlalchemy import SQLAlchemy
from os import path
from sqlalchemy import text
from sqlalchemy.exc import InternalError, OperationalError, ProgrammingError
db = SQLAlchemy()
def migrate(migrations_dir):
version = _initialize_db()
migrations = [(v, s)
for (v, s) in _list_migrations(migrations_dir)
if v > version]
for (version, sql) in migrations:
try:
db.session.execute(sql)
sql = text('UPDATE db_version SET version = :version') \
.bindparams(version=version)
db.session.execute(sql)
db.session.commit()
except Exception:
db.session.rollback()
raise
def _initialize_db() -> int:
version = None
try:
sql = text('SELECT version FROM db_version')
version = list(db.session.execute(sql))[0][0]
except (InternalError, OperationalError, ProgrammingError):
db.session.rollback()
sql = text('CREATE TABLE db_version (version INT)')
db.session.execute(sql)
if version is None:
sql = text('INSERT INTO db_version (version) VALUES (0)')
db.session.execute(sql)
version = 0
db.session.commit()
return version
def _list_migrations(migrations_dir) -> list:
migrations = []
for migration in os.listdir(migrations_dir):
full_path = path.join(migrations_dir, migration)
try:
version = int(migration.split('-')[0])
except ValueError:
continue
with open(full_path, 'r') as f:
sql = f.read()
migrations.append((version, sql))
return list(sorted(migrations, key=lambda x: x[0]))
```
This is one of those times we have to ask about whether or not it makes sense to add a dependency or if we want to just roll our own code. This is only 60 lines of Python + SQL versus having to fiddle with a DSL to get it to generate the code we actually want.
The downside to this method is that it doesn't allow arbitrary Python code in the migrations, but realistically, if you need that you're probably doing something overly complicated with your migration. i can't think of a time at work we (at work) have actually needed code for a migration.
Pinging @redshiftzero for thoughts on this.
I'm working on this now, and will be using [Alembic](http://alembic.zzzcomputing.com/). I'm also setting it up so that the first migration will optionally dump everything into Postgres if the SQLite database exists. This will cover new and existing instances.
Blocked by #2866.
Here's a proposed breakdown for this ticket:
1. Integrate autogenerated Alembic migrations support
2. Add database backup in postinst
3. Add alembic migration in postinst
4. Add realistic data upload script for creating a prod-like database for testing/QA of database migrations
| 2018-03-31T11:33:31Z | [] | [] |
freedomofpress/securedrop | 3,241 | freedomofpress__securedrop-3241 | [
"3233"
] | 1f13992ba8933575bf443c03f1877553f04f5e8b | diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
--- a/journalist_gui/journalist_gui/SecureDropUpdater.py
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -2,10 +2,9 @@
from PyQt5 import QtGui, QtWidgets
import sys
import subprocess
-import os
import pexpect
-from journalist_gui import updaterUI, strings, resources_rc
+from journalist_gui import updaterUI, strings, resources_rc # noqa
class UpdaterApp(QtWidgets.QMainWindow, updaterUI.Ui_MainWindow):
@@ -62,11 +61,12 @@ def alert_failure(self, failure_reason):
def check_out_and_verify_latest_tag(self):
self.statusbar.showMessage(strings.fetching_update)
self.progressBar.setProperty("value", 20)
- update_command = ['/home/amnesia/Persistent/securedrop/securedrop-admin',
- 'update']
+ sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
+ update_command = [sdadmin_path, 'update']
try:
- self.output = subprocess.check_output(update_command,
- stderr=subprocess.STDOUT).decode('utf-8')
+ self.output = subprocess.check_output(
+ update_command,
+ stderr=subprocess.STDOUT).decode('utf-8')
if 'Signature verification failed' in self.output:
self.update_success = False
self.failure_reason = strings.update_failed_sig_failure
@@ -90,8 +90,10 @@ def get_sudo_password(self):
sys.exit(0)
def configure_tails(self):
- """Run tailsconfig if the signature verified and the update succeeded."""
- tailsconfig_command = '/home/amnesia/Persistent/securedrop/securedrop-admin tailsconfig'
+ """Run tailsconfig if the signature verified and the
+ update succeeded."""
+ tailsconfig_command = ("/home/amnesia/Persistent/"
+ "securedrop/securedrop-admin tailsconfig")
if self.update_success:
self.statusbar.showMessage(strings.updating_tails_env)
# Get sudo password and add an enter key as tailsconfig command
@@ -110,7 +112,7 @@ def configure_tails(self):
# failures in the Ansible output.
if 'failed=0' not in self.output:
self.update_success = False
- self.failure_reason = strings.tailsconfig_failed_generic_reason
+ self.failure_reason = strings.tailsconfig_failed_generic_reason # noqa
except pexpect.exceptions.TIMEOUT:
self.update_success = False
diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py
--- a/journalist_gui/journalist_gui/strings.py
+++ b/journalist_gui/journalist_gui/strings.py
@@ -1,14 +1,21 @@
window_title = 'SecureDrop Updater'
-update_in_progress = "SecureDrop workstation updates are available! You should install them now. If you don\'t want to, you can install them the next time you reboot."
+update_in_progress = ("SecureDrop workstation updates are available! "
+ "You should install them now. If you don\'t want to, "
+ "you can install them the next time you reboot.")
fetching_update = 'Fetching and verifying latest update...'
updating_tails_env = 'Configuring local Tails environment...'
finished = 'Update successfully completed!'
finished_dialog_message = 'Updates completed successfully. Click OK to close.'
finished_dialog_title = 'SecureDrop Workstation is up to date!'
update_failed_dialog_title = 'Error Updating SecureDrop Workstation'
-update_failed_generic_reason = 'Update failed. Please contact your SecureDrop administrator.'
-update_failed_sig_failure = 'WARNING: Signature verification failed. Contact your SecureDrop administrator immediately.'
+update_failed_generic_reason = ("Update failed. "
+ "Please contact your SecureDrop "
+ "administrator.")
+update_failed_sig_failure = ("WARNING: Signature verification failed. "
+ "Contact your SecureDrop administrator "
+ "immediately.")
tailsconfig_failed_sudo_password = 'Sudo password incorrect'
-tailsconfig_failed_generic_reason = 'Tails workstation configuration failed. Contact your administrator.'
+tailsconfig_failed_generic_reason = ("Tails workstation configuration failed. "
+ "Contact your administrator.")
install_update_button = 'Install Now'
install_later_button = 'Install Later'
diff --git a/journalist_gui/journalist_gui/updaterUI.py b/journalist_gui/journalist_gui/updaterUI.py
--- a/journalist_gui/journalist_gui/updaterUI.py
+++ b/journalist_gui/journalist_gui/updaterUI.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Form implementation generated from reading ui file 'journalist_gui/mainwindow2.ui'
+# Form implementation generated from reading ui file 'journalist_gui/mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
@@ -11,8 +11,14 @@
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
- MainWindow.resize(350, 500)
- MainWindow.setMaximumSize(QtCore.QSize(350, 500))
+ MainWindow.resize(400, 500)
+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
+ sizePolicy.setHorizontalStretch(0)
+ sizePolicy.setVerticalStretch(0)
+ sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
+ MainWindow.setSizePolicy(sizePolicy)
+ MainWindow.setMinimumSize(QtCore.QSize(400, 500))
+ MainWindow.setMaximumSize(QtCore.QSize(400, 500))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.centralwidget)
@@ -71,7 +77,7 @@ def setupUi(self, MainWindow):
self.verticalLayout_3.addLayout(self.horizontalLayout_2)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
- self.menubar.setGeometry(QtCore.QRect(0, 0, 350, 25))
+ self.menubar.setGeometry(QtCore.QRect(0, 0, 400, 22))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
@@ -90,4 +96,3 @@ def retranslateUi(self, MainWindow):
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Command Output"))
self.pushButton.setText(_translate("MainWindow", "Install Later"))
self.pushButton_2.setText(_translate("MainWindow", "Install Now"))
-
| diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
--- a/journalist_gui/test_gui.py
+++ b/journalist_gui/test_gui.py
@@ -1,14 +1,13 @@
import unittest
import subprocess
from unittest import mock
-from PyQt5 import QtCore
from PyQt5.QtCore import Qt
-from PyQt5.QtWidgets import QApplication, QMainWindow, QLineEdit
+from PyQt5.QtWidgets import QApplication, QSizePolicy
from PyQt5.QtTest import QTest
-import time
from journalist_gui.SecureDropUpdater import UpdaterApp, strings
+
class AppTestCase(unittest.TestCase):
def setUp(self):
qApp = QApplication.instance()
@@ -17,6 +16,7 @@ def setUp(self):
else:
self.app = qApp
+
class WindowTestCase(AppTestCase):
def setUp(self):
super(WindowTestCase, self).setUp()
@@ -24,6 +24,16 @@ def setUp(self):
self.window.show()
QTest.qWaitForWindowExposed(self.window)
+ def test_window_is_a_fixed_size(self):
+ # Verify the size policy is fixed
+ expected_sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
+ assert self.window.sizePolicy() == expected_sizePolicy
+
+ # Verify the maximum and minimum sizes are the same as the current size
+ current_size = self.window.size()
+ assert self.window.minimumSize() == current_size
+ assert self.window.maximumSize() == current_size
+
def test_clicking_install_later_exits_the_application(self):
QTest.mouseClick(self.window.pushButton, Qt.LeftButton)
self.assertFalse(self.window.isVisible())
@@ -38,7 +48,6 @@ def test_output_tab(self):
tab = self.window.tabWidget.tabBar()
QTest.mouseClick(tab, Qt.LeftButton)
- #print(pos)
self.assertEqual(self.window.tabWidget.currentIndex(),
self.window.tabWidget.indexOf(self.window.tab_2))
| [qt-journalist-updater] Fix window size
## Description
[See epic #3076 for primary feature, PRs for this ticket should go into `qt-journalist-updater`]
## Steps to Reproduce
1. Launch the GUI: `python3 SecureDropUpdater`
2. Attempt to resize the window.
## Expected Behavior
The window smoothly adjusts size.
## Actual Behavior
The window resizing is very sluggish.
## Comments
Let's adopt a fixed window size.
| Hey @kushaldas - as discussed on our call today, I've modified this issue from addressing the sluggishness on resize to merely fixing the window size to simply the UI. Since we have a standard environment where users will be running the GUI (always Tails and always on a Desktop computer), we can fix the size of the (small) GUI without concern. | 2018-04-05T23:08:18Z | [] | [] |
freedomofpress/securedrop | 3,246 | freedomofpress__securedrop-3246 | [
"3229"
] | f20baa9013991ca6f96fc1fe3c6bd4d5914f77bf | diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
--- a/journalist_gui/journalist_gui/SecureDropUpdater.py
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -11,6 +11,7 @@ class UpdaterApp(QtWidgets.QMainWindow, updaterUI.Ui_MainWindow):
def __init__(self, parent=None):
super(UpdaterApp, self).__init__(parent)
self.setupUi(self)
+ self.output = "Beginning update:"
pixmap = QtGui.QPixmap(":/images/static/securedrop.png")
self.label_2.setPixmap(pixmap)
@@ -31,7 +32,7 @@ def update_securedrop(self):
self.check_out_and_verify_latest_tag()
self.progressBar.setProperty("value", 50)
if self.update_success:
- self.failure_reason = self.configure_tails()
+ self.configure_tails()
self.progressBar.setProperty("value", 80)
if self.update_success:
@@ -43,19 +44,21 @@ def update_securedrop(self):
self.alert_failure(self.failure_reason)
def alert_success(self):
- success_dialog_box = QtWidgets.QMessageBox()
- success_dialog_box.setIcon(QtWidgets.QMessageBox.Information)
- success_dialog_box.setText(strings.finished_dialog_message)
- success_dialog_box.setWindowTitle(strings.finished_dialog_title)
- success_dialog_box.exec_()
- sys.exit(0)
+ self.success_dialog = QtWidgets.QMessageBox()
+ self.success_dialog.setIcon(QtWidgets.QMessageBox.Information)
+ self.success_dialog.setText(strings.finished_dialog_message)
+ self.success_dialog.setWindowTitle(strings.finished_dialog_title)
+ self.success_dialog.show()
+
+ # Close when "OK" is clicked - the update has finished.
+ self.success_dialog.buttonClicked.connect(self.close)
def alert_failure(self, failure_reason):
- error_dialog_box = QtWidgets.QMessageBox()
- error_dialog_box.setIcon(QtWidgets.QMessageBox.Critical)
- error_dialog_box.setText(self.failure_reason)
- error_dialog_box.setWindowTitle(strings.update_failed_dialog_title)
- error_dialog_box.exec_()
+ self.error_dialog = QtWidgets.QMessageBox()
+ self.error_dialog.setIcon(QtWidgets.QMessageBox.Critical)
+ self.error_dialog.setText(self.failure_reason)
+ self.error_dialog.setWindowTitle(strings.update_failed_dialog_title)
+ self.error_dialog.show()
self.progressBar.setProperty("value", 0)
def check_out_and_verify_latest_tag(self):
@@ -89,23 +92,31 @@ def get_sudo_password(self):
else:
sys.exit(0)
+ def pass_sudo_password_to_tailsconfig(self, sudo_password):
+ """Pass the sudo password to tailsconfig, and then return
+ the output from the screen to the user"""
+ tailsconfig_command = ("/home/amnesia/Persistent/"
+ "securedrop/securedrop-admin tailsconfig")
+
+ child = pexpect.spawn(tailsconfig_command)
+ child.expect('SUDO password:')
+ self.output += child.before.decode('utf-8')
+ child.sendline(sudo_password)
+ child.expect(pexpect.EOF)
+ return child.before.decode('utf-8')
+
def configure_tails(self):
"""Run tailsconfig if the signature verified and the
update succeeded."""
- tailsconfig_command = ("/home/amnesia/Persistent/"
- "securedrop/securedrop-admin tailsconfig")
if self.update_success:
self.statusbar.showMessage(strings.updating_tails_env)
# Get sudo password and add an enter key as tailsconfig command
# expects
sudo_password = self.get_sudo_password() + '\n'
try:
- child = pexpect.spawn(tailsconfig_command)
- child.expect('SUDO password:')
- self.output += child.before.decode('utf-8')
- child.sendline(sudo_password)
- child.expect(pexpect.EOF)
- self.output += child.before.decode('utf-8')
+ self.output += self.pass_sudo_password_to_tailsconfig(
+ sudo_password
+ )
self.plainTextEdit.setPlainText(self.output)
# For Tailsconfig to be considered a success, we expect no
@@ -121,5 +132,3 @@ def configure_tails(self):
except subprocess.CalledProcessError:
self.update_success = False
self.failure_reason = strings.tailsconfig_failed_generic_reason
-
- return 'Success!'
| diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
--- a/journalist_gui/test_gui.py
+++ b/journalist_gui/test_gui.py
@@ -1,8 +1,10 @@
import unittest
import subprocess
+import pexpect
from unittest import mock
from PyQt5.QtCore import Qt
-from PyQt5.QtWidgets import QApplication, QSizePolicy
+from PyQt5.QtWidgets import (QApplication, QSizePolicy, QInputDialog,
+ QMessageBox)
from PyQt5.QtTest import QTest
from journalist_gui.SecureDropUpdater import UpdaterApp, strings
@@ -75,6 +77,109 @@ def test_check_out_latest_generic_failure(self, check_output):
self.assertEqual(self.window.failure_reason,
strings.update_failed_generic_reason)
+ def test_get_sudo_password_when_password_provided(self):
+ expected_password = "password"
+
+ with mock.patch.object(QInputDialog, 'getText',
+ return_value=[expected_password, True]):
+ sudo_password = self.window.get_sudo_password()
+
+ self.assertEqual(sudo_password, expected_password)
+
+ def test_get_sudo_password_when_password_not_provided(self):
+ test_password = ""
+
+ with mock.patch.object(QInputDialog, 'getText',
+ return_value=[test_password, False]):
+ # If the user does not provide a sudo password, we exit
+ # as we cannot update.
+ with self.assertRaises(SystemExit):
+ self.window.get_sudo_password()
+
+ def test_tailsconfig_no_failures(self):
+ self.window.update_success = True
+ with mock.patch.object(self.window, 'get_sudo_password',
+ return_value="password"):
+ with mock.patch.object(self.window,
+ 'pass_sudo_password_to_tailsconfig',
+ return_value="failed=0"):
+ self.window.configure_tails()
+
+ self.assertIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, True)
+
+ def test_tailsconfig_generic_failure(self):
+ self.window.update_success = True
+ with mock.patch.object(self.window, 'get_sudo_password',
+ return_value="password"):
+ with mock.patch.object(self.window,
+ 'pass_sudo_password_to_tailsconfig',
+ return_value="failed=10 ERROR!!!!!"):
+ self.window.configure_tails()
+
+ self.assertNotIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.tailsconfig_failed_generic_reason)
+
+ def test_tailsconfig_sudo_password_is_wrong(self):
+ self.window.update_success = True
+ with mock.patch.object(self.window, 'get_sudo_password',
+ return_value="password"):
+ with mock.patch.object(self.window,
+ 'pass_sudo_password_to_tailsconfig',
+ side_effect=pexpect.exceptions.TIMEOUT(1)):
+ self.window.configure_tails()
+
+ self.assertNotIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.tailsconfig_failed_sudo_password)
+
+ def test_tailsconfig_some_other_subprocess_error(self):
+ self.window.update_success = True
+ with mock.patch.object(self.window, 'get_sudo_password',
+ return_value="password"):
+ with mock.patch.object(self.window,
+ 'pass_sudo_password_to_tailsconfig',
+ side_effect=subprocess.CalledProcessError(
+ 1, 'cmd', 'Generic other failure')):
+ self.window.configure_tails()
+
+ self.assertNotIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.tailsconfig_failed_generic_reason)
+
+ def test_update_securedrop_success(self):
+ with mock.patch.object(self.window, 'check_out_and_verify_latest_tag',
+ return_value=""):
+ with mock.patch.object(self.window, 'configure_tails',
+ return_value=""):
+ self.window.update_success = True
+ self.window.update_securedrop()
+
+ # A success dialog box should pop up which we should be
+ # able to click. If it is not there, an exception will occur.
+ button = self.window.success_dialog.button(QMessageBox.Ok)
+ button.click()
+
+ self.assertEqual(self.window.progressBar.value(), 100)
+
+ def test_update_securedrop_failure(self):
+ with mock.patch.object(self.window, 'check_out_and_verify_latest_tag',
+ return_value=""):
+ self.window.update_success = False
+ self.window.failure_reason = "This is a generic failure message"
+ self.window.update_securedrop()
+
+ # A failure dialog box should pop up which we should be
+ # able to click. If it is not there, an exception will occur.
+ button = self.window.error_dialog.button(QMessageBox.Ok)
+ button.click()
+
+ self.assertEqual(self.window.progressBar.value(), 0)
+
if __name__ == '__main__':
unittest.main()
| [qt-journalist-updater] Add CI job for test suite
# Description
[See epic #3076 for primary feature, PRs for this ticket should go into `qt-journalist-updater]
We should add a CI job for running tests in `qt-journalist-updater` branch
| 2018-04-09T20:15:47Z | [] | [] |
|
freedomofpress/securedrop | 3,257 | freedomofpress__securedrop-3257 | [
"3243"
] | 9d549d8d5d23aca0b0e8990cb2c317a3d8c900be | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -574,6 +574,19 @@ def check_for_updates(args):
return False, latest_tag
+def get_release_key_from_keyserver(args, keyserver=None, timeout=45):
+ gpg_recv = ['timeout', str(timeout), 'gpg', '--recv-key']
+ release_key = ['22245C81E3BAEB4138B36061310F561200F4AD77']
+
+ # We construct the gpg --recv-key command based on optional keyserver arg.
+ if keyserver:
+ get_key_cmd = gpg_recv + ['--keyserver', keyserver] + release_key
+ else:
+ get_key_cmd = gpg_recv + release_key
+
+ subprocess.check_call(get_key_cmd, cwd=args.root)
+
+
def update(args):
"""Verify, and apply latest SecureDrop workstation update"""
sdlog.info("Applying SecureDrop updates...")
@@ -588,9 +601,15 @@ def update(args):
subprocess.check_call(git_checkout_cmd, cwd=args.root)
sdlog.info("Verifying signature on latest update...")
- get_release_key = ['gpg', '--recv-key',
- '22245C81E3BAEB4138B36061310F561200F4AD77']
- subprocess.check_call(get_release_key, cwd=args.root)
+
+ try:
+ # First try to get the release key using Tails default keyserver
+ get_release_key_from_keyserver(args)
+ except subprocess.CalledProcessError:
+ # Now try to get the key from a secondary keyserver.
+ secondary_keyserver = 'hkps://hkps.pool.sks-keyservers.net'
+ get_release_key_from_keyserver(args,
+ keyserver=secondary_keyserver)
git_verify_tag_cmd = ['git', 'tag', '-v', latest_tag]
sig_result = subprocess.check_output(git_verify_tag_cmd,
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -106,6 +106,50 @@ def test_update_exits_if_not_needed(self, tmpdir, caplog):
assert "Applying SecureDrop updates..." in caplog.text
assert "Updated to SecureDrop" not in caplog.text
+ def test_update_gpg_recv_primary_key_failure(self, tmpdir, caplog):
+ """We should try a secondary keyserver if for some reason the primary
+ keyserver is not available."""
+
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = 'Good signature from "SecureDrop Release Signing Key"'
+
+ patchers = [
+ mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")),
+ mock.patch('subprocess.check_call'),
+ mock.patch('subprocess.check_output',
+ return_value=git_output),
+ mock.patch('securedrop_admin.get_release_key_from_keyserver',
+ side_effect=[
+ subprocess.CalledProcessError(1, 'cmd', 'BANG'),
+ None])
+ ]
+
+ for patcher in patchers:
+ patcher.start()
+
+ try:
+ securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification successful." in caplog.text
+ assert "Updated to SecureDrop" in caplog.text
+ finally:
+ for patcher in patchers:
+ patcher.stop()
+
+ def test_get_release_key_from_valid_keyserver(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+ with mock.patch('subprocess.check_call'):
+ # Check that no exception is raised when the process is fast
+ securedrop_admin.get_release_key_from_keyserver(args)
+
+ # Check that use of the keyword arg also raises no exception
+ securedrop_admin.get_release_key_from_keyserver(
+ args, keyserver='test.com')
+
def test_update_signature_verifies(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
args = argparse.Namespace(root=git_repo_path)
| securedrop-admin update sporadically fails on gpg recv-key
# Bug
## Description
This an [old issue](https://github.com/freedomofpress/securedrop/issues/1797) that has reared its head again in the latest release inside our automated scripts.
## Steps to Reproduce
* Log into Tails
* Hop into the securedrop directory under persistence and checkout a random branch
* Run `./securedrop-admin update`
## Expected Behavior
The branch (at the time of this writing) should be changed to the tag for `0.6` release.
## Actual Behavior
Depending on when you run this, it might bomb out during the `gpg recv-key` portion. Might get an error that looks like this:
![image](https://user-images.githubusercontent.com/1727935/38442332-840da5f0-39d7-11e8-9229-768d8f032a14.png)
## Comments
Lets implement the manual documented fixes that we recommended in #1842 in an automated fashion within the `securedrop-admin` script. Check out this [comment](https://github.com/freedomofpress/securedrop/pull/1842#issuecomment-309579980) specifically.
| Thanks for filing this @msheiny - since this produces a frustrating experience for end users, we should definitely fix in the next release
Yup, this is what I am getting while testing #3246 | 2018-04-11T22:36:33Z | [] | [] |
freedomofpress/securedrop | 3,262 | freedomofpress__securedrop-3262 | [
"3228"
] | 388a537665e261d507347fe1094735b0eb398421 | diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
--- a/journalist_gui/journalist_gui/SecureDropUpdater.py
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -1,5 +1,6 @@
#!/usr/bin/python
from PyQt5 import QtGui, QtWidgets
+from PyQt5.QtCore import QThread, pyqtSignal
import sys
import subprocess
import pexpect
@@ -7,11 +8,90 @@
from journalist_gui import updaterUI, strings, resources_rc # noqa
+# This thread will handle the ./securedrop-admin update command
+class UpdateThread(QThread):
+ signal = pyqtSignal('PyQt_PyObject')
+
+ def __init__(self):
+ QThread.__init__(self)
+ self.output = ""
+ self.update_success = False
+ self.failure_reason = ""
+
+ def run(self):
+ sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
+ update_command = [sdadmin_path, 'update']
+ try:
+ self.output = subprocess.check_output(
+ update_command,
+ stderr=subprocess.STDOUT).decode('utf-8')
+ if 'Signature verification failed' in self.output:
+ self.update_success = False
+ self.failure_reason = strings.update_failed_sig_failure
+ elif "Signature verification successful" in self.output:
+ self.update_success = True
+ else:
+ self.failure_reason = strings.update_failed_generic_reason
+ except subprocess.CalledProcessError as e:
+ self.output = e.output.decode('utf-8')
+ self.update_success = False
+ self.failure_reason = strings.update_failed_generic_reason
+ result = {'status': self.update_success,
+ 'output': self.output,
+ 'failure_reason': self.failure_reason}
+ self.signal.emit(result)
+
+
+# This thread will handle the ./securedrop-admin tailsconfig command
+class TailsconfigThread(QThread):
+ signal = pyqtSignal('PyQt_PyObject')
+
+ def __init__(self):
+ QThread.__init__(self)
+ self.output = ""
+ self.update_success = False
+ self.failure_reason = ""
+ self.sudo_password = ""
+
+ def run(self):
+ tailsconfig_command = ("/home/amnesia/Persistent/"
+ "securedrop/securedrop-admin "
+ "tailsconfig")
+ try:
+ child = pexpect.spawn(tailsconfig_command)
+ child.expect('SUDO password:')
+ self.output += child.before.decode('utf-8')
+ child.sendline(self.sudo_password)
+ child.expect(pexpect.EOF)
+ self.output += child.before.decode('utf-8')
+
+ # For Tailsconfig to be considered a success, we expect no
+ # failures in the Ansible output.
+ if 'failed=0' not in self.output:
+ self.update_success = False
+ self.failure_reason = strings.tailsconfig_failed_generic_reason # noqa
+ else:
+ self.update_success = True
+ except pexpect.exceptions.TIMEOUT:
+ self.update_success = False
+ self.failure_reason = strings.tailsconfig_failed_sudo_password
+
+ except subprocess.CalledProcessError:
+ self.update_success = False
+ self.failure_reason = strings.tailsconfig_failed_generic_reason
+ result = {'status': self.update_success,
+ 'output': self.output,
+ 'failure_reason': self.failure_reason}
+ self.signal.emit(result)
+
+
class UpdaterApp(QtWidgets.QMainWindow, updaterUI.Ui_MainWindow):
+
def __init__(self, parent=None):
super(UpdaterApp, self).__init__(parent)
self.setupUi(self)
self.output = "Beginning update:"
+ self.update_success = False
pixmap = QtGui.QPixmap(":/images/static/securedrop.png")
self.label_2.setPixmap(pixmap)
@@ -26,15 +106,48 @@ def __init__(self, parent=None):
self.pushButton.clicked.connect(self.close)
self.pushButton_2.setText(strings.install_update_button)
self.pushButton_2.clicked.connect(self.update_securedrop)
-
- def update_securedrop(self):
- self.progressBar.setProperty("value", 10)
- self.check_out_and_verify_latest_tag()
+ self.update_thread = UpdateThread()
+ self.update_thread.signal.connect(self.update_status)
+ self.tails_thread = TailsconfigThread()
+ self.tails_thread.signal.connect(self.tails_status)
+
+ # This will update the output text after the git commands.
+ # At the end of this function, we will try to do tailsconfig.
+ # A new slot will handle tailsconfig output
+ def update_status(self, result):
+ "This is the slot for update thread"
+ self.output = result['output']
+ self.update_success = result['status']
+ self.failure_reason = result['failure_reason']
+ self.progressBar.setProperty("value", 40)
+ self.plainTextEdit.setPlainText(self.output)
+ self.plainTextEdit.setReadOnly = True
self.progressBar.setProperty("value", 50)
+ self.call_tailsconfig()
+
+ def call_tailsconfig(self):
+ # Now let us work on tailsconfig part
if self.update_success:
- self.configure_tails()
- self.progressBar.setProperty("value", 80)
+ self.statusbar.showMessage(strings.updating_tails_env)
+ # Get sudo password and add an enter key as tailsconfig command
+ # expects
+ sudo_password = self.get_sudo_password() + '\n'
+ self.tails_thread.sudo_password = sudo_password
+ self.tails_thread.start()
+ else:
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.statusbar.showMessage(self.failure_reason)
+ self.progressBar.setProperty("value", 0)
+ self.alert_failure(self.failure_reason)
+ def tails_status(self, result):
+ "This is the slot for Tailsconfig thread"
+ self.output += result['output']
+ self.update_success = result['status']
+ self.failure_reason = result['failure_reason']
+ self.plainTextEdit.setPlainText(self.output)
+ self.progressBar.setProperty("value", 80)
if self.update_success:
self.statusbar.showMessage(strings.finished)
self.progressBar.setProperty("value", 100)
@@ -42,6 +155,19 @@ def update_securedrop(self):
else:
self.statusbar.showMessage(self.failure_reason)
self.alert_failure(self.failure_reason)
+ # Now everything is done, enable the button.
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.progressBar.setProperty("value", 0)
+
+ def update_securedrop(self):
+ self.pushButton_2.setEnabled(False)
+ self.pushButton.setEnabled(False)
+ self.progressBar.setProperty("value", 10)
+ self.statusbar.showMessage(strings.fetching_update)
+ self.progressBar.setProperty("value", 20)
+ # Now start the git and gpg commands
+ self.update_thread.start()
def alert_success(self):
self.success_dialog = QtWidgets.QMessageBox()
@@ -59,29 +185,6 @@ def alert_failure(self, failure_reason):
self.error_dialog.setText(self.failure_reason)
self.error_dialog.setWindowTitle(strings.update_failed_dialog_title)
self.error_dialog.show()
- self.progressBar.setProperty("value", 0)
-
- def check_out_and_verify_latest_tag(self):
- self.statusbar.showMessage(strings.fetching_update)
- self.progressBar.setProperty("value", 20)
- sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
- update_command = [sdadmin_path, 'update']
- try:
- self.output = subprocess.check_output(
- update_command,
- stderr=subprocess.STDOUT).decode('utf-8')
- if 'Signature verification failed' in self.output:
- self.update_success = False
- self.failure_reason = strings.update_failed_sig_failure
- else:
- self.update_success = True
- except subprocess.CalledProcessError as e:
- self.output = str(e.output)
- self.update_success = False
- self.failure_reason = strings.update_failed_generic_reason
- self.progressBar.setProperty("value", 40)
- self.plainTextEdit.setPlainText(self.output)
- self.plainTextEdit.setReadOnly = True
def get_sudo_password(self):
sudo_password, ok_is_pressed = QtWidgets.QInputDialog.getText(
@@ -91,44 +194,3 @@ def get_sudo_password(self):
return sudo_password
else:
sys.exit(0)
-
- def pass_sudo_password_to_tailsconfig(self, sudo_password):
- """Pass the sudo password to tailsconfig, and then return
- the output from the screen to the user"""
- tailsconfig_command = ("/home/amnesia/Persistent/"
- "securedrop/securedrop-admin tailsconfig")
-
- child = pexpect.spawn(tailsconfig_command)
- child.expect('SUDO password:')
- self.output += child.before.decode('utf-8')
- child.sendline(sudo_password)
- child.expect(pexpect.EOF)
- return child.before.decode('utf-8')
-
- def configure_tails(self):
- """Run tailsconfig if the signature verified and the
- update succeeded."""
- if self.update_success:
- self.statusbar.showMessage(strings.updating_tails_env)
- # Get sudo password and add an enter key as tailsconfig command
- # expects
- sudo_password = self.get_sudo_password() + '\n'
- try:
- self.output += self.pass_sudo_password_to_tailsconfig(
- sudo_password
- )
- self.plainTextEdit.setPlainText(self.output)
-
- # For Tailsconfig to be considered a success, we expect no
- # failures in the Ansible output.
- if 'failed=0' not in self.output:
- self.update_success = False
- self.failure_reason = strings.tailsconfig_failed_generic_reason # noqa
-
- except pexpect.exceptions.TIMEOUT:
- self.update_success = False
- self.failure_reason = strings.tailsconfig_failed_sudo_password
-
- except subprocess.CalledProcessError:
- self.update_success = False
- self.failure_reason = strings.tailsconfig_failed_generic_reason
| diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
--- a/journalist_gui/test_gui.py
+++ b/journalist_gui/test_gui.py
@@ -2,9 +2,9 @@
import subprocess
import pexpect
from unittest import mock
+from unittest.mock import MagicMock
from PyQt5.QtCore import Qt
-from PyQt5.QtWidgets import (QApplication, QSizePolicy, QInputDialog,
- QMessageBox)
+from PyQt5.QtWidgets import (QApplication, QSizePolicy, QInputDialog)
from PyQt5.QtTest import QTest
from journalist_gui.SecureDropUpdater import UpdaterApp, strings
@@ -54,28 +54,34 @@ def test_output_tab(self):
self.window.tabWidget.indexOf(self.window.tab_2))
@mock.patch('subprocess.check_output',
- return_value=b'Updated to SecureDrop')
- def test_check_out_latest_tag_success(self, check_output):
- self.window.check_out_and_verify_latest_tag()
- self.assertEqual(self.window.update_success, True)
- self.assertEqual(self.window.progressBar.value(), 40)
+ return_value=b'Signature verification successful')
+ def test_updateThread(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=""):
+ self.window.update_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, True)
+ self.assertEqual(self.window.progressBar.value(), 50)
@mock.patch('subprocess.check_output',
return_value=b'Signature verification failed')
- def test_check_out_latest_tag_verification_failure(self, check_output):
- self.window.check_out_and_verify_latest_tag()
- self.assertEqual(self.window.update_success, False)
- self.assertEqual(self.window.failure_reason,
- strings.update_failed_sig_failure)
+ def test_updateThread_failure(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=""):
+ self.window.update_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.update_failed_sig_failure)
@mock.patch('subprocess.check_output',
side_effect=subprocess.CalledProcessError(
- 1, 'cmd', 'Generic other failure'))
- def test_check_out_latest_generic_failure(self, check_output):
- self.window.check_out_and_verify_latest_tag()
- self.assertEqual(self.window.update_success, False)
- self.assertEqual(self.window.failure_reason,
- strings.update_failed_generic_reason)
+ 1, 'cmd', b'Generic other failure'))
+ def test_updateThread_generic_failure(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=""):
+ self.window.update_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.update_failed_generic_reason)
def test_get_sudo_password_when_password_provided(self):
expected_password = "password"
@@ -96,89 +102,65 @@ def test_get_sudo_password_when_password_not_provided(self):
with self.assertRaises(SystemExit):
self.window.get_sudo_password()
- def test_tailsconfig_no_failures(self):
- self.window.update_success = True
- with mock.patch.object(self.window, 'get_sudo_password',
- return_value="password"):
- with mock.patch.object(self.window,
- 'pass_sudo_password_to_tailsconfig',
- return_value="failed=0"):
- self.window.configure_tails()
-
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_no_failures(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = ["SUDO: ", "Update successful. failed=0"]
+ child.before = before
+ self.window.tails_thread.run()
self.assertIn("failed=0", self.window.output)
self.assertEqual(self.window.update_success, True)
- def test_tailsconfig_generic_failure(self):
- self.window.update_success = True
- with mock.patch.object(self.window, 'get_sudo_password',
- return_value="password"):
- with mock.patch.object(self.window,
- 'pass_sudo_password_to_tailsconfig',
- return_value="failed=10 ERROR!!!!!"):
- self.window.configure_tails()
-
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_generic_failure(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = ["SUDO: ", "failed=10 ERROR!!!!!"]
+ child.before = before
+ self.window.tails_thread.run()
self.assertNotIn("failed=0", self.window.output)
self.assertEqual(self.window.update_success, False)
self.assertEqual(self.window.failure_reason,
strings.tailsconfig_failed_generic_reason)
- def test_tailsconfig_sudo_password_is_wrong(self):
- self.window.update_success = True
- with mock.patch.object(self.window, 'get_sudo_password',
- return_value="password"):
- with mock.patch.object(self.window,
- 'pass_sudo_password_to_tailsconfig',
- side_effect=pexpect.exceptions.TIMEOUT(1)):
- self.window.configure_tails()
-
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_sudo_password_is_wrong(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = ["some data",
+ pexpect.exceptions.TIMEOUT(1)]
+ child.before = before
+ self.window.tails_thread.run()
self.assertNotIn("failed=0", self.window.output)
self.assertEqual(self.window.update_success, False)
self.assertEqual(self.window.failure_reason,
strings.tailsconfig_failed_sudo_password)
- def test_tailsconfig_some_other_subprocess_error(self):
- self.window.update_success = True
- with mock.patch.object(self.window, 'get_sudo_password',
- return_value="password"):
- with mock.patch.object(self.window,
- 'pass_sudo_password_to_tailsconfig',
- side_effect=subprocess.CalledProcessError(
- 1, 'cmd', 'Generic other failure')):
- self.window.configure_tails()
-
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_some_other_subprocess_error(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = subprocess.CalledProcessError(
+ 1, 'cmd', b'Generic other failure')
+ child.before = before
+ self.window.tails_thread.run()
self.assertNotIn("failed=0", self.window.output)
self.assertEqual(self.window.update_success, False)
self.assertEqual(self.window.failure_reason,
strings.tailsconfig_failed_generic_reason)
- def test_update_securedrop_success(self):
- with mock.patch.object(self.window, 'check_out_and_verify_latest_tag',
- return_value=""):
- with mock.patch.object(self.window, 'configure_tails',
- return_value=""):
- self.window.update_success = True
- self.window.update_securedrop()
-
- # A success dialog box should pop up which we should be
- # able to click. If it is not there, an exception will occur.
- button = self.window.success_dialog.button(QMessageBox.Ok)
- button.click()
-
- self.assertEqual(self.window.progressBar.value(), 100)
+ def test_tails_status_success(self):
+ result = {'status': True, "output": "successful.",
+ 'failure_reason': ''}
+ self.window.tails_status(result)
+ self.assertEqual(self.window.progressBar.value(), 100)
- def test_update_securedrop_failure(self):
- with mock.patch.object(self.window, 'check_out_and_verify_latest_tag',
- return_value=""):
- self.window.update_success = False
- self.window.failure_reason = "This is a generic failure message"
- self.window.update_securedrop()
-
- # A failure dialog box should pop up which we should be
- # able to click. If it is not there, an exception will occur.
- button = self.window.error_dialog.button(QMessageBox.Ok)
- button.click()
-
- self.assertEqual(self.window.progressBar.value(), 0)
+ def test_tails_status_failure(self):
+ result = {'status': False, "output": "successful.",
+ 'failure_reason': '42'}
+ self.window.tails_status(result)
+ self.assertEqual(self.window.progressBar.value(), 0)
if __name__ == '__main__':
| [qt-journalist-updater] Run blocking actions in a QThread
## Description
[See epic #3076 for primary feature, PRs for this ticket should go into `qt-journalist-updater]
We should run blocking actions `securedrop-admin update` and `securedrop-admin tailsconfig` in a `QThread` such that the UI is still interactive for the user (e.g. if they want to click on the output tab)
## User Stories
As a journalist user, I want the UI to be responsive so that I can watch the output of the upgrade command.
| ![journalist_gui_stuck](https://user-images.githubusercontent.com/272303/38600523-1076488c-3d82-11e8-965f-9d566ae1aa00.png)
Here is one more reason why should we have this mentioned feature. | 2018-04-13T14:57:27Z | [] | [] |
freedomofpress/securedrop | 3,296 | freedomofpress__securedrop-3296 | [
"3284"
] | de2f0c9d9b2805cd183bddda302435f871c487e0 | diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
--- a/journalist_gui/journalist_gui/SecureDropUpdater.py
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -8,6 +8,37 @@
from journalist_gui import updaterUI, strings, resources_rc # noqa
+class SetupThread(QThread):
+ signal = pyqtSignal('PyQt_PyObject')
+
+ def __init__(self):
+ QThread.__init__(self)
+ self.output = ""
+ self.update_success = False
+ self.failure_reason = ""
+
+ def run(self):
+ sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
+ update_command = [sdadmin_path, 'setup']
+ try:
+ self.output = subprocess.check_output(
+ update_command,
+ stderr=subprocess.STDOUT).decode('utf-8')
+ if 'Failed to install' in self.output:
+ self.update_success = False
+ self.failure_reason = strings.update_failed_generic_reason
+ else:
+ self.update_success = True
+ except subprocess.CalledProcessError as e:
+ self.output += e.output.decode('utf-8')
+ self.update_success = False
+ self.failure_reason = strings.update_failed_generic_reason
+ result = {'status': self.update_success,
+ 'output': self.output,
+ 'failure_reason': self.failure_reason}
+ self.signal.emit(result)
+
+
# This thread will handle the ./securedrop-admin update command
class UpdateThread(QThread):
signal = pyqtSignal('PyQt_PyObject')
@@ -123,10 +154,30 @@ def __init__(self, parent=None):
self.update_thread.signal.connect(self.update_status)
self.tails_thread = TailsconfigThread()
self.tails_thread.signal.connect(self.tails_status)
+ self.setup_thread = SetupThread()
+ self.setup_thread.signal.connect(self.setup_status)
- # This will update the output text after the git commands.
# At the end of this function, we will try to do tailsconfig.
# A new slot will handle tailsconfig output
+ def setup_status(self, result):
+ "This is the slot for setup thread"
+ self.output += result['output']
+ self.update_success = result['status']
+ self.failure_reason = result['failure_reason']
+ self.progressBar.setProperty("value", 60)
+ self.plainTextEdit.setPlainText(self.output)
+ self.plainTextEdit.setReadOnly = True
+ if not self.update_success: # Failed to do setup update
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.update_status_bar_and_output(self.failure_reason)
+ self.progressBar.setProperty("value", 0)
+ self.alert_failure(self.failure_reason)
+ return
+ self.progressBar.setProperty("value", 70)
+ self.call_tailsconfig()
+
+ # This will update the output text after the git commands.
def update_status(self, result):
"This is the slot for update thread"
self.output += result['output']
@@ -136,7 +187,8 @@ def update_status(self, result):
self.plainTextEdit.setPlainText(self.output)
self.plainTextEdit.setReadOnly = True
self.progressBar.setProperty("value", 50)
- self.call_tailsconfig()
+ self.update_status_bar_and_output(strings.doing_setup)
+ self.setup_thread.start()
def update_status_bar_and_output(self, status_message):
"""This method updates the status bar and the output window with the
@@ -185,8 +237,6 @@ def update_securedrop(self):
self.pushButton.setEnabled(False)
self.progressBar.setProperty("value", 10)
self.update_status_bar_and_output(strings.fetching_update)
- self.progressBar.setProperty("value", 20)
- # Now start the git and gpg commands
self.update_thread.start()
def alert_success(self):
diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py
--- a/journalist_gui/journalist_gui/strings.py
+++ b/journalist_gui/journalist_gui/strings.py
@@ -10,7 +10,7 @@
"automatically appear if you have not "
"completed any required updates.\n")
fetching_update = ('Fetching and verifying latest update...'
- ' (4 mins remaining)')
+ ' (5 mins remaining)')
updating_tails_env = ('Configuring local Tails environment...'
' (1 min remaining)')
finished = 'Update successfully completed!'
@@ -40,3 +40,4 @@
output_tab = 'Detailed Update Progress'
initial_text_box = ("When the update begins, this area will populate with "
"output.\n")
+doing_setup = "Checking dependencies are up to date... (2 mins remaining)"
| diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
--- a/journalist_gui/test_gui.py
+++ b/journalist_gui/test_gui.py
@@ -53,11 +53,31 @@ def test_output_tab(self):
self.assertEqual(self.window.tabWidget.currentIndex(),
self.window.tabWidget.indexOf(self.window.tab_2))
+ @mock.patch('subprocess.check_output',
+ return_value=b'Python dependencies for securedrop-admin')
+ def test_setupThread(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=MagicMock()):
+ self.window.setup_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, True)
+ self.assertEqual(self.window.progressBar.value(), 70)
+
+ @mock.patch('subprocess.check_output',
+ return_value=b'Failed to install pip dependencies')
+ def test_setupThread_failure(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=MagicMock()):
+ self.window.setup_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.progressBar.value(), 0)
+ self.assertEqual(self.window.failure_reason,
+ strings.update_failed_generic_reason)
+
@mock.patch('subprocess.check_output',
return_value=b'Signature verification successful')
def test_updateThread(self, check_output):
- with mock.patch.object(self.window, "call_tailsconfig",
- return_value=""):
+ with mock.patch.object(self.window, "setup_thread",
+ return_value=MagicMock()):
self.window.update_thread.run() # Call run directly
self.assertEqual(self.window.update_success, True)
self.assertEqual(self.window.progressBar.value(), 50)
@@ -65,8 +85,8 @@ def test_updateThread(self, check_output):
@mock.patch('subprocess.check_output',
return_value=b'Signature verification failed')
def test_updateThread_failure(self, check_output):
- with mock.patch.object(self.window, "call_tailsconfig",
- return_value=""):
+ with mock.patch.object(self.window, "setup_thread",
+ return_value=MagicMock()):
self.window.update_thread.run() # Call run directly
self.assertEqual(self.window.update_success, False)
self.assertEqual(self.window.failure_reason,
@@ -76,8 +96,8 @@ def test_updateThread_failure(self, check_output):
side_effect=subprocess.CalledProcessError(
1, 'cmd', b'Generic other failure'))
def test_updateThread_generic_failure(self, check_output):
- with mock.patch.object(self.window, "call_tailsconfig",
- return_value=""):
+ with mock.patch.object(self.window, "setup_thread",
+ return_value=MagicMock()):
self.window.update_thread.run() # Call run directly
self.assertEqual(self.window.update_success, False)
self.assertEqual(self.window.failure_reason,
| [qt-journalist-updater] Run securedrop-admin setup
## Description
We rarely _need_ `securedrop-admin update` to be run (though we do in 0.7 as we're adding a dependency to `securedrop-admin`), but since it doesn't hurt, let's run it for each update. We can do this by adding a `QThread` to run `securedrop-admin setup`. Also update the time estimates provided in the updater.
## User Stories
As a SecureDrop user, I don't want to have to manually run `./securedrop-admin setup` on the command line if everything else in the update happens automatically
| I think @redshiftzero [meant](https://github.com/freedomofpress/securedrop/pull/3283/files#r182489560) `./securedrop-admin setup` not update.
Yes, I agree @kushaldas! `./securedrop-admin setup` is what should be run as part of `./securedrop-admin update`.
The `./securedrop-admin setup` is taking around 3-4 minutes in my vms. I am adding 4 minutes extra in the time estimate as the first step, before the tool actually updates.
> The `./securedrop-admin setup` is taking around 3-4 minutes in my vms. I
But only on _first_ run, right? If no changes are required, then it only takes a few seconds for me.
>But only on first run, right? If no changes are required, then it only takes a few seconds for me.
Yes, otherwise it is taking around 50 seconds in my vm. | 2018-04-23T22:03:34Z | [] | [] |
freedomofpress/securedrop | 3,298 | freedomofpress__securedrop-3298 | [
"3264"
] | 0d9856711493308d7b67ff8443b7256316a3a6bb | diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
--- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
@@ -108,6 +108,7 @@
'--root', path_securedrop_root,
'check_for_updates'], env=env)
-if 'Update needed' in output:
+lock_location = "/home/amnesia/Persistent/securedrop/securedrop_update.lock"
+if 'Update needed' in output or os.path.exists(lock_location):
# Start the SecureDrop updater GUI.
subprocess.Popen(['python3', path_gui_updater], env=env)
diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
--- a/journalist_gui/journalist_gui/SecureDropUpdater.py
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -3,11 +3,15 @@
from PyQt5.QtCore import QThread, pyqtSignal
import sys
import subprocess
+import os
import pexpect
from journalist_gui import updaterUI, strings, resources_rc # noqa
+LOCK_LOCATION = "/home/amnesia/Persistent/securedrop/securedrop_update.lock" # noqa
+
+
class SetupThread(QThread):
signal = pyqtSignal('PyQt_PyObject')
@@ -20,6 +24,12 @@ def __init__(self):
def run(self):
sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
update_command = [sdadmin_path, 'setup']
+
+ # Create lock so we resume failed updates on reboot.
+ # Don't create the lock if it already exists.
+ if not os.path.exists(LOCK_LOCATION):
+ open(LOCK_LOCATION, 'a').close()
+
try:
self.output = subprocess.check_output(
update_command,
@@ -221,6 +231,8 @@ def tails_status(self, result):
self.plainTextEdit.setPlainText(self.output)
self.progressBar.setProperty("value", 80)
if self.update_success:
+ # Remove lock
+ os.remove(LOCK_LOCATION)
self.update_status_bar_and_output(strings.finished)
self.progressBar.setProperty("value", 100)
self.alert_success()
| diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
--- a/journalist_gui/test_gui.py
+++ b/journalist_gui/test_gui.py
@@ -7,7 +7,7 @@
from PyQt5.QtWidgets import (QApplication, QSizePolicy, QInputDialog)
from PyQt5.QtTest import QTest
-from journalist_gui.SecureDropUpdater import UpdaterApp, strings
+from journalist_gui.SecureDropUpdater import UpdaterApp, strings, LOCK_LOCATION
class AppTestCase(unittest.TestCase):
@@ -58,7 +58,10 @@ def test_output_tab(self):
def test_setupThread(self, check_output):
with mock.patch.object(self.window, "call_tailsconfig",
return_value=MagicMock()):
- self.window.setup_thread.run() # Call run directly
+ with mock.patch('builtins.open') as mock_open:
+ self.window.setup_thread.run() # Call run directly
+
+ mock_open.assert_called_once_with(LOCK_LOCATION, 'a')
self.assertEqual(self.window.update_success, True)
self.assertEqual(self.window.progressBar.value(), 70)
@@ -67,7 +70,10 @@ def test_setupThread(self, check_output):
def test_setupThread_failure(self, check_output):
with mock.patch.object(self.window, "call_tailsconfig",
return_value=MagicMock()):
- self.window.setup_thread.run() # Call run directly
+ with mock.patch('builtins.open') as mock_open:
+ self.window.setup_thread.run() # Call run directly
+
+ mock_open.assert_called_once_with(LOCK_LOCATION, 'a')
self.assertEqual(self.window.update_success, False)
self.assertEqual(self.window.progressBar.value(), 0)
self.assertEqual(self.window.failure_reason,
@@ -128,7 +134,10 @@ def test_tailsconfigThread_no_failures(self, pt):
before = MagicMock()
before.decode.side_effect = ["SUDO: ", "Update successful. failed=0"]
child.before = before
- self.window.tails_thread.run()
+ with mock.patch('os.remove') as mock_remove:
+ self.window.tails_thread.run()
+
+ mock_remove.assert_called_once_with(LOCK_LOCATION)
self.assertIn("failed=0", self.window.output)
self.assertEqual(self.window.update_success, True)
@@ -173,13 +182,23 @@ def test_tailsconfigThread_some_other_subprocess_error(self, pt):
def test_tails_status_success(self):
result = {'status': True, "output": "successful.",
'failure_reason': ''}
- self.window.tails_status(result)
+
+ with mock.patch('os.remove') as mock_remove:
+ self.window.tails_status(result)
+
+ # We do remove the lock if the update does finish
+ mock_remove.assert_called_once_with(LOCK_LOCATION)
self.assertEqual(self.window.progressBar.value(), 100)
def test_tails_status_failure(self):
result = {'status': False, "output": "successful.",
'failure_reason': '42'}
- self.window.tails_status(result)
+
+ with mock.patch('os.remove') as mock_remove:
+ self.window.tails_status(result)
+
+ # We do not remove the lock if the update does not finish
+ mock_remove.assert_not_called()
self.assertEqual(self.window.progressBar.value(), 0)
| [qt-journalist-updater] Resume failed updates
## Description
[See epic #3076 for primary feature, PRs for this ticket should go into `qt-journalist-updater`]
What happens if an update fails? For example, if there is an error in Tails config. Currently the GUI will throw up a dialog box, alerting the user of an error, but the update won't resume. Instead, we should prompt the user to try again on next boot. Here's a potential flow:
0. On boot, we check if an update was in progress and failed. Recall that on this branch we already are running `./securedrop-admin check_for_updates` on boot to determine whether or not we need to run the GUI. Even if we are on the latest release tag, if the lock is there, start the updater as it means that tails config bailed out.
1. Touch `~/Persistent/securedrop_update.lock`
2. Do the update.
3. If the update completes, delete `~/Persistent/securedrop_update.lock`
4. If it does not complete, then the user will be prompted to update again on next boot.
## User Research Evidence
Based on my knowledge interacting with SecureDrop journalists and admins, I know that two things are not uncommon (to be fair, they are not extremely common, but they happen enough that I think it's worthwhile to handle this case):
1. There is some kind of error in `securedrop-admin tailsconfig` that needs to be debugged, or
2. The admin has made some local changes in their securedrop repo (this is common for admins but not for journalists).
## User Stories
As a SecureDrop user, I don't want to lose my one chance to autoupdate if I hit an unforeseen error.
| Possible implementation:
0. Add `*.lock` files to gitignore
1. In the GUI in `UpdateThread.run`, first touch: `~/Persistent/securedrop/securedrop_update.lock`
2. In `install_files/ansible-base/roles/tailsconfig/files/securedrop_init.py`, in addition to starting the GUI updater if `Update needed` is in the output, also check if `~/Persistent/securedrop/securedrop_update.lock` exists.
3. In the GUI, after the update completes successfully, i.e. in `tails_status`, we remove `~/Persistent/securedrop/securedrop_update.lock` | 2018-04-24T00:12:38Z | [] | [] |
freedomofpress/securedrop | 3,300 | freedomofpress__securedrop-3300 | [
"3310"
] | 3babb776512cfbed25b15daa6d6116860205da4c | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -618,7 +618,7 @@ def update(args):
if 'Good signature' not in sig_result:
sdlog.info("Signature verification failed.")
- return 1
+ sys.exit(1)
sdlog.info("Signature verification successful.")
sdlog.info("Updated to SecureDrop {}.".format(latest_tag))
diff --git a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
--- a/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
+++ b/install_files/ansible-base/roles/tails-config/files/securedrop_init.py
@@ -18,6 +18,13 @@
path_torrc = '/etc/tor/torrc'
path_desktop = '/home/amnesia/Desktop/'
path_persistent_desktop = '/lib/live/mount/persistence/TailsData_unlocked/dotfiles/Desktop/' # noqa: E501
+path_securedrop_root = '/home/amnesia/Persistent/securedrop'
+path_securedrop_admin_venv = os.path.join(path_securedrop_root,
+ 'admin/.venv/bin/python')
+path_securedrop_admin_init = os.path.join(path_securedrop_root,
+ 'admin/securedrop_admin/__init__.py')
+path_gui_updater = os.path.join(path_securedrop_root,
+ 'journalist_gui/SecureDropUpdater')
# load torrc_additions
if os.path.isfile(path_torrc_additions):
@@ -66,6 +73,9 @@
os.setresgid(amnesia_gid, amnesia_gid, -1)
os.setresuid(amnesia_uid, amnesia_uid, -1)
env = os.environ.copy()
+env['XDG_CURRENT_DESKTOP'] = 'GNOME'
+env['DESKTOP_SESSION'] = 'default'
+env['DISPLAY'] = ':1'
env['XDG_RUNTIME_DIR'] = '/run/user/{}'.format(amnesia_uid)
env['XDG_DATA_DIR'] = '/usr/share/gnome:/usr/local/share/:/usr/share/'
env['HOME'] = '/home/amnesia'
@@ -89,3 +99,16 @@
'SecureDrop successfully auto-configured!',
'You can now access the Journalist Interface.\n',
'If you are an admin, you can now SSH to the servers.'])
+
+# As the amnesia user, check for SecureDrop workstation updates.
+os.setresgid(amnesia_gid, amnesia_gid, -1)
+os.setresuid(amnesia_uid, amnesia_uid, -1)
+output = subprocess.check_output([path_securedrop_admin_venv,
+ path_securedrop_admin_init,
+ '--root', path_securedrop_root,
+ 'check_for_updates'], env=env)
+
+flag_location = "/home/amnesia/Persistent/.securedrop/securedrop_update.flag"
+if 'Update needed' in output or os.path.exists(flag_location):
+ # Start the SecureDrop updater GUI.
+ subprocess.Popen(['python3', path_gui_updater], env=env)
diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
new file mode 100644
--- /dev/null
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -0,0 +1,287 @@
+#!/usr/bin/python
+from PyQt5 import QtGui, QtWidgets
+from PyQt5.QtCore import QThread, pyqtSignal
+import subprocess
+import os
+import pexpect
+
+from journalist_gui import updaterUI, strings, resources_rc # noqa
+
+
+FLAG_LOCATION = "/home/amnesia/Persistent/.securedrop/securedrop_update.flag" # noqa
+
+
+class SetupThread(QThread):
+ signal = pyqtSignal('PyQt_PyObject')
+
+ def __init__(self):
+ QThread.__init__(self)
+ self.output = ""
+ self.update_success = False
+ self.failure_reason = ""
+
+ def run(self):
+ sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
+ update_command = [sdadmin_path, 'setup']
+
+ # Create flag file to indicate we should resume failed updates on
+ # reboot. Don't create the flag if it already exists.
+ if not os.path.exists(FLAG_LOCATION):
+ open(FLAG_LOCATION, 'a').close()
+
+ try:
+ self.output = subprocess.check_output(
+ update_command,
+ stderr=subprocess.STDOUT).decode('utf-8')
+ if 'Failed to install' in self.output:
+ self.update_success = False
+ self.failure_reason = strings.update_failed_generic_reason
+ else:
+ self.update_success = True
+ except subprocess.CalledProcessError as e:
+ self.output += e.output.decode('utf-8')
+ self.update_success = False
+ self.failure_reason = strings.update_failed_generic_reason
+ result = {'status': self.update_success,
+ 'output': self.output,
+ 'failure_reason': self.failure_reason}
+ self.signal.emit(result)
+
+
+# This thread will handle the ./securedrop-admin update command
+class UpdateThread(QThread):
+ signal = pyqtSignal('PyQt_PyObject')
+
+ def __init__(self):
+ QThread.__init__(self)
+ self.output = ""
+ self.update_success = False
+ self.failure_reason = ""
+
+ def run(self):
+ sdadmin_path = '/home/amnesia/Persistent/securedrop/securedrop-admin'
+ update_command = [sdadmin_path, 'update']
+ try:
+ self.output = subprocess.check_output(
+ update_command,
+ stderr=subprocess.STDOUT).decode('utf-8')
+ if "Signature verification successful" in self.output:
+ self.update_success = True
+ else:
+ self.failure_reason = strings.update_failed_generic_reason
+ except subprocess.CalledProcessError as e:
+ self.update_success = False
+ self.output += e.output.decode('utf-8')
+ if 'Signature verification failed' in self.output:
+ self.failure_reason = strings.update_failed_sig_failure
+ else:
+ self.failure_reason = strings.update_failed_generic_reason
+ result = {'status': self.update_success,
+ 'output': self.output,
+ 'failure_reason': self.failure_reason}
+ self.signal.emit(result)
+
+
+# This thread will handle the ./securedrop-admin tailsconfig command
+class TailsconfigThread(QThread):
+ signal = pyqtSignal('PyQt_PyObject')
+
+ def __init__(self):
+ QThread.__init__(self)
+ self.output = ""
+ self.update_success = False
+ self.failure_reason = ""
+ self.sudo_password = ""
+
+ def run(self):
+ tailsconfig_command = ("/home/amnesia/Persistent/"
+ "securedrop/securedrop-admin "
+ "tailsconfig")
+ try:
+ child = pexpect.spawn(tailsconfig_command)
+ child.expect('SUDO password:')
+ self.output += child.before.decode('utf-8')
+ child.sendline(self.sudo_password)
+ child.expect(pexpect.EOF)
+ self.output += child.before.decode('utf-8')
+ child.close()
+
+ # For Tailsconfig to be considered a success, we expect no
+ # failures in the Ansible output.
+ if child.exitstatus:
+ self.update_success = False
+ self.failure_reason = strings.tailsconfig_failed_generic_reason # noqa
+ else:
+ self.update_success = True
+ except pexpect.exceptions.TIMEOUT:
+ self.update_success = False
+ self.failure_reason = strings.tailsconfig_failed_sudo_password
+
+ except subprocess.CalledProcessError:
+ self.update_success = False
+ self.failure_reason = strings.tailsconfig_failed_generic_reason
+ result = {'status': self.update_success,
+ 'output': self.output,
+ 'failure_reason': self.failure_reason}
+ self.signal.emit(result)
+
+
+class UpdaterApp(QtWidgets.QMainWindow, updaterUI.Ui_MainWindow):
+
+ def __init__(self, parent=None):
+ super(UpdaterApp, self).__init__(parent)
+ self.setupUi(self)
+ self.statusbar.setSizeGripEnabled(False)
+ self.output = strings.initial_text_box
+ self.plainTextEdit.setPlainText(self.output)
+ self.update_success = False
+
+ pixmap = QtGui.QPixmap(":/images/static/banner.png")
+ self.label_2.setPixmap(pixmap)
+ self.label_2.setScaledContents(True)
+
+ self.progressBar.setProperty("value", 0)
+ self.setWindowTitle(strings.window_title)
+ self.setWindowIcon(QtGui.QIcon(':/images/static/securedrop_icon.png'))
+ self.label.setText(strings.update_in_progress)
+
+ self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab),
+ strings.main_tab)
+ self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2),
+ strings.output_tab)
+
+ # Connect buttons to their functions.
+ self.pushButton.setText(strings.install_later_button)
+ self.pushButton.setStyleSheet("""background-color: lightgrey;
+ min-height: 2em;
+ border-radius: 10px""")
+ self.pushButton.clicked.connect(self.close)
+ self.pushButton_2.setText(strings.install_update_button)
+ self.pushButton_2.setStyleSheet("""background-color: #E6FFEB;
+ min-height: 2em;
+ border-radius: 10px;""")
+ self.pushButton_2.clicked.connect(self.update_securedrop)
+ self.update_thread = UpdateThread()
+ self.update_thread.signal.connect(self.update_status)
+ self.tails_thread = TailsconfigThread()
+ self.tails_thread.signal.connect(self.tails_status)
+ self.setup_thread = SetupThread()
+ self.setup_thread.signal.connect(self.setup_status)
+
+ # At the end of this function, we will try to do tailsconfig.
+ # A new slot will handle tailsconfig output
+ def setup_status(self, result):
+ "This is the slot for setup thread"
+ self.output += result['output']
+ self.update_success = result['status']
+ self.failure_reason = result['failure_reason']
+ self.progressBar.setProperty("value", 60)
+ self.plainTextEdit.setPlainText(self.output)
+ self.plainTextEdit.setReadOnly = True
+ if not self.update_success: # Failed to do setup
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.update_status_bar_and_output(self.failure_reason)
+ self.progressBar.setProperty("value", 0)
+ self.alert_failure(self.failure_reason)
+ return
+ self.progressBar.setProperty("value", 70)
+ self.call_tailsconfig()
+
+ # This will update the output text after the git commands.
+ def update_status(self, result):
+ "This is the slot for update thread"
+ self.output += result['output']
+ self.update_success = result['status']
+ self.failure_reason = result['failure_reason']
+ self.progressBar.setProperty("value", 40)
+ self.plainTextEdit.setPlainText(self.output)
+ self.plainTextEdit.setReadOnly = True
+ if not self.update_success: # Failed to do update
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.update_status_bar_and_output(self.failure_reason)
+ self.progressBar.setProperty("value", 0)
+ self.alert_failure(self.failure_reason)
+ return
+ self.progressBar.setProperty("value", 50)
+ self.update_status_bar_and_output(strings.doing_setup)
+ self.setup_thread.start()
+
+ def update_status_bar_and_output(self, status_message):
+ """This method updates the status bar and the output window with the
+ status_message."""
+ self.statusbar.showMessage(status_message)
+ self.output += status_message + '\n'
+ self.plainTextEdit.setPlainText(self.output)
+
+ def call_tailsconfig(self):
+ # Now let us work on tailsconfig part
+ if self.update_success:
+ # Get sudo password and add an enter key as tailsconfig command
+ # expects
+ sudo_password = self.get_sudo_password()
+ if not sudo_password:
+ self.update_success = False
+ self.failure_reason = strings.missing_sudo_password
+ self.on_failure()
+ return
+ self.tails_thread.sudo_password = sudo_password + '\n'
+ self.update_status_bar_and_output(strings.updating_tails_env)
+ self.tails_thread.start()
+ else:
+ self.on_failure()
+
+ def tails_status(self, result):
+ "This is the slot for Tailsconfig thread"
+ self.output += result['output']
+ self.update_success = result['status']
+ self.failure_reason = result['failure_reason']
+ self.plainTextEdit.setPlainText(self.output)
+ self.progressBar.setProperty("value", 80)
+ if self.update_success:
+ # Remove flag file indicating an update is in progress
+ os.remove(FLAG_LOCATION)
+ self.update_status_bar_and_output(strings.finished)
+ self.progressBar.setProperty("value", 100)
+ self.alert_success()
+ else:
+ self.on_failure()
+
+ def on_failure(self):
+ self.update_status_bar_and_output(self.failure_reason)
+ self.alert_failure(self.failure_reason)
+ # Now everything is done, enable the button.
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.progressBar.setProperty("value", 0)
+
+ def update_securedrop(self):
+ self.pushButton_2.setEnabled(False)
+ self.pushButton.setEnabled(False)
+ self.progressBar.setProperty("value", 10)
+ self.update_status_bar_and_output(strings.fetching_update)
+ self.update_thread.start()
+
+ def alert_success(self):
+ self.success_dialog = QtWidgets.QMessageBox()
+ self.success_dialog.setIcon(QtWidgets.QMessageBox.Information)
+ self.success_dialog.setText(strings.finished_dialog_message)
+ self.success_dialog.setWindowTitle(strings.finished_dialog_title)
+ self.success_dialog.show()
+
+ def alert_failure(self, failure_reason):
+ self.error_dialog = QtWidgets.QMessageBox()
+ self.error_dialog.setIcon(QtWidgets.QMessageBox.Critical)
+ self.error_dialog.setText(self.failure_reason)
+ self.error_dialog.setWindowTitle(strings.update_failed_dialog_title)
+ self.error_dialog.show()
+
+ def get_sudo_password(self):
+ sudo_password, ok_is_pressed = QtWidgets.QInputDialog.getText(
+ self, "Tails Administrator password", strings.sudo_password_text,
+ QtWidgets.QLineEdit.Password, "")
+ if ok_is_pressed and sudo_password:
+ return sudo_password
+ else:
+ return None
diff --git a/journalist_gui/journalist_gui/__init__.py b/journalist_gui/journalist_gui/__init__.py
new file mode 100644
diff --git a/journalist_gui/journalist_gui/resources_rc.py b/journalist_gui/journalist_gui/resources_rc.py
new file mode 100644
--- /dev/null
+++ b/journalist_gui/journalist_gui/resources_rc.py
@@ -0,0 +1,890 @@
+# -*- coding: utf-8 -*-
+
+# Resource object code
+#
+# Created by: The Resource Compiler for PyQt5 (Qt v5.10.0)
+#
+# WARNING! All changes made in this file will be lost!
+
+from PyQt5 import QtCore
+
+qt_resource_data = b"\
+\x00\x00\x27\xca\
+\x89\
+\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
+\x00\x01\x90\x00\x00\x00\x3c\x08\x06\x00\x00\x00\x48\x16\xec\xed\
+\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x27\
+\x84\x49\x44\x41\x54\x78\x01\xed\x7d\x07\x78\x5c\xc7\x75\xee\x59\
+\xf4\x5e\x88\xde\x3b\x48\x14\x82\xbd\x58\x2c\xa2\x3a\x25\x51\xa2\
+\x25\xea\xc9\x12\x6d\x59\xb6\xfc\xfc\xec\x7c\x71\x2c\x59\x7a\xcf\
+\x8e\x12\xcb\xb2\x9d\xc4\xf9\xe2\x38\xc9\x4b\x7b\x8c\xe2\xd8\x89\
+\xca\x17\xcb\x45\xc5\x32\xad\x62\x49\x94\x28\xb1\x89\x02\x09\x10\
+\x04\x0b\x88\xde\x7b\x5f\x00\xbb\x28\x8b\xf7\x9f\x59\xdc\xc5\xbd\
+\x77\x0b\x76\x17\x95\xe4\x0c\xb9\xb8\x6d\xea\x7f\xef\x9c\x33\x73\
+\xce\x99\x33\x86\x43\x87\x0e\x4d\x93\x0c\x12\x01\x89\x80\x44\x40\
+\x22\x20\x11\xf0\x10\x01\x3f\x8e\x7f\xeb\xbe\x83\x1e\x26\x93\xd1\
+\x25\x02\x12\x01\x89\x80\x44\xe0\x7a\x46\xe0\xbd\xc3\xff\x4d\x3e\
+\xd7\x33\x00\xb2\xed\x12\x01\x89\x80\x44\x40\x22\xe0\x3d\x02\x92\
+\x81\x78\x8f\x9d\x4c\x29\x11\x90\x08\x48\x04\xae\x6b\x04\x24\x03\
+\xb9\xae\x5f\xbf\x6c\xbc\x44\x40\x22\x20\x11\xf0\x1e\x01\xc9\x40\
+\xbc\xc7\x4e\xa6\x94\x08\x48\x04\x24\x02\xd7\x35\x02\x92\x81\x5c\
+\xd7\xaf\x5f\x36\x5e\x22\x20\x11\x90\x08\x78\x8f\x80\x64\x20\xde\
+\x63\x27\x53\x4a\x04\x24\x02\x12\x81\xeb\x1a\x01\xc9\x40\xae\xeb\
+\xd7\x2f\x1b\x2f\x11\x90\x08\x48\x04\xbc\x47\x40\xac\x03\xf1\x3e\
+\xb9\x4c\x29\x11\x90\x08\x78\x8a\xc0\xf4\xf4\x34\x75\xf5\x0c\x10\
+\xe1\x18\x1f\x1b\x4d\x06\x1f\x83\xa7\x59\xc8\xf8\x12\x81\x15\x81\
+\xc0\x82\x30\x90\xde\xbe\x3e\x3a\x76\xea\x34\x7d\x78\xfc\x04\xf5\
+\xf7\x0f\x90\x0f\x3a\x84\xc5\x32\x4d\xb1\x31\x31\x74\xcb\x8d\xbb\
+\xe9\x33\x5b\x36\x52\x54\x64\xa4\xc3\x06\x77\x76\xf7\x50\x47\x67\
+\x17\xd2\x38\x7c\x6c\x77\x13\x7d\x8e\xc2\x43\xc3\x28\x2b\x33\xdd\
+\xee\x19\xdf\x98\x98\x98\xa0\x73\x95\x17\xe8\xed\x23\x1f\x50\x5d\
+\x7d\x03\xfa\xe8\x34\xf7\x53\x0a\x0a\x0c\xa4\x0d\xeb\x4b\xe8\xb6\
+\x3d\x37\x52\x76\x86\xe3\xb4\x8d\xcd\xcd\x34\x30\x38\xac\xa9\xcb\
+\xf4\xb4\x81\x52\x93\x13\x69\x55\x74\xb4\x28\x6f\x72\x72\x92\xaa\
+\x6b\xeb\x68\x02\x47\xc3\x4c\xbf\xe7\xfc\x7d\x7c\x7d\x28\x2f\x2b\
+\x9b\x02\x03\x03\x44\xbc\xee\x9e\x5e\x6a\x47\xbb\x0c\x06\xe7\x0b\
+\xfd\x45\xbd\x82\x82\x28\x29\x21\x9e\xc2\xc3\xc2\x44\x3a\x47\x7f\
+\x2c\x16\x0b\x55\xd7\xd7\x93\xd9\x34\xee\x32\x3f\x75\x5a\xae\x77\
+\x5a\x6a\x32\x45\xcf\xe0\x3e\x8d\xf7\xd1\xd3\x3b\x48\x93\x53\x16\
+\x75\x34\xbb\x73\x6e\x53\x70\x70\x20\xea\x13\x0c\x1c\x9c\xbf\x94\
+\xfe\x81\x61\x1a\x1b\x9b\xb0\x61\x60\x97\xd1\xcc\x8d\x00\xe0\x11\
+\x11\x1e\x4c\xfe\x7e\xbe\xce\xa2\xd8\xdd\xe7\xbc\xab\x6b\xdb\xa8\
+\xb9\xb9\x87\xc6\x05\xce\x56\xa0\x63\xa2\xc3\x69\x75\x5e\x0a\x25\
+\x27\xad\x22\x3f\x3f\xc7\x9f\xae\xd1\x68\xa2\x61\xe3\x18\xf2\x74\
+\x8d\xbb\x7f\x80\x1f\x45\x84\x05\xd9\xde\x97\x5d\x25\x16\xe1\xc6\
+\x34\xde\x63\x4f\xdf\x10\xb5\xb4\xf5\xd0\xd0\xd0\x08\x31\x78\x6d\
+\xed\xbd\xf8\xbe\xe2\x28\x36\x36\x12\x97\x33\x1f\xd4\x22\x94\x2d\
+\xb3\x94\x08\x2c\x06\x02\x8e\x7b\xa1\x9b\x25\x31\xb1\x7e\xed\xf0\
+\x5b\xf4\xc6\xdb\xef\x50\x1f\x18\x07\x48\xb5\xa6\xdf\x76\xf5\xf4\
+\xd0\xc5\xaa\x2b\xf4\xcb\xd7\x62\xe9\xbe\xbb\xef\xa4\x7d\x77\xdc\
+\x4e\xbe\x20\xb4\xea\x70\xe8\xe7\xff\x49\xa7\xcf\x96\x13\x77\x2e\
+\xb7\x02\x3a\x59\x68\x70\x10\xfd\xcd\xb3\x7f\x4e\x39\xd9\x39\x9a\
+\x24\x65\x15\x95\xf4\xc2\x2f\x7f\x45\x97\xab\x6b\xc8\x80\x7f\xcc\
+\x3c\x6c\x01\x7d\xb3\xa9\xb5\x8d\xde\x7e\xef\x7d\xda\xb1\x75\x2b\
+\x3d\xfa\xf0\xe7\x28\x2e\x36\xc6\xf6\xd8\x6c\x32\xd3\x5f\xfe\xe4\
+\xff\x52\x6b\x7b\xbb\x26\x9d\xc1\xc7\x97\xb6\xae\x2b\xa6\xef\x7e\
+\xfb\x29\x10\x2d\x7f\xba\x52\x53\x47\xdf\xfe\xfe\x0f\x69\x6a\x6a\
+\xca\x96\x96\x4f\x98\xd8\xfe\xcf\x2f\x7c\x8e\xee\xbb\xe7\x5e\x71\
+\xff\xa7\x2f\xbe\x24\x98\xea\x5c\xed\xf2\xf5\xf5\x03\xa3\x8d\xa2\
+\x6d\x9b\x36\xd1\xfd\xfb\xee\xa2\x84\xf8\x78\x4d\xbe\x7c\xd1\xd6\
+\xde\x41\xdf\x79\xf6\x87\x64\x36\x8f\xdb\x3d\x73\x76\x83\xeb\xbd\
+\x73\xf3\x7a\xfa\xce\xb7\x9e\x20\x5f\x10\xdb\x81\x41\x23\x1d\x7e\
+\xe7\x34\xea\xcd\x0c\x55\x85\x8b\x83\x0c\xfc\xfc\x7d\x29\x36\x3a\
+\x82\x56\xe7\xa7\x52\x6e\x76\x32\xda\xa6\x25\x6c\x13\x13\x53\xf4\
+\xee\x07\xe5\x34\x3c\x6c\xc2\x40\x41\x8b\x83\x3e\x3b\x26\xf4\x11\
+\xe1\x21\x94\x93\x95\x48\x05\x6b\xd2\x28\x30\xc0\x5f\x1f\xc5\x76\
+\x3d\x86\x77\x50\x7a\xb6\x86\xea\x1a\x3a\x88\xf9\x9c\x45\x60\x3c\
+\x5b\xd7\x91\x51\x33\x35\xb5\x74\x13\x33\x92\x8d\xeb\xb3\x29\x3d\
+\x2d\xc1\x96\x56\x39\x39\x55\x7a\x89\x9a\x5b\xfa\x69\x72\xd2\x35\
+\x56\x3e\xc0\x27\x34\x34\x90\xd2\x52\x62\x69\x6d\x51\xa6\xa8\xa3\
+\x92\xc7\x42\x1f\xa7\xa7\xc1\x38\x7a\x86\xa8\x15\x8c\x63\x70\x78\
+\x54\x7c\x2b\xca\xf7\x33\x3c\x62\xa2\x4b\x57\x9a\x29\xac\xad\x9b\
+\x52\x53\xe2\x28\x2e\x46\x32\x92\x85\xc6\x5f\xe6\xb7\x78\x08\x68\
+\xa9\xb9\x07\xe5\x8c\x4f\x8c\xd3\xdf\xfd\xeb\xbf\xd1\x7f\xbd\xfc\
+\x2b\x30\x8f\x7e\x10\x25\xf4\x78\x41\x98\xb8\xc3\x5b\x7f\xd6\xd1\
+\xbf\x85\xba\xba\xbb\xe9\xb9\xe7\x5f\xa2\x7f\x7e\xee\xa7\xe8\xd8\
+\x13\x9a\x52\x46\x47\x47\x35\xd7\xee\x5c\x0c\x0d\x0d\x51\x7d\x6d\
+\x8d\x26\xea\xfb\x47\x3f\xa6\x1f\xfe\xed\xdf\x51\x55\x4d\xad\xa8\
+\x87\xa8\xcf\x4c\x3d\x44\x7d\x50\x37\xbe\x67\xc2\x28\xfe\xc8\xf1\
+\x93\xf4\xe7\x7f\xf9\x23\x10\xa3\x66\x5b\x1e\x13\x53\x93\x34\x3e\
+\x6e\xb6\x5d\xab\x4f\x1a\x1a\xea\x68\x70\x00\x22\x07\x04\x93\xd9\
+\x04\xe2\x66\xcf\xec\xb8\xc5\xd5\x57\x2e\x8b\x38\xfc\x67\xc4\xe8\
+\x5e\xbb\xa6\x50\x6e\x57\x77\x2f\x1d\xfe\xc3\x7b\xf4\xe4\x33\x3f\
+\xa0\xd3\x67\xce\xda\xf2\x50\x4e\xc6\xc1\xa8\x27\x26\x26\x95\x4b\
+\xb7\x8f\x75\xf5\xb5\x64\x1c\x31\x8a\xf8\x93\x93\x53\x60\x1e\x5c\
+\x6f\x66\x06\xae\x7f\x53\x93\x16\xea\xee\x33\x82\x01\x5e\xa6\xf7\
+\x8f\x96\xa1\xcd\x5a\x62\x6c\x01\x8e\x13\xe3\x5c\x1f\xd7\xf9\xf0\
+\x73\x26\x94\x83\x43\xa3\x54\x7e\xbe\x91\xde\x7a\xa7\x94\x7a\xfb\
+\x06\x45\x7d\xf4\x7f\x8c\x23\x63\xf4\xd6\xbb\x67\xa8\xb6\xa1\x5b\
+\xb4\x75\x0a\xdf\x09\xbf\x2f\xeb\x37\x64\x65\x7a\xcc\x50\x78\x36\
+\xd6\x37\x38\x4a\x47\x8e\x56\x52\x45\x65\x9d\x3e\x1b\xbc\x43\xfe\
+\xbe\xe6\xae\x17\xe7\x3d\x32\x62\xa6\xea\xba\x2e\xfa\xdd\x5b\xa7\
+\xa9\xa1\xb1\xc3\x2e\xaf\xf9\xde\xe0\xba\x76\x76\xf7\x53\x59\x45\
+\x9d\x60\x12\x43\x60\x16\xfc\xbe\x27\xd0\x77\x94\x30\x89\x77\xcb\
+\x7d\xc2\x88\x67\x97\xc1\x48\xca\xce\x55\x63\x46\xde\x27\xda\xa9\
+\xc4\x91\x47\x89\xc0\x4a\x45\xc0\x6b\x06\xf2\xf2\x2b\xbf\xa5\x8f\
+\x21\xb6\x9a\xc6\x08\x74\xae\x11\xad\x20\x02\x88\xf7\x07\x10\xf9\
+\x97\x5f\x79\x55\x83\x85\x37\xd3\x76\xbd\x68\xa5\xaa\xb6\x96\xfe\
+\xf5\x67\x3f\xc7\x08\xdd\xec\xc6\x4c\x06\xc4\x08\x9d\xb8\xa5\xbd\
+\x93\x7e\xf2\x4f\xff\x0f\xe2\x0e\x2b\x81\xb5\x56\x8a\x09\x8f\x7d\
+\x50\x97\xc7\x33\x1b\x67\xc1\xc7\x30\x0b\xa7\x7e\xd4\xee\x2c\x0d\
+\xdf\x67\x7c\x98\x38\xb2\xf8\xef\x6f\xfe\xf1\x5f\xe8\xdc\xf9\xf3\
+\x9a\xe8\x2c\xd9\x70\x55\xae\x26\xb2\xea\x42\xd4\x7b\x66\xb6\xc1\
+\x38\x5b\xc7\xf2\xb3\x0c\x5e\x61\xf4\xfa\x23\xd7\x87\x09\x38\xff\
+\xda\xda\x87\xe8\xc3\xa3\xe5\x20\x72\x5a\x06\x66\x95\xb6\xb8\x99\
+\x97\x20\x9a\x66\x1a\x18\x32\xd1\xbb\x47\xca\x20\xbe\x51\x63\x8e\
+\x99\x06\x08\xed\xd1\x63\x95\x34\x34\x6c\x06\x63\x32\xcd\xf9\x3d\
+\x89\xba\x21\x4d\xe9\xd9\x5a\xaa\xad\x9f\x1d\x04\xcc\x36\xdd\xbd\
+\x7a\xf1\xec\x89\xcb\x33\x83\x19\x72\xf9\xcd\x2d\xed\xb3\x59\xcc\
+\xe3\x8c\xdb\xc3\x4c\xa0\xbc\xa2\x86\xaa\xaa\x5b\xc0\x1c\xc6\x04\
+\x93\x98\x54\x31\x0e\x7d\xf6\x82\x91\x80\x99\x18\x99\xa9\x41\x7c\
+\x57\x86\xb4\xed\xc8\xc3\xca\xf4\xf5\xb1\xe5\xb5\x44\x60\x65\x20\
+\x30\x4b\xf1\x3c\xa8\x0f\x8b\x79\x5e\x7f\xf3\xcd\x39\xc5\x17\xfa\
+\x2c\x79\xd4\xf7\xca\xef\xde\xc4\x68\xaf\x5e\xff\xc8\xeb\x6b\x0b\
+\x88\xdd\x2f\x7e\xf3\x2a\x46\xc9\x3c\x62\x65\xc2\xe1\x5e\x60\xc6\
+\x57\xd3\xd8\x4c\xbf\x7f\xfb\x2d\x91\xc0\x1b\x46\xe6\x5e\x49\xee\
+\xc7\x62\x7c\xc6\x30\xda\x7f\xee\xbf\x5e\xc0\xe8\x18\x32\xf2\x15\
+\x10\xc6\x41\x60\xdb\xba\x86\xe9\x5c\x45\xd5\xbc\x6b\x33\x31\x61\
+\x06\xb1\x26\x3a\xf9\x49\xa5\x66\x84\x5d\xdf\xd8\x89\x91\xfa\x10\
+\x88\xb9\xe3\x19\xa0\xa3\x82\x99\xf8\x1b\x7c\x7d\xa9\xb4\xb4\x8a\
+\xc6\x46\x59\xe7\xe1\x7d\x98\x02\x73\xb4\xc0\x2d\xdc\x89\x53\x17\
+\xc9\x9b\x19\xb1\x52\x32\x13\x7b\x66\x1c\x4c\xfc\x99\x09\x30\x33\
+\x50\x18\x03\xc7\xe1\x6f\x8c\x7f\xcc\xd8\x7d\x51\x77\x47\xdf\x1c\
+\xcf\x46\x78\x86\xc2\xb3\xa3\x1a\xc1\x48\xaa\xa9\xbd\xa3\x07\x03\
+\x0c\xfb\x59\xaf\x52\xae\x3c\x4a\x04\x96\x0b\x01\xaf\x18\xc8\x6f\
+\xdf\x7c\x5b\x88\x82\x40\xb1\xed\xea\x6d\x98\x19\x85\x2b\x47\x4d\
+\x04\xc4\x37\x43\x7e\xfe\xfe\x87\x1f\x69\x6e\x7b\x7a\xa1\x2e\xb6\
+\xa3\xb3\x93\xca\x2b\x2b\x1d\xce\x3c\xac\x75\xe0\x4e\x8b\x66\x5a\
+\x87\xcc\x76\x45\x7d\xf0\xd1\x31\x1a\x05\xb1\x56\xcf\x1e\xec\x22\
+\x2d\xf0\x0d\xd6\x4f\x88\x9f\x83\x3a\x31\x63\xab\x6f\x6a\xa1\xe3\
+\x27\x4f\xcc\xbb\x54\x35\x4e\xae\x32\x63\x3d\x8c\x9f\x7f\x00\x74\
+\x3c\x01\x0e\x89\x1a\x57\xf3\xc2\xa5\x26\xcc\xd6\x86\x5d\x65\x23\
+\x9e\x31\x71\x14\x79\x21\x3f\x47\xdf\x00\xeb\x26\x1a\x9a\xbb\xa8\
+\xb5\xa5\xcd\x96\x57\x6d\x5d\xbb\xc3\x19\x16\xa7\xf7\x41\xdd\x98\
+\xd0\x72\x1d\xf5\x81\x67\x22\xfd\x98\xd5\xd4\xd4\xcd\x3d\x20\xe1\
+\x3c\x94\x7a\xb1\xfe\x43\x1f\x78\x76\x30\x3c\x32\x49\x97\x2e\x69\
+\x45\xa3\xfa\x78\x8e\xae\x99\x71\xb4\x77\xf6\x42\xfc\x74\x45\x30\
+\x0e\x26\xfe\xcc\x04\xd4\xe2\x5a\x36\x2a\x61\x51\x5e\x57\xf7\x20\
+\x8c\x3b\x9a\xf1\xab\x43\x1f\x32\x39\xfd\xee\x14\x46\x32\x3a\x3a\
+\x4e\x35\xf5\x1d\x74\xa6\xbc\x9a\xda\x3a\x30\x23\x81\x28\x52\x06\
+\x89\xc0\x4a\x41\xc0\x2b\x06\x72\x14\x3a\x04\x1e\x2d\x3b\x0a\xa1\
+\x3e\xe3\x54\x90\x16\x47\x09\x51\xc1\x4e\x89\x76\xd5\x95\x2b\xe8\
+\x08\x5a\x91\x88\xa3\xbc\x84\xe8\x0b\x54\x50\x7d\x64\xa6\x35\x05\
+\x22\xa4\x28\xa7\x5b\xa0\x18\x37\x63\xf6\x61\x15\xc3\xcc\xe6\x22\
+\x46\x7a\x53\x66\xca\x8c\x0d\xa6\xec\xc4\x48\xf2\x63\x2a\xc8\x3f\
+\x75\x40\x1b\x3a\xa0\xe8\x6f\x6e\x6e\x14\xa3\x42\xf5\xa3\xc5\x39\
+\x67\x41\xd4\x34\x05\x5a\xcc\x14\x48\x68\xc3\x14\xea\xad\xaf\x13\
+\x0a\x66\xe6\xf2\x49\xe9\x19\xd1\xee\x39\xeb\xa1\xc3\x47\x8d\x95\
+\x05\xf9\xcf\x35\x2b\x33\x80\xe0\xf7\xf7\xb4\x50\x6b\x43\x05\x75\
+\xb4\x5e\x16\x23\x66\x7d\x9d\x98\x68\x8d\x80\x90\x35\x35\x34\xb9\
+\xac\x0e\xe7\x35\x62\x1c\xa0\x96\xfa\x73\xd4\xd6\x50\x09\x02\x39\
+\x8c\xac\xb4\x9f\x18\x8b\x77\xfc\xfc\x82\xa8\xb6\xb6\x41\xe4\xc5\
+\x3a\x12\x56\xf0\xeb\x95\xf1\x9c\x6e\xdc\x3c\x4a\xf5\x97\x8f\x52\
+\x5f\xfb\x39\x32\x0e\xb5\xe3\x1d\x69\x99\x08\xb7\x2d\x20\x20\x98\
+\x5a\x9a\x67\x99\x91\xa3\x0a\xf2\xb7\x30\x3e\x61\xa2\xd6\xfa\x0a\
+\xb4\xf3\x3c\x0d\x0f\x76\x3b\x7c\xdf\xcc\x40\xeb\x1b\x5b\xe6\xc4\
+\x4c\x29\x63\x12\x75\x67\x2b\xaa\xb3\x20\xee\x35\x75\x1d\x34\x3a\
+\x36\x6e\xc7\x38\x38\x2e\x33\x8f\xf6\xce\x01\x58\xc1\x8d\xa2\xad\
+\x63\xc2\xea\xf0\xdc\xf9\x73\x10\xe7\xbd\x45\x63\xa6\x51\x60\xa4\
+\xfb\x2e\x95\x02\x70\x14\x8c\x64\x7c\x9c\x46\x61\x60\x50\x57\xdf\
+\x4e\x67\xcf\xd5\xc0\xd0\xa3\x07\xd6\x74\x92\x91\xa8\x60\x92\xa7\
+\xcb\x84\x80\xb6\x47\xba\x59\x09\x1e\x39\xe9\x03\x77\x02\x26\x88\
+\xfb\xf7\xdd\x41\xfb\xef\x7f\x90\x5a\x5a\x5a\xe8\xbb\x3f\xfa\x31\
+\x8d\xb2\xcc\x02\x1d\x5d\x1d\x58\x09\x3e\x66\x1a\xa3\xb0\xb0\x70\
+\xf5\x6d\xdb\xb9\x18\x79\xd2\x24\xe5\xa7\xc6\x8a\x38\x6a\x66\xc5\
+\x59\xf9\xfb\xf9\x50\x6a\x6a\xaa\x88\xcf\x96\x3b\xdc\xfd\xb4\x25\
+\x58\xaf\x0b\x73\x33\xe8\x5b\x8f\x3f\x49\x21\xa1\xa1\xf4\xe3\x7f\
+\xfc\x67\x2a\xbb\x50\x85\xaa\x68\x3b\xde\x24\xe4\xdf\xac\x7b\xc8\
+\xcc\x62\x5a\xee\xbc\x23\xdb\x2a\x37\x8f\x13\xce\xdf\x17\x66\xbd\
+\x9f\xbb\x77\x1f\xad\x29\x28\x84\xb5\x58\x35\xbd\xf4\xda\x9b\x56\
+\xa5\xbc\x0e\xa3\xee\xae\x2e\x88\x66\x46\x45\xdd\x9d\x15\xc9\x04\
+\x3b\x2a\xc8\x40\x39\x69\x29\x82\xe6\xab\x99\x05\x13\xad\x8c\xf4\
+\x74\x0a\x0c\x02\x23\x77\x11\xc4\xec\xc3\x30\x4a\x77\xdc\xba\x45\
+\x10\xce\xda\xc6\x41\x32\x4d\xfa\x83\x91\xcc\x2a\x7a\xad\x68\x1a\
+\xa8\xbb\xbb\x0b\x39\x15\x39\xcd\x8d\xf3\x9a\x1a\x1f\xa6\x9d\xdb\
+\x0b\x28\x32\x2a\x9a\x1a\x61\x86\xdb\x67\xf4\xc1\x77\xa1\x1d\x6c\
+\x30\x23\xe8\xed\xed\x13\xf9\x58\x60\x15\xa6\xd7\xaf\xf0\x03\xb6\
+\x1c\x1b\xe9\xe9\xa4\x3b\xf7\xde\x4c\x1b\x36\x6e\xa5\x8b\x97\xaa\
+\xe9\xdc\x45\x2e\x5f\x1b\xf8\x95\x0d\xcc\x18\x38\x68\x9f\xcc\x5e\
+\x31\x4e\x13\xa6\x11\xca\xcf\x89\xa6\xfc\xd5\x85\x42\x24\xd4\xd2\
+\x05\x11\x98\xc1\x17\x6d\xd6\xd6\x6d\x70\xc0\x88\x01\x89\x89\x82\
+\x5c\xe0\xc6\xc4\xbb\xb3\xab\x5f\x58\x55\x99\xc7\x59\xb1\x0f\x03\
+\x85\x39\x06\x44\x3c\xc8\xb9\x5c\x55\x8e\x74\x8d\xc8\x7f\x4c\x30\
+\x30\xff\x80\x40\xe8\x7d\x46\x50\x56\x08\xae\xd9\x6a\x90\xbb\x8a\
+\xb6\x3e\x4a\x2b\x38\xff\x29\xf4\x09\x66\xb8\xf5\x0d\x9d\xd4\xda\
+\xda\x0d\x73\xe6\x58\x4a\x48\x58\xe5\x91\x89\xb4\x92\x9f\x3c\x4a\
+\x04\x16\x02\x01\xaf\x18\x88\x33\x3a\xeb\x0b\x5b\xff\xc2\xe2\x75\
+\x14\x1e\x1e\x41\xa9\xe9\x19\x82\x78\x09\x06\xa2\xab\x29\x13\x37\
+\x1e\x89\xba\x0a\x7e\x90\x11\x7f\xf1\x91\x2f\x51\xd1\xda\x75\x76\
+\x71\x99\x10\xfb\x43\x44\xc2\xc1\x15\xd1\xcf\xc8\xcc\xa2\xa4\xe4\
+\x14\x11\x2f\x26\x26\x0e\x47\xc7\x72\x7c\x67\x9d\x56\x24\x5c\xe0\
+\x3f\xcc\x1c\x0b\x8b\x4b\xa8\x64\xdd\x06\xca\xcc\xce\xa7\xd7\xdf\
+\x3d\x0a\xd1\x86\xbd\x68\x68\x02\xe2\x99\x49\x28\x9f\x5d\x07\x03\
+\x65\xa6\x65\xd0\xd3\x4f\xff\x19\x08\xae\xbd\x58\xc6\x0f\x04\x9d\
+\x09\xf1\x5c\x21\x7a\xd5\x2a\xda\xb2\x6d\x87\x88\xe6\x17\x74\x85\
+\xce\x5d\x68\x71\x98\x64\x42\x58\x38\x39\x7c\x64\xbb\xc9\x84\x77\
+\xc3\xa6\xad\x14\x15\xbd\x8a\xe2\x13\xba\xb0\x1e\xa7\xdc\xf6\x4c\
+\x7d\x62\x02\xe3\x57\x82\xb3\x77\x18\x06\xc6\x5f\x58\x54\x22\x98\
+\x68\x4a\x5a\x1a\x95\x5f\xe8\x54\x92\x68\x8e\xee\xbc\x3f\x66\x22\
+\x6b\xd7\x6d\xa2\x82\xc2\x62\x5a\x3d\x66\xa6\xdf\xbc\xfe\x31\xc4\
+\xa9\xf8\x06\x75\x23\x0f\x1e\xf1\xbb\x12\x13\x7d\x7c\xe2\x24\x88\
+\x7d\x28\x44\x6a\x41\xc2\xe2\x4b\x2d\xa6\xd2\x54\x4a\x75\xc1\xa6\
+\xeb\x09\xf1\xa1\x98\x41\x4c\x81\x59\xf8\xe3\xdb\xf5\x81\x35\x5a\
+\x0f\x85\x84\x84\x63\x46\x32\x0e\xbd\x50\x2f\xf9\xf9\x4e\xc2\xac\
+\x38\x88\x42\x43\x98\x99\xd8\xbf\x4b\x25\x3b\xb6\xe2\xb2\xfe\xfc\
+\xa8\x11\x26\xcd\xcd\xad\x5d\x14\x11\x11\x46\x45\x6b\x32\x94\x28\
+\xf2\x28\x11\x58\x32\x04\xe6\xa6\x2e\x1e\x56\x45\xe9\xcc\xc2\x86\
+\x5f\x35\xaa\x16\x44\x02\xc4\x93\x3b\x32\x86\xa4\x6e\xe5\xea\x1f\
+\x10\x60\x63\x14\x6e\x25\xd0\x45\x52\x15\x2f\x46\xd7\xba\xc7\xcb\
+\x76\xa9\x8c\xb8\xd9\x1c\x76\xbe\x81\xf1\x0c\xc4\x22\x49\x77\x18\
+\x85\xb3\xb2\x78\xe6\xc2\x23\x5c\x6b\x1e\xf3\x9b\x85\x71\x5e\xcc\
+\xfc\x38\x60\x2d\xe3\x3c\x03\xcc\x80\x31\xba\xe7\xc0\xeb\x57\xe6\
+\x1b\x94\x59\x02\x1b\x5e\x60\xe8\xe1\x55\x76\xcf\xfd\xfc\x67\x14\
+\x1c\x14\x4a\xeb\x4b\x36\x61\x51\x63\x21\x44\x68\x81\x82\xa0\xab\
+\x67\x7f\xfa\x8c\xf9\x59\x18\x16\xbf\xee\xbd\xed\x1e\x31\x83\xe0\
+\xeb\x86\xa6\x3a\xcc\x48\x2e\x61\xf6\xd4\x03\x1e\x16\x87\xd9\x5d\
+\x2b\xd6\xd5\x74\x52\x09\x06\x60\xa9\x29\x69\x76\x83\x26\x75\x9e\
+\xdc\x97\xf8\x1b\x62\xd1\x9f\xd1\x38\x0e\x46\xd6\x26\x19\x88\x1a\
+\x20\x79\xbe\x64\x08\x2c\x38\x03\x51\x6a\xce\x9d\x64\x12\xb3\x0c\
+\x1e\x4d\xb1\xb2\xd3\x02\xbd\x85\xbf\xc1\x42\xe1\x41\x01\x54\x9c\
+\x5f\x2c\x88\x9e\x12\xd7\xd9\xd1\x55\xa7\x74\x96\xe6\x7a\xbc\x0f\
+\x2d\xd1\xf5\xd8\xec\x65\x69\x73\x80\xbf\x3f\x94\xd9\xad\x10\x5f\
+\x35\x43\xf7\xf1\x29\x6d\xde\xb8\x8d\xf2\x73\x0b\xc4\x40\x87\x19\
+\x94\xb3\x77\x21\xfa\x03\x9e\x2b\x21\x17\xb3\xcf\x94\xe4\x34\x98\
+\x0e\xb7\x89\x59\x74\x7e\x6e\x36\x56\xff\x17\xc2\x0b\x40\x88\x53\
+\xe6\xc1\x8c\x83\xf3\x19\x1e\x1e\x03\xf3\x18\x15\x96\x87\x7c\x2f\
+\x30\xd0\xf9\xe2\x4c\xa5\x3c\x79\x94\x08\x2c\x06\x02\x8b\xc6\x40\
+\x58\x94\x91\x0e\xf9\x6c\x53\x7d\x0d\x25\xa7\xc4\x53\x76\x66\x06\
+\xe5\xe7\xaf\xa1\xec\xdc\x3c\x4a\x4b\xcb\xc4\x47\x1f\xb4\x18\xed\
+\x71\x2f\x4f\x74\x3a\xf1\x0f\xa3\x77\x0b\x44\x02\x92\x51\xb9\x07\
+\x9b\x8c\x65\x45\x80\x4d\x70\x31\x32\x82\x32\xbc\x0d\x8b\x10\x5f\
+\xa5\x94\xa4\x54\x30\x92\xed\x58\xb5\xbf\x5a\x30\x12\x77\xc4\x5a\
+\x3c\x83\x08\x80\x18\x36\x37\x3b\x0b\xb3\x89\x29\x18\x16\xb0\x3e\
+\xc6\x6a\x30\xa2\xc7\xd9\xc6\x38\xe0\xa6\x65\x60\x70\x04\xc6\x09\
+\xd6\x19\x1e\xdf\xe7\x9f\x0c\x12\x81\xe5\x42\x60\x41\x19\x88\x70\
+\xfb\x34\xf3\x41\x07\xc1\x07\xd2\x37\xbf\xfe\x35\x32\x0e\x0f\x53\
+\x4a\x5a\x3a\xe4\xb4\xcb\xeb\xa2\x81\xc5\x45\xac\x7f\xb0\x4c\xc3\
+\xe6\x1f\x0a\x62\xa8\x76\xa1\xd0\xb6\x50\x54\x48\x20\x14\xf5\x56\
+\x3f\x54\x92\x91\x2c\xd7\x67\x78\x75\x96\x2b\x18\x09\xaa\xde\xd6\
+\xde\x4a\x6f\xfc\xfe\x15\x31\xa3\xd8\x02\x46\x92\x83\xd9\x05\xbb\
+\x70\x51\x44\x95\xce\x5a\xa7\x30\x0c\xab\xef\x38\x7b\x71\xa6\xc2\
+\x38\xd8\xbf\x57\x3f\xcf\x38\xe0\x45\x81\x83\x64\x1a\xce\x10\x95\
+\xf7\x97\x1a\x01\xaf\x18\x08\x06\x4a\x76\x81\x3b\x03\xec\x5a\x30\
+\x42\x9a\x55\x08\xa7\x67\x64\xd9\xc5\xf3\xe4\x86\x2f\x08\xfe\x42\
+\x85\x8d\xc5\x85\x54\x7b\xb1\x0c\x0e\xf4\xa2\x28\x76\x55\x2c\xc5\
+\xc7\xc5\x41\xc9\x9b\x48\xa9\x60\x6e\x05\x85\x45\x30\xf3\x9c\x15\
+\x2f\x2c\x54\x99\x4b\x92\x0f\xb8\xb6\xa3\x35\x12\x4b\x52\xb6\x2c\
+\x44\x20\xa0\x30\x12\x16\x6b\xf1\x2f\x2d\x25\x83\xb6\x6c\xda\x4e\
+\xd9\x59\x79\x62\xc1\xe0\x5c\x8c\x44\x0f\xa3\x8d\x71\xc0\xbd\x09\
+\x8b\xaa\xc6\x60\x1e\xcc\x41\x32\x0e\x3d\x52\xf2\x7a\xb9\x11\xf0\
+\x8a\x81\xb0\x1c\x98\xfd\x33\xd9\x05\x10\xfc\x5f\xff\xee\x2d\xca\
+\xca\xca\x45\xe7\xc9\xb0\x7b\xec\xe9\x8d\x7e\x98\xfb\x5a\xfd\x6c\
+\xcd\x72\x2c\x1e\xd9\x45\x46\x44\x78\x9a\x15\xed\xd9\xb5\x8b\xd6\
+\xad\x2d\x82\x02\x34\x84\x82\x85\xa5\x8b\x96\x39\x99\xdd\xb0\x30\
+\xf2\xb8\xd0\x25\x48\xc0\x8e\x0d\x3b\xbb\xba\xa1\xcc\xf5\xd7\x88\
+\xe2\x42\xa1\xb4\xe5\x59\xa0\x0c\x4b\x87\x80\xc2\x48\x9a\x5b\x1b\
+\xe0\x71\xb7\x11\xde\x90\x33\xc1\x48\x3e\x43\x59\x19\x39\x6e\x31\
+\x12\x85\x71\xf0\x42\xc4\x7e\x88\xaa\x24\xe3\x58\xba\x77\x27\x4b\
+\xf2\x0e\x01\xaf\x18\xc8\xf6\xcd\x1b\xe9\xfd\x63\x27\x6c\x8b\xf9\
+\x94\xa2\xd9\x02\xab\xa9\xad\x9d\x9e\x86\xa3\xc2\x2f\x3f\xfc\x20\
+\xed\xbd\xf5\x16\xe5\x91\x47\x47\xce\xc7\x8c\x09\xc1\x3f\x3c\xf7\
+\x9f\x1a\xeb\x22\x56\x50\x06\x82\x79\xfd\xe9\x13\xdf\xa0\xc2\x35\
+\x05\x1e\xe5\xc9\x16\x46\xb1\xb1\xf6\x9e\x6e\x3d\xca\x64\x81\x23\
+\xb3\x05\x15\xeb\x62\xbc\x0d\xbc\x6a\xfd\x52\x5d\x0b\x3d\xf1\x67\
+\xcf\x68\xb3\x80\x18\xb1\x30\x2f\x97\xbe\xfd\xf8\x1f\xbb\x5c\xcf\
+\xa0\x4d\xb4\xf0\x57\xf3\x69\xdb\xc2\xd7\x66\x36\x47\xae\xd7\x42\
+\x8d\xe6\x67\xc5\x50\xb3\x03\x12\x65\x46\xd8\xd4\x5c\x0f\x25\x79\
+\x23\xd6\xe3\x64\x11\x8b\xb6\x32\xc1\x48\xd8\xe3\x81\xde\x3c\xdb\
+\xc6\x38\xe0\x6d\x98\x75\x1c\xa3\xa3\x3c\x38\x9b\x5e\xb0\x3a\xce\
+\xb6\x5c\x9e\x49\x04\x16\x16\x01\xaf\x18\xc8\xc3\x07\x3e\x4b\x65\
+\xe7\x2b\xa8\x0f\x7b\x1a\x4c\x63\x61\x93\x3a\xf0\x0a\xf1\x61\xe8\
+\x3d\xfe\xe9\xdf\x7f\x46\x17\xaa\xaa\xe1\xe2\xfc\x61\x8a\x74\xb2\
+\x17\x88\x3a\x9d\xa3\x73\xe3\x98\xbd\x8f\x23\x03\x44\x65\x65\x67\
+\x3f\xf5\x98\x81\x38\xca\x7f\x39\xee\x31\xbb\xe0\xd9\x02\x07\x13\
+\xda\x37\x8e\x55\xc6\xa0\x15\x5e\x07\x26\x46\x83\xc0\x5b\x1d\x98\
+\x31\xf1\x4a\xe7\xb6\x96\x66\x18\x2d\xe4\xab\x1f\x2d\xc9\x39\x5b\
+\x12\x71\x60\xb1\x20\xd7\x45\xbf\x90\x70\x49\x2a\xa1\x2f\x04\x18\
+\x2b\x33\x04\xde\xcb\x45\x38\x29\x74\x24\x8b\xd5\xa7\x73\x71\xcd\
+\xcc\x23\x08\x6d\xe5\xf6\xf6\xf5\x76\x8b\x98\x6a\xc7\x9b\x0a\x23\
+\xa9\x6f\xa8\xa5\xc6\xa6\x7a\xc1\x40\x58\xb4\x95\x91\x96\x05\xe6\
+\x00\x7d\x9c\x30\x51\x9e\x06\xc3\x60\xc6\x31\x4a\x23\x2c\xaa\x42\
+\x9e\x56\xe6\xa6\x1d\x58\xb0\x07\x68\xde\xd3\x85\xc7\x1b\xbe\xb0\
+\x6c\x94\xba\x73\x17\x2f\x46\x3e\x5a\x32\x04\xbc\x62\x20\xc9\x58\
+\x9c\xf7\xf5\x47\xbf\x40\xff\x02\x26\x31\x64\xc2\xbc\x40\xb7\x28\
+\x90\x3b\x16\x87\xf7\x3f\x3a\x06\x87\x70\x75\xf4\x47\x5f\x79\x94\
+\x4a\x8a\x9c\xaf\x60\x76\xda\x5a\x07\x1d\x9c\xf3\xb6\xee\x13\xe1\
+\x34\xd5\x8a\x7d\xc0\x75\x9f\xb2\x18\xe0\x95\xf8\x18\x55\x56\x37\
+\x50\x69\x59\x39\x5c\x54\xf0\xaa\x7e\x7b\x0e\xc2\x44\x42\x4d\x8c\
+\x5c\x36\x4a\x8f\x13\xae\x79\xa4\xab\xac\xa1\x70\x99\x76\x01\x1f\
+\x32\x41\xf4\xf1\x0f\x86\x17\xda\x5a\x2c\x92\x0b\x83\x7b\x0f\x78\
+\xb7\x75\xb0\xd6\x85\xe3\x61\xb5\xc8\x02\x96\xec\x3a\x2b\x26\xbc\
+\xfe\x01\x41\xd4\xd0\x3a\x48\xe6\xa9\x3a\x2c\xbe\x63\x57\x20\x56\
+\x57\xf1\xfa\x94\xec\xfe\xc5\xdd\xd9\x89\x60\x20\xd8\x10\x6c\xe3\
+\x96\x1d\xd4\xdd\xd5\x4e\x8d\xf5\xd5\x34\xd0\x6f\x5d\x61\xaf\x7e\
+\x77\xca\xe6\x57\x75\x78\xde\x88\xf5\x1f\x59\x99\xb9\xb4\x15\xa2\
+\xad\x55\xab\x12\xa8\x7f\x80\x67\x1c\x70\x6b\x83\x77\x26\xca\xd5\
+\x71\x06\xb1\x75\x00\x3e\x8f\xc4\x84\x68\x2a\x58\x9d\x26\x4c\x77\
+\xcf\x96\x55\x63\xad\x0d\x1b\x85\xe8\x6b\x2f\xaf\x25\x02\x4b\x8b\
+\x80\x57\x0c\x84\xab\xb8\x73\xc7\x2e\x0a\x0b\x09\xa6\xff\x78\xe1\
+\x25\xaa\x6d\xe9\x14\x1f\x3f\x77\x02\x75\x60\x11\x4b\x63\x4b\x2b\
+\x3d\xfb\xd7\x7f\x4b\x0f\x1d\xb8\x8f\x0e\xdc\x7b\x37\x56\xdc\x7a\
+\x5d\xa4\x2d\x6b\x77\x3b\xb8\x2d\xc1\x8a\x39\x01\x03\x01\x44\xef\
+\x7d\x7c\xca\x5a\x23\xc6\x4b\x87\x99\x78\x00\xa2\x1b\x1d\x19\xea\
+\xd6\x5a\x99\x15\xd3\x34\x54\x84\x19\xbb\x7f\x00\x18\x47\x7d\x3f\
+\xae\xfa\xc5\x02\x3b\xbd\xc7\x01\x1e\x79\x8f\x8d\x0e\x53\x44\xb4\
+\x95\xfa\x39\x5b\x37\xb1\x90\xed\x62\x91\xa8\x3f\x7c\x66\x75\x74\
+\x8f\xc3\xeb\x6f\xa3\x18\xf9\xeb\xeb\xc5\xe5\xf1\x0a\x6f\xc3\xb4\
+\x89\x78\x17\x45\x77\x83\xf2\xcd\x27\xc2\x94\x37\x2e\x3e\x91\xba\
+\x3a\xc0\x48\x1a\xaa\xb1\x7f\x0c\x63\x60\xdd\x68\x4c\xc9\x4b\x61\
+\x24\xd5\x35\x97\x85\xe5\xd6\xcd\x37\x1e\x80\xd9\x6f\xa0\x78\xac\
+\xff\xa6\xb9\x7e\xbc\xe0\x31\x21\x2e\x8a\xd6\x15\x67\x43\xa7\x98\
+\x08\xb3\x5f\x38\x96\xc4\x8c\xae\x0b\x6e\x54\x6a\xe0\x17\x8b\xbd\
+\x35\xc8\x20\x11\x58\x4e\x04\xe6\x45\xcd\xd7\x6f\xd8\x4c\xcf\xa6\
+\xa4\xd2\xf3\x2f\xbe\x48\x47\x4e\x96\xc2\xbd\xb6\x9f\x83\xd9\x08\
+\x36\x71\x82\x98\xe6\x79\x6c\x3c\x75\x09\x4e\x14\xff\xf8\x2b\x5f\
+\xc6\x4e\x80\xb1\xcb\xd9\xe6\x65\x2f\x9b\x19\xab\xb3\xc0\x04\xc2\
+\x02\x5f\x49\xc5\xd0\xf1\x28\xee\x5a\x9c\xc5\x5d\x89\xf7\x05\xe1\
+\x83\xb3\x48\x67\x81\x3d\xe2\x0e\xb6\xb6\xd0\xb6\x0d\x9b\x9c\x45\
+\x59\x94\xfb\x4c\xe8\xb5\xfe\xbd\xb4\xc5\xf0\x6e\x93\x3d\x5d\xcd\
+\x94\x97\x11\x0d\x13\x5c\xcf\x17\xe6\xb1\x8f\x2a\x66\x8e\x49\x58\
+\x45\x1e\x97\x90\x04\x46\xd2\x0a\x46\x52\x43\x43\x83\xcc\x48\xac\
+\x2e\xdc\x95\x12\x15\x46\xa2\xcc\x3a\x14\x26\xc4\xcf\x15\x37\x3f\
+\x71\xd8\xe2\xb6\xa4\x38\x4b\xec\x08\x19\x80\xed\x77\xf9\xfe\x84\
+\x58\x2f\xc2\x22\x50\x74\x5b\xed\x58\x4d\xc9\x5a\x1e\x25\x02\x4b\
+\x8a\xc0\xac\xe6\xcf\xcb\x62\x79\xd4\xf5\xc4\xe3\x4f\xd0\xd7\x1e\
+\x79\x10\xab\xcc\x79\x84\xe4\x60\x54\x84\xce\xcb\x62\xae\x4f\xcb\
+\xce\xd1\x5f\xfc\xe4\xef\xa1\x23\x19\x72\xab\x34\xee\x90\x9a\x1f\
+\x88\x2b\x8f\xd4\xf4\x22\x33\xb7\x32\xbb\x0a\x22\x89\xb6\x81\x10\
+\x65\x25\x45\xd3\xae\xdd\x7b\xdc\xaa\x31\xa7\xd1\x60\xc4\x98\x01\
+\x27\x26\x68\x6a\xc2\xe4\x56\x66\x8b\x1c\x89\x09\xf3\x60\x5f\x07\
+\x85\xf8\x8f\x52\xf1\xda\x0d\x8b\x5c\x9a\xfb\xd9\xb3\xb7\x04\xde\
+\x8b\x64\x74\xb0\x81\xb6\xdf\xb0\xcb\xfd\x84\x9a\x98\x06\xe8\xb4\
+\x46\xc5\xda\x0f\x66\x10\xc9\xa9\x30\xe5\xdd\xbe\x9b\x8a\x4a\x36\
+\x53\x38\xd6\x40\xb9\xf3\x2e\x38\x4e\x74\x54\x28\xdd\xb4\xbb\x04\
+\x5b\x23\xdf\x00\x3d\x5f\x3a\x06\x11\xbe\x82\x79\xf0\x3e\xf1\xca\
+\xe6\x52\x88\x26\x83\x44\x60\x45\x20\x30\xaf\x19\x88\xd2\x02\x26\
+\x0c\xfb\xf7\x1f\x80\x6c\x37\x8b\x7e\xf6\xc2\x8b\x74\xa5\x09\xfb\
+\x3b\x80\x90\xe9\x3b\x0d\x8b\x38\x6a\xea\x1b\xe9\x0f\xef\xbd\x4f\
+\x07\xee\xbb\x4f\x49\x6e\x7f\x04\x51\xf4\x85\xd8\x21\x36\xd4\x07\
+\xa3\xad\x00\x0c\xb6\xac\x3d\x86\x3b\x8e\x01\x8c\xc8\xd1\xbe\xe1\
+\xf6\x99\x5c\x5d\x77\x98\x11\x30\x3e\xab\x82\xa6\xe9\xe0\x43\x07\
+\xb1\x7a\x3f\x75\xce\x06\x30\xc6\xc1\xbe\x16\x8a\x09\x0f\x86\xdf\
+\xa9\x59\xaa\xc2\x32\xff\x55\x51\x91\x58\x20\xe9\xb9\xb9\xf3\x9c\
+\x85\x7a\x19\x81\x15\xca\x83\xfd\x5d\x70\xcd\x5e\x41\x8f\x3e\xf6\
+\x25\x61\x4a\xed\x65\x56\x0b\x96\x8c\x31\xe7\x6f\xd7\x34\x36\x42\
+\x4d\x35\x27\xe9\xae\xbb\x6e\xa5\xc4\xa4\x64\xaf\xf2\x67\x05\x7d\
+\x23\xbc\x2e\x0c\x0c\xf4\x52\xde\xea\x62\x8a\x81\xc5\x9f\xaf\xaf\
+\x3f\xd6\x19\x65\x22\xcf\x54\x2a\x2b\x3d\x21\x14\xed\x6a\xdd\x88\
+\xba\x20\x9e\xb9\xc5\x43\x5c\x75\xcf\x9d\xdb\x60\x39\x17\x20\x98\
+\x06\x7f\xf6\xbc\x7f\x08\xeb\x94\x9a\x9a\xbb\xe1\xe9\x7a\xbb\xd0\
+\x85\xa8\xd3\xc9\x73\x89\xc0\x72\x22\xb0\x20\x0c\x44\x69\x40\xc9\
+\xba\x8d\xf4\xcc\xd3\xa9\xf4\xc2\x4b\x10\x69\x1d\x2f\x25\x0b\x3a\
+\xa8\x9e\x89\xf0\x74\xbe\xec\x5c\x05\xdd\xb7\x7f\xbf\x53\x25\x31\
+\x9b\x59\xfa\xf9\x1a\xe8\xab\x8f\x3d\x46\xf9\x6b\x0a\xed\x66\x1c\
+\xec\x2a\xfc\x5a\x0a\x62\x16\x01\xb1\x56\x4e\x42\x18\x3d\xfc\xd0\
+\xe7\x69\xe7\xee\x9b\xdd\x6b\x1e\xf0\xcd\xce\x4c\xa3\x27\xff\xe4\
+\x71\x10\xab\xd9\xc9\x24\x63\xce\xe2\x2f\xf6\x88\xbb\x12\x02\x33\
+\x8f\xbe\xee\x26\x32\x1b\x1b\xe8\xe0\x23\x5f\xa0\x9c\xdc\xd5\x4b\
+\x5e\x2d\xc6\x98\x37\xa7\xb2\x05\x60\x64\x82\x8b\xf7\xf6\x96\x2a\
+\x32\x0d\xb7\xd0\xde\xbd\xb7\xd3\x8e\x5d\x37\xd9\x1e\x7b\x76\x02\
+\xe6\x8f\xf7\x97\x9e\x99\x43\x93\x35\x13\x54\x7e\xe6\x24\xb0\x8f\
+\x81\x05\x5c\x01\x45\xaf\x8a\x11\xef\x82\x9d\x2e\xa2\x33\x38\xcd\
+\x96\x1f\x05\x42\xf7\xc2\x7e\xad\x78\xa6\x61\x34\x8e\xc1\xc8\x82\
+\xf7\x19\x69\x83\xa5\xde\xa4\x58\xd9\x6e\xdf\x97\x9c\x66\x27\x1f\
+\x48\x04\x96\x04\x01\x55\x8f\x5a\x98\xf2\x78\xad\xc5\xe3\xdf\x78\
+\x9c\xa2\xa2\x5e\xa2\x5f\xbf\xf9\x07\x87\x99\xf6\xf4\xf5\x8a\xbd\
+\x2e\x42\x67\x5c\x88\x38\x8c\x84\x0e\x1f\x8d\x4e\x18\x17\x97\xe0\
+\xf0\xb1\xa7\x37\x87\xb1\xeb\x20\x8b\x29\xd8\x6d\x09\xfb\x20\x5a\
+\x69\x81\xc5\x4e\x0f\x3d\xfc\x08\xed\xdc\xb5\xc7\xa3\xaa\x31\x61\
+\x4a\x48\x4c\x72\xca\x8c\x3d\xca\x6c\x91\x22\x33\xf1\x0e\x81\xc1\
+\xc5\x83\x07\xbe\x4c\xf3\xf5\x4e\xe0\x4d\x15\x19\x5b\x33\x14\xf7\
+\xdd\xed\x17\xb0\x77\x06\x16\x5c\xe2\x1f\xfb\x40\xf3\xf3\x9d\xa2\
+\xbc\xac\x4c\xda\xb5\xe7\x1b\x94\x92\x9a\xee\x4d\xd6\x42\x54\xc8\
+\xcc\xc3\x1f\xeb\x93\x42\xb0\x78\x73\xed\xfa\x2d\x94\x31\x98\x4b\
+\x75\x35\x55\x74\xf6\xd3\x63\x98\x89\x24\x50\x6e\x7e\x21\xf2\x76\
+\xce\x3c\x94\x82\x99\x41\xf0\x8f\x07\x03\x75\x8d\x1d\x54\x71\xa1\
+\x9e\x02\x31\x03\x67\x3f\x59\x80\x50\x06\x89\xc0\x8a\x43\x60\xc1\
+\x18\xc8\x10\xf6\xb4\x18\xc1\x06\x48\x49\x89\x09\x62\xf1\xdf\xde\
+\xbd\x7b\xe9\xf0\xfb\x1f\x62\xe3\x28\xd8\xb6\xeb\x3a\x0f\xeb\x30\
+\xdc\x31\x31\x5d\x08\x77\xe7\x0a\xe2\x2f\xfd\xf2\x37\xf4\xd1\xc9\
+\x53\x14\x81\x4e\x1e\x13\x13\x4d\xf1\x50\xe4\xc7\xc5\xc6\x50\x71\
+\x41\x01\xac\x5c\xb8\x83\x2f\x5f\x60\xa2\x61\x81\x38\xaa\xb9\xad\
+\xc3\xf3\x4a\x80\x2e\x09\xf3\x59\x10\xc9\x95\x1a\x58\x1f\xe3\x17\
+\x10\x29\xf6\xf9\x5e\x8e\x3a\x32\x03\x9b\x9c\x1a\xa7\x3d\xbb\xb7\
+\xd9\xf6\x97\xf1\x81\xc8\x89\x45\x7c\xc1\xc1\xae\x37\xdc\x72\x55\
+\x5f\x16\x47\x8d\x40\x9f\x77\xfa\xc4\x07\x94\x01\xff\x57\x2c\xaa\
+\x62\xfd\x47\x78\x04\x2c\xa7\xb0\x70\x70\xa0\xbf\x17\x8c\xe4\x32\
+\x7d\x7a\xf2\x28\x26\xde\x3c\x03\x72\xa0\x1f\x74\x52\x00\xd7\x99\
+\xad\xac\x70\x90\x41\x22\xb0\x62\x11\xf0\x9a\x81\x30\xb3\x68\x6a\
+\x6e\xa1\x4b\xd8\x55\xef\xc2\xa5\x2a\xaa\xae\xab\x63\x4a\x46\x3f\
+\xfe\xc1\xf7\x28\x31\x31\x51\x6c\x8c\x13\x04\x8f\xbb\x63\x62\x47\
+\xc2\xe5\x6f\xff\xc8\x88\x11\x0b\xee\xf0\x1b\x1c\xa2\xe6\xf6\x36\
+\x54\x08\x1d\x1a\xbd\x33\x28\xe0\x30\xfd\xc3\x5f\xfd\x00\xcc\x64\
+\x09\x56\xa9\xa3\x3c\x1f\xe8\x76\x38\xc0\xd1\x3d\xfe\xaa\x46\xa5\
+\x78\xc6\x46\x06\x0f\x40\x37\x24\xc4\x1d\x22\xd6\xd5\xf5\x87\x75\
+\x32\xa3\xc6\x5e\x6c\x24\x16\x46\x3e\xd8\x1e\x56\x11\xd9\x08\x33\
+\x5a\x7c\x0b\x0d\xcd\x9d\xf0\x3b\xb6\x66\x59\x1a\xc5\x04\x39\x1e\
+\xd6\x51\x6c\xf4\xb1\x50\x81\x19\x7f\x18\x36\x4f\xe3\x3c\xaf\x5c\
+\x3e\x4f\x0d\x75\x57\x28\x0b\x1e\x79\x13\x92\x52\x04\x23\x61\x11\
+\xe2\xc6\x2d\x37\x08\xdd\xc7\xa5\xca\x32\xb1\xc2\x9c\xeb\xe1\x69\
+\x60\x73\x5e\xfe\x5a\xd4\x62\x4a\x4f\xf3\x90\xf1\x25\x02\x8b\x81\
+\x80\x57\x0c\xe4\xf9\x5f\xbc\x4c\x1f\x1e\x3f\x05\x05\x5f\x17\x48\
+\xa0\x95\x10\x8a\x6e\x61\x99\xa4\x56\xb8\x6e\x60\x06\xb2\xd2\x02\
+\x13\x37\x26\x68\x4c\xcc\x14\xba\xcd\x3a\x9a\x51\xf8\x1d\x6a\x69\
+\x6e\xa2\x78\x10\x01\x6f\x3a\xb7\x27\xed\x64\xdd\x0e\xfb\xa7\x8a\
+\x08\x0d\xa6\x4e\x6c\x9d\x2a\x56\x16\x2b\x19\xa0\x5e\xf5\x4d\x2d\
+\xd4\xd0\xd0\x00\xb7\xf7\x4b\xaf\x23\x50\xaa\x31\x9f\xa3\x58\xe9\
+\x0d\x13\xde\x69\x8b\x1f\x44\x6a\xc1\x42\x4c\x64\xcb\x0f\xd8\x77\
+\x75\x0f\x43\xb6\x3f\x8c\x91\xbf\xe3\xad\x8c\x6d\x71\x17\xe9\xc4\
+\xd1\xda\x8f\xf9\x14\xc5\x0c\x84\x67\x1c\xac\x34\x4f\x4d\xcf\xa6\
+\xe6\xc6\x5a\xaa\xba\x54\x41\x0d\xf5\x55\x94\x09\x46\x12\x9f\x90\
+\x2c\x44\x8b\xb1\x10\xc3\xf2\xac\x64\x04\x6d\x37\x78\x30\x0b\xe1\
+\xfc\xd9\xd5\x7b\x08\x3c\x46\xaf\x2f\xc9\xa1\xa8\xc8\x30\xed\x37\
+\x33\x9f\xca\xcb\xb4\x12\x81\x05\x40\x80\xa9\xbf\xc7\xe1\x57\xaf\
+\xbf\x01\x07\x7e\x5d\xc2\x52\x84\xd7\x34\xb0\x48\x8a\x3f\x76\x03\
+\xf6\x75\xbe\xda\xc2\x62\x33\x0d\x3d\x1e\xec\xba\xbb\x70\x75\x9e\
+\x6d\x74\xae\x3c\x67\xfc\xcc\x58\x5d\x5c\x5e\x51\xa1\xdc\xba\xfa\
+\x8e\x60\xc8\x81\xb0\x20\x8a\x8e\x0a\x06\xe1\xd4\x8a\x6b\x58\x8c\
+\x35\x06\xaf\x05\x2d\xd8\x40\xe9\x5a\x0a\xfc\xde\xf0\x5f\xf8\x1c\
+\xcb\x5f\xb3\x96\x3e\xb3\xf3\x16\x8a\x8d\x4b\xa2\xcb\x17\xca\xa9\
+\xa7\xbb\x53\x88\xad\x98\x71\x71\x3c\x4f\x82\x05\x8a\x74\x7f\x2c\
+\x1c\x64\xc6\xf1\xc0\xfe\x9d\xb4\x69\x7d\xae\x60\x46\xec\x1e\xa6\
+\xb7\x6f\x18\xe7\x57\x5f\x5f\xf3\xa4\xfd\x32\xee\xd5\x81\x80\x57\
+\x0c\xc4\x1f\xa3\x2e\x4f\x3b\xc4\xd5\x01\xc7\xe2\xd7\x92\xf7\xdb\
+\x2e\x5c\x9d\x8f\x7d\xe3\xb1\x07\x89\x4e\x9c\xc1\xcc\xec\xcc\xb9\
+\xf3\x62\x7b\xd9\xc5\xaf\xc9\x62\x95\x60\xa0\xe4\xc4\x18\xf8\xbf\
+\x9a\xd4\x14\xc0\x33\xbf\x40\x6c\x05\xdb\x04\x31\xd6\xb5\x12\xf8\
+\x7d\xf1\xac\xa2\xb6\xfa\x22\x99\x85\x4b\x1a\x30\x12\xe8\x54\x56\
+\x17\xac\xa5\xed\x3b\x6e\xa6\x88\xc8\x28\x31\xb8\xf2\xb4\xbd\xcc\
+\x70\x92\x93\x62\xe8\xc0\xfe\x1d\xb4\xeb\x86\x22\xec\xa5\x63\xf5\
+\x2d\xc6\xfb\xc8\x7f\x7c\xe2\x02\xf5\xf4\x0e\x0a\x66\xe2\x69\xbe\
+\x32\xbe\x44\x60\xa1\x11\xf0\x8a\x81\x2c\x74\x25\xae\xa7\xfc\xd8\
+\x78\x20\x19\x22\x3e\xb6\xfe\xb1\x9b\xfd\x80\xc8\xf2\x3a\x99\xa6\
+\xe6\xc6\xab\x16\x12\x96\x10\x26\x81\x81\x04\x06\xfa\xd8\xcd\x42\
+\x20\xc1\xa3\x8e\xae\x21\x61\x81\x77\xb5\x36\x50\x3d\x70\xe2\xf7\
+\xc7\xdb\xd8\xd6\x5c\xb9\x48\x9f\x9c\xfc\x00\x8c\xe4\x12\x99\xcd\
+\xec\xdb\x8c\x84\x45\x16\x3b\x59\x54\xc7\x77\xb7\xcd\xbc\xea\x3c\
+\x3e\x2e\x12\x7a\xb9\x48\x91\x84\xdd\xba\x9f\x2d\xaf\xa1\xdf\xbc\
+\x7e\x9c\x2e\x5c\x6e\xb4\x63\x1e\xde\x94\xe1\x6e\x5d\x64\x3c\x89\
+\x80\x2b\x04\x24\x03\x71\x85\xce\x22\x3c\x63\x41\x06\x6f\xe7\xbb\
+\x79\xc3\x7a\xbb\xdc\x99\x10\x8c\x62\x5f\x92\xf2\x8a\xf3\x76\xcf\
+\xae\x96\x1b\x3c\x7a\x8e\x85\x85\x1b\x13\x40\xc5\xfb\xad\x52\x77\
+\x26\xb6\x42\x8c\xd5\xda\xaa\xdc\xba\x2a\x8f\x6c\x16\x6c\x0b\x60\
+\x22\xdc\x4e\x9e\x81\x54\x57\x5d\x80\x45\xd6\x87\x54\x5f\x5b\x85\
+\xb5\x1b\x66\x41\xe8\xed\x06\x09\xb6\x84\xce\x4f\x38\x0d\x5b\x78\
+\x99\xcc\xfc\x2d\xd4\xd1\x2b\x6f\x1c\xa7\xe3\xa7\x2e\xd2\x30\xd6\
+\x86\xa8\xfd\x5f\x71\x3c\x76\x0d\xe3\x4d\x19\xce\x4b\x97\x4f\x24\
+\x02\xee\x23\xa0\xea\x09\xee\x27\x72\x19\x73\x46\x34\x2b\x64\xb4\
+\xf8\xc0\xbd\x0d\x9c\x92\x57\xa1\xcf\x27\xa8\xc5\xc4\x6c\x71\xe5\
+\x6d\xe0\x94\x4a\x27\x75\xb6\x92\xd8\x93\xbc\x79\x16\xb2\xa1\xa4\
+\x98\xfc\x04\x21\xd2\xd6\x8b\x95\xfd\x67\xb0\xd0\xd2\x5d\x85\x2f\
+\xdb\x06\x78\xe3\xbb\x49\x5f\x5f\x85\x28\xaa\x69\xa3\x3e\x8e\xbb\
+\xd7\xbc\xf7\x4a\x2a\x44\x30\xfa\x36\x58\xad\xb1\x42\x3c\x12\x63\
+\xb1\xe1\x01\x07\xf5\xbb\x74\xb7\x1e\x8b\x11\xef\xe9\xa7\x9e\xa2\
+\x9b\x76\xee\x14\x96\x72\x6a\x3d\x0f\x7f\x1f\xcc\x48\x4c\xa6\x31\
+\x28\xd2\xcf\x0b\x46\xc2\x56\x59\x93\xd8\x78\x4d\xcf\x48\x5d\xd5\
+\x8b\xd7\x7c\x4c\x40\xcf\x51\x51\x59\x4f\xaf\xfe\xf6\x38\x44\x56\
+\x95\x34\x34\x34\x8a\x77\xcc\xde\x99\xad\x58\x70\x59\xec\x84\x91\
+\x15\xf8\x59\xe9\x71\x74\xe7\xad\x9b\x5d\x65\x29\x9f\x49\x04\x16\
+\x0d\x01\xaf\xac\xb0\x9c\xe9\x03\x31\xf8\x84\xd5\x88\x55\x59\x38\
+\x88\xad\x6d\xcd\x66\xe7\x4e\xf5\x5c\xb7\x08\x8e\xe3\x90\xd7\xef\
+\xdf\x3d\x42\xa5\xe7\x2f\x6b\x08\x11\xe7\x1e\x8b\x95\xe8\xb7\xdd\
+\x7c\xa3\xe8\x40\xce\xf2\x61\xc2\x63\xc6\x6e\x7d\x4a\xe8\x1b\x18\
+\xb0\x59\x5f\x29\xf7\xf4\x47\x87\xa2\x00\x14\x68\x86\xde\x42\x79\
+\x26\xf2\xd1\x27\xf4\xf0\x9a\x2d\x6b\x52\xe1\x12\x3f\x23\x35\x85\
+\x6a\x1b\x9b\x6d\x79\x8b\x6c\x00\x6e\x55\x6d\x3d\xb5\xc2\xe1\x60\
+\x1a\xb6\xdb\x75\x19\x20\x2f\x6a\xeb\xec\xa3\x17\x7e\xf1\x4b\xb1\
+\xf6\x46\xa9\xa3\x48\x03\x22\xb3\x61\x6d\x31\x15\xad\x99\xdb\xa2\
+\x6b\x7a\x1a\x0b\xed\x60\x6e\x0d\x3d\x37\xbc\xc4\x0e\xa0\x3e\x00\
+\x7f\x1e\x81\x89\x66\x72\x72\x1c\xde\xc0\x15\x30\x5e\x76\x69\x33\
+\x9b\x1f\x13\xbf\x8e\xce\x21\x21\xea\xe1\x99\x98\xb3\x20\xda\xe2\
+\x63\x5d\xf4\xc7\x71\x46\xb0\x67\x06\x13\x6c\xf6\x98\x6b\x1f\xb4\
+\x4c\xd8\xfe\xf9\xc2\xdd\xc9\xcf\xcb\x23\xfe\xed\xbb\xf3\x4e\xfa\
+\xed\xe1\xc3\x98\x19\x7c\x22\xbe\x4f\x5e\x8b\xc3\x41\x61\x24\x63\
+\xf0\x8b\x75\xf9\x62\x05\x35\xf3\x3e\x20\xd8\xda\x96\x8d\x08\x66\
+\x78\xa1\xd3\xca\xb0\xe2\xbc\xf2\x62\x23\x95\x9f\xaf\xa3\xde\xde\
+\x21\x31\x0b\x61\xc6\xa1\x04\x91\x37\x16\x42\x42\xb1\x42\x99\xe9\
+\xb1\x58\xc3\x94\x81\xd9\xde\xca\x71\x57\xa3\xd4\x53\x1e\xaf\x1f\
+\x04\xbc\x62\x20\x31\xd1\x51\xd4\xde\xd5\x03\x94\xac\xcc\x82\xe1\
+\x12\x1d\x1e\xc4\xe2\xf9\x5f\xbf\x4a\xa5\x15\x17\xe9\x62\x55\x95\
+\x75\xaf\x0b\x07\xdc\x86\x47\xde\xae\x46\xf2\x9c\xd7\xd4\xb4\x81\
+\xde\x55\xdc\x9e\xab\xde\x07\x77\xa2\x49\x88\x07\x92\xe2\xa3\x69\
+\xfd\xfa\x8d\x14\x8e\xed\x69\x35\x84\x73\x26\x2e\xaf\x36\x3e\x71\
+\xa6\x82\x7c\x0e\xfd\x3b\x0d\x19\x47\xb0\x01\x56\x25\xe2\xcd\x32\
+\x14\x6b\x34\xb0\x19\x8c\xe0\x79\x64\xe7\x07\x8b\x97\xe0\x20\xb8\
+\x9b\x10\xbd\x5c\xdd\xae\x29\xb1\x71\xd6\xdf\x1f\xfa\x29\x7c\x70\
+\xc5\xd1\xa9\x33\x65\xaa\xda\xcc\x9e\x72\x1d\x94\x11\xe2\xec\x5d\
+\xc7\x67\x1c\x97\x47\xa5\xeb\xb1\xc5\x6e\x2d\x4c\x77\x55\x30\x0a\
+\x62\x6b\x1c\x35\x61\x15\x72\xe5\x9c\x0c\x84\xf3\xe9\xe8\x1d\xa0\
+\x97\x5f\xff\x9d\x5d\x41\x4c\x6c\xcf\x9e\x2d\xa5\xbf\xfe\xfe\xb3\
+\x42\x64\x66\x17\x61\xe6\x06\x13\x7b\x9a\x0e\xa7\xd7\x0e\x9f\x04\
+\xf3\xb7\x60\xf4\x0b\x37\xe2\x33\xc4\x50\x49\xc3\x4c\x60\x7c\x62\
+\x0c\xc4\xd1\x3d\x2f\xb5\xbc\xc9\x55\x1c\x5c\x78\xac\x8a\x0e\xa5\
+\xbe\x81\x71\xe4\xcb\x8b\x49\xad\x81\x57\x80\x1b\x4d\x30\xf7\x6e\
+\x6d\xa3\xec\xec\x6c\xe0\xe0\x23\x46\xd7\x06\x83\xf6\xdd\x4c\x4d\
+\x4e\xc0\x05\x7b\x04\x7d\x52\x5a\x8f\x2d\x8c\xbb\xa8\x11\xbe\xa0\
+\xf4\x33\x1a\xce\x91\xef\x59\xa6\x97\x6e\x6f\x91\x99\x66\xc0\x4b\
+\x6e\x36\x3d\xf5\xcd\x6f\xd2\xbd\x77\xd5\xd0\xeb\x60\x24\x27\x4f\
+\x9f\x46\x5d\xb0\x20\x54\xcf\x48\x46\x47\xe8\x22\xd6\x80\xf0\xfb\
+\x56\xcf\x58\x94\x7c\x94\x23\xe3\xd0\xd9\x3d\x40\xad\x1d\xbd\xe2\
+\x0b\xd4\x33\x0e\x9e\x65\x32\x23\xce\x4c\x8b\xa5\xa2\xc2\x0c\x2c\
+\x84\xb5\xea\x47\x94\xf4\xf2\x28\x11\x58\x0e\x04\xbc\x12\x61\xdd\
+\xb2\x7b\x97\x4d\xa4\xa3\xa9\x34\x08\x5a\x43\x53\x2b\xfd\xfe\xbd\
+\x23\x62\x4d\x03\xbe\x78\xcd\x63\xbe\xe0\x4e\x10\x17\x13\x09\x62\
+\x3d\xd7\x0a\x60\xb0\x00\x74\x46\xfd\xcf\xda\x41\xa7\xb1\x81\x4f\
+\x97\xc8\x3b\x2d\x35\x99\xc2\xc3\x42\x79\xe8\xa7\x2d\x0b\x65\xf3\
+\x8e\x86\xef\x7c\xf8\x11\x9d\x2c\x2d\xb5\x8e\x00\xb5\x31\x44\x9a\
+\x50\xf8\x1e\x8a\x89\x89\xc1\x2e\x6f\x3e\x20\x0a\x59\xf6\xf9\x88\
+\x3a\x63\x17\xc4\xca\x0b\xf4\xf6\x91\xa3\xd8\x00\x88\x67\x32\xba\
+\x76\xa1\x6c\x0b\x88\x64\x02\x56\xb8\x7b\x12\x58\x0f\x82\xf1\xb9\
+\x5d\x12\xf6\x68\x5c\x7a\xb6\x1c\xc5\xd8\x3f\xd3\x47\x66\x3c\xf5\
+\x18\xf1\x35\xe3\x34\x34\x38\x20\x76\x3d\xd4\xa7\xd1\x5e\xb3\xf9\
+\xb5\x2f\xf6\xdf\x9e\x84\xdc\xde\x9a\x4e\xfb\x1c\x0b\xd8\x20\x2a\
+\x19\x1b\xee\xc6\x4a\xeb\x24\xfd\x23\x97\xd7\x29\x29\x31\x76\xcf\
+\x99\xe0\xfb\x07\xcc\x8a\xb1\x98\xb0\x46\x45\xda\x9b\xfd\x72\x42\
+\x6e\x7d\x57\x8f\x91\xaa\xeb\xbb\xb0\x20\x75\x42\x7c\x3b\xea\x0c\
+\x59\xec\x36\x62\x1c\xa0\x60\x28\xec\x97\x2b\xe4\xe5\xe6\xd2\xff\
+\x79\xe2\x09\xfa\xd1\xf7\xbf\x4f\xdb\xb7\x6e\x16\x58\xa9\x3d\x52\
+\xf3\x80\x87\x07\x4b\xee\xbd\x4b\x0c\x2c\xd4\x83\x2b\xa4\x15\x3a\
+\x0e\xdc\x4b\x07\xe3\xb8\xfb\x8e\xcd\xc2\x53\xaf\x64\x1e\xcb\xf5\
+\xb6\x65\xb9\x7a\x04\xbc\xea\x79\x77\xdc\xb2\x07\x1b\x1e\x85\x83\
+\x66\xdb\x27\xb7\x11\x34\x10\x36\x7d\xe0\x8e\x65\x98\x30\xc1\x75\
+\x48\x91\xe8\x68\xfa\xe7\xee\x5e\x73\xa7\xe4\x1f\x07\x56\xd8\x6e\
+\xdb\xb8\x41\x74\x52\xbb\xf4\x20\xc0\x56\xe2\x6a\x5f\x17\x9e\x69\
+\xf0\x68\x38\x27\x2d\x89\x52\x52\xd2\x45\xd2\x9b\x76\xdc\xc0\x77\
+\x1d\x33\x11\x10\x3e\xce\xcb\x8e\x79\x20\x25\xb7\x2b\x02\x44\x6c\
+\x6d\x89\xbd\x62\xdc\xae\x4e\xaa\x1b\x79\x39\x39\x98\xd5\xc4\xda\
+\xe3\x88\x7a\x5f\xae\xa9\xa7\x0e\x6c\x4e\x34\x9f\xe0\x6a\x96\xa7\
+\xce\x97\x89\x1b\x33\x1c\x7e\x77\xfa\xc0\x79\xf0\x06\x50\x01\x3e\
+\x23\x94\x93\x93\xaf\x7f\xec\xf2\x3a\x0d\x62\xac\x29\xb8\x10\x51\
+\xde\x95\x12\x99\x09\x7f\x7b\x47\x3f\x66\x3b\xd6\x99\x49\x56\x46\
+\x3c\xce\xcd\xf6\x38\x20\xc1\x24\x66\x22\xbc\x8f\x87\x23\x17\xfe\
+\x7e\x58\xed\x3e\xd8\xd7\x84\x85\x7c\x9e\xd5\x4b\xa9\xc7\x42\x1e\
+\xd7\xe4\xe7\xd3\x9f\x3e\xf9\x14\xfd\xd5\xf7\xbe\x47\xdb\x37\x6f\
+\x16\xdf\xa3\x9a\x91\x78\x56\x96\x95\x71\x30\x33\x49\x07\x13\xde\
+\x77\xfb\x66\xba\x19\x2e\xde\xd9\x5b\xaf\x0c\x12\x81\x95\x84\x80\
+\x3d\x07\x70\xa3\x76\xb1\xb1\x71\x74\xff\xdd\x77\x58\x09\xce\x0c\
+\x21\x9f\x2b\x19\x33\x1b\x0b\x88\x41\x6e\x5a\xbc\xc7\x0e\x03\xe7\
+\xca\xfb\x81\x7b\xee\xa6\x08\x38\xeb\xf3\xa4\xc3\x32\x11\x0b\xf1\
+\x99\xa0\x3b\x6e\xbb\xdd\xe6\x5a\x7c\xfd\xba\x12\xda\xb9\x65\x13\
+\xf2\x71\x1f\x16\x2e\x73\x0a\x5e\x5d\xb7\x6d\x58\x47\x6b\x0a\x3c\
+\xdb\xb6\x37\x04\xe2\xb7\x92\xc2\x02\x3b\x86\xc5\x84\x7c\x00\xbe\
+\xc5\x2a\x2f\x5c\x98\xab\xe9\x8b\xfa\x9c\x7d\x37\xf1\x2c\xa0\xa5\
+\xb6\x94\x76\xef\xd9\x6d\xc3\xc9\xdd\x42\x63\x30\xd3\x8c\x0c\x0f\
+\x02\x31\xd5\x4a\x4a\xd9\x6d\xfd\xd0\xf0\x04\xb5\xb5\x59\x17\x15\
+\x66\x65\x62\x37\xbf\x68\x3f\x8c\xb6\xfd\xed\x98\x8d\xb3\xb2\x58\
+\x89\xdc\x8b\x0d\xa0\xa2\xc3\x0d\x1e\xe3\xee\x2c\xcf\x85\xb8\x5f\
+\xb8\x66\x0d\x3d\xfd\xbf\x9f\xa2\xbf\x78\xe6\x7b\xc2\xd2\x4e\x0c\
+\x76\x3c\xf8\x9e\x98\x29\xb2\x28\x34\x05\x7b\xc2\xdc\x79\xdb\x26\
+\xba\x65\xcf\x3a\x4a\xc0\x76\xb6\x32\x48\x04\x56\x22\x02\xee\x53\
+\x4a\x5d\xed\xef\xb9\xeb\x2e\xba\x7d\xe7\x36\x31\x2a\x17\xb2\x5d\
+\x67\x8c\x04\xf7\x85\xf2\x13\x22\x9e\xf8\x30\x1f\x7a\xe4\xf3\x8f\
+\x68\xfc\x11\xb1\xeb\x6a\x66\x2e\x9e\xfc\x58\xb2\xa3\x16\x09\x64\
+\x64\x66\xd2\xd7\x1e\x3d\x48\xa1\x01\x90\x33\x83\xe8\x39\x9a\x19\
+\x59\xab\xcf\x3a\x0f\x34\x19\xe5\xf9\x8e\x0f\xd3\x3d\xb7\xdf\x42\
+\x37\xec\xbc\xd1\xd6\x32\x16\xa7\x7c\x15\x7b\xbd\x17\x64\x26\x0b\
+\xa2\x6e\x65\x24\xd6\x99\x8e\x2d\xd2\xcc\x89\xa8\x2f\xdc\x83\x4f\
+\x99\x47\xa8\x18\xdb\x8d\x3e\xf4\xf0\x17\x34\x3e\xac\x58\x69\xea\
+\xa8\x4d\x5c\x77\x75\xd8\xb2\x61\x03\x2e\x79\x3f\x6c\x2d\x06\xec\
+\x4b\xea\x34\xf6\x4c\x57\x02\xb7\x57\x1f\x67\xae\x6b\x35\x46\xac\
+\x13\x62\xc3\x02\xc6\xc7\xd5\x8f\x5d\xaf\xb3\xbc\x9d\x09\xb4\x71\
+\x18\x5b\xa7\x56\x1e\xa1\x5d\x3b\x37\xd1\xc6\x4d\xdb\x94\xaa\xcc\
+\x60\x0f\x3d\x96\x2e\x2f\x4e\xab\x36\x97\xe2\x7c\x92\x41\x08\x99\
+\x31\xa8\xe3\x32\x51\x0d\x0c\x0e\x87\x0b\x19\xab\x18\x92\x71\xdf\
+\x71\xc3\x5a\x9a\x1e\x87\x5e\x0d\xcf\x9c\x99\xa6\x72\x3a\xce\x87\
+\xf7\x37\xef\xee\x6c\x24\x63\xdf\x65\xfa\xec\xfd\x07\x34\xb8\x33\
+\xc7\xe3\x7a\xa8\xcb\xe3\x73\xc6\x6a\x29\xc3\xda\xa2\x42\x7a\xe6\
+\x3b\xdf\xa1\x1f\x7e\xf7\x19\xda\xb4\x6e\x9d\xea\x7b\x72\x5c\x0b\
+\x66\x1c\x5c\xc7\xe4\xa4\x28\x30\x8e\xcd\x74\xfb\xcd\x1b\xb0\x9e\
+\x66\x65\xb8\xe3\x77\x5c\x63\x79\x57\x22\x00\x0b\x50\x6f\x41\x60\
+\x87\x7f\x5f\xff\xea\xff\xc2\xc6\x45\x11\xf4\xd6\x91\x0f\xa9\x6f\
+\x70\x8c\x7c\xd9\x4d\x3a\x3a\xb9\x2d\xb0\x68\x04\xa2\x09\x3f\x28\
+\x48\x8b\x33\x92\xe9\xe0\xc1\x2f\xd2\xa6\x2d\xdb\x6d\x8f\xf9\x24\
+\x09\x22\x9c\x8a\x4a\x28\xbb\x75\x84\x55\x13\x49\x75\xc1\xd1\xb0\
+\x7c\x0b\x4c\x49\x75\x13\xa7\x7b\x6e\xbc\x89\x82\x50\xfe\x0b\x2f\
+\xbf\x4c\x0d\xad\x1d\xd8\x60\x09\x44\x43\x1d\x09\x09\x59\x64\xe5\
+\x03\x71\x48\x7c\x64\x08\xdd\xf5\xd9\xfb\xe9\xde\xfb\x1e\x04\xa1\
+\x44\x9d\x55\x81\xfd\x16\x7d\x1b\x32\xed\xe7\x5f\x7c\x01\x4a\xf8\
+\x32\x1a\x83\x62\x54\xcf\x20\x59\x9c\x62\x99\x34\x51\xa8\x9f\x85\
+\x36\x6f\x2a\xa2\xcf\x3f\xf2\x18\x36\x0e\xca\x50\xe5\x62\x6d\xd7\
+\x99\xb2\x51\xec\xb1\x3d\xdb\x30\x26\xe1\x41\xd0\x43\xab\x4d\x8a\
+\x99\xd0\x64\x26\x27\x60\x01\x61\x3d\xf4\x21\xb3\xd8\xc1\x86\x80\
+\x6a\xaa\xab\x60\xc2\x39\x48\x11\x70\x41\x1f\x19\x16\x44\x3d\x7d\
+\x7d\x9a\x38\x9a\x02\x75\x17\x16\xa4\xf7\x99\x9e\xd5\x33\xf1\x7e\
+\xda\x13\x60\x9a\x46\xa3\x11\xa5\xcc\x96\xa3\x4b\x26\x98\xc3\x38\
+\x98\xa2\xc9\xd8\x03\xdd\x82\x85\x1e\xf8\x1f\xfb\x69\xd3\xe6\xed\
+\x78\xad\xb3\x69\x84\x97\xd8\xe9\x31\xcc\x00\xb4\xab\xca\x79\xa6\
+\x31\x6e\xb2\x32\x05\x25\xdf\xec\x8c\x44\xec\xff\x72\x09\x3a\x16\
+\x88\xa8\x94\x9b\x38\xf2\xde\x1c\x55\x66\x0b\x6d\xff\xcc\x26\xa1\
+\x60\x66\xb7\xfd\x77\xde\xb1\x83\xde\x81\xe5\x5d\x4f\xd7\x04\x05\
+\x85\x46\x43\xf9\x8f\x59\xa5\xaa\xdc\x49\xac\x23\x31\x0e\x76\x53\
+\x5f\x67\x2d\x45\x45\xf8\xd0\x97\x1e\xfb\x8a\x1d\xee\x01\xfe\xd0\
+\x8f\x61\xef\x91\x69\xb6\x7a\x9a\x09\x3c\x4b\x1c\x1d\xea\xc6\x55\
+\x9e\x72\x6b\xc9\x8e\xeb\x60\x09\xc7\xbf\x32\xb8\xa8\x79\xed\x8d\
+\x37\xe8\xdc\x79\xac\xf1\x51\xb5\x89\x99\x2c\x5b\x96\x25\x26\x60\
+\x0b\xdb\xa2\x4c\x4a\x49\xb6\xd7\x1b\x2d\x59\x65\x65\x41\x12\x01\
+\x0f\x11\xf0\x9a\x81\x70\x39\xc1\x10\xc1\x3c\xfa\xe8\x63\xb4\x6d\
+\xeb\x36\x3a\x75\xf2\x18\x55\xd7\xd6\xc1\x65\xb7\xd1\x56\x05\xb6\
+\x2c\x89\x83\x82\x7a\x1d\x74\x03\x5b\xb7\xef\xc0\xde\x08\x71\xb6\
+\x67\xca\xc9\x43\xf7\xdd\x4b\xc9\xab\x42\x85\x92\x5b\x4d\x2c\x94\
+\xe7\xfa\x23\x8f\xaa\xb9\xd3\x15\x14\x14\xeb\x1f\x61\x3b\xd2\x1d\
+\xd8\x7b\x21\x8f\x4e\x1e\xfb\x88\xce\x55\x9e\xa7\xfe\xbe\x7e\x8d\
+\xdb\xf8\xf0\xd0\x50\xca\xcb\xc9\x86\x9b\x89\xdd\x94\x97\x5f\xa0\
+\x21\x4e\xea\xcc\x12\x12\x93\xe9\x5b\x4f\x3c\x49\x7b\xce\x9c\xa6\
+\x4f\x4e\x7f\x42\x2d\x58\xf8\xa6\x36\x49\x66\x62\x9c\x82\xd5\xe4\
+\x9b\xb7\x6e\x17\x23\x73\xc6\x41\x1f\x1e\xb8\x77\x1f\xc5\x85\x07\
+\xc2\x8d\xf8\xa4\xad\x1c\x56\x20\x87\x60\x75\x72\x5a\xfa\x2c\xb3\
+\x09\x0f\x0f\xa7\x6f\xfd\xd1\x57\xe9\xd3\xd3\x27\xa1\x87\xe0\xd9\
+\x98\x95\xcc\xf2\x39\x3b\x1d\x64\x1d\x04\x2b\xf9\xff\xe4\xb1\x2f\
+\x52\x0d\xf6\x98\xe0\xd1\xb4\x3b\x81\xc5\x44\xbc\xbb\x5e\x08\xda\
+\xcc\x21\x12\x8e\xf8\xd6\x15\xc6\x51\x43\x2d\xdc\x60\xb8\xc8\x83\
+\xd9\x5d\x70\x10\x36\x43\xca\xd9\x4e\xb9\x79\x6b\x6c\xe9\x45\x26\
+\x33\x7f\xd8\x42\x68\x7d\x71\x2a\x66\x0c\xed\xa8\xdf\x2c\x5b\xb0\
+\xc0\x91\xe2\xaa\x55\xd9\xc2\x4d\xba\x12\x3f\x09\xcc\x71\xeb\xfa\
+\x34\x30\xc3\x2b\xda\xb8\x53\x26\x4a\x4c\xc6\x4c\x4f\xc5\x60\xb9\
+\xbe\x07\x1f\x7e\x90\xca\x80\xfb\xc5\x0b\x97\xc0\xa0\x06\xc8\x3c\
+\x0e\x3d\xc9\x0c\x0f\x36\x18\x2c\x14\x11\x0e\xe6\xbf\x77\xb7\x53\
+\xdc\x0b\x57\xa7\x50\x7f\x27\xd6\x5f\x88\x99\xad\x82\xe5\x04\x05\
+\xa7\x24\x78\xbd\xe7\x87\xd2\x96\xf9\x1c\x37\x94\x94\xc0\xea\x6e\
+\x2d\x9d\xc1\xac\xf2\xf5\xc3\x6f\x50\x55\x75\x3d\x06\x38\xbe\xd0\
+\x81\x45\x52\x51\x41\x3a\xa5\xa5\xb0\x2e\x6c\x16\xcb\xf9\x94\x25\
+\xd3\x4a\x04\x96\x0a\x01\xc3\xa1\x43\x87\xa6\x6f\xdd\x77\x70\x41\
+\xca\x63\x37\x0e\x13\x30\x0b\x55\x44\x27\x3e\x98\x92\x07\x05\x05\
+\xcd\x4b\x61\xee\x6d\xc5\x78\xd5\xb3\x09\xab\x83\x95\x3d\x45\xb8\
+\x73\xf2\xa6\x3f\xae\xd6\x1e\x38\x2b\x6b\x0c\xd6\x5c\xac\xcc\x55\
+\x82\x2f\x3a\x3e\xfb\x3c\x72\x57\x49\xad\xa4\x93\x47\xcf\x10\xb8\
+\x56\x71\xe7\xfe\xf1\xc9\xa7\x67\xb0\xe9\x5a\x1c\xad\xce\x4b\x97\
+\x8c\xc3\xb3\xcf\x42\xc6\x5e\x21\x08\xbc\x77\xf8\xbf\xbd\x17\x61\
+\x39\x6a\x03\x13\x67\x6f\x08\xb4\xa3\xbc\xe6\x7b\x8f\x4d\x4f\x5d\
+\xee\x78\xe8\x41\x01\xd6\x4d\x87\x66\xc5\x41\x1e\x24\x95\x51\xe7\
+\x81\xc0\xb5\x8a\x3b\x0f\x66\xd8\xe4\x57\x06\x89\xc0\xd5\x8e\x80\
+\x4e\x93\x70\xb5\x37\x47\xd6\x5f\x22\x20\x11\x90\x08\x48\x04\x96\
+\x0a\x01\xc9\x40\x96\x0a\x69\x59\x8e\x44\x40\x22\x20\x11\xb8\xc6\
+\x10\x90\x0c\xe4\x1a\x7b\xa1\xb2\x39\x12\x01\x89\x80\x44\x60\xa9\
+\x10\x90\x0c\x64\xa9\x90\x96\xe5\x48\x04\x24\x02\x12\x81\x6b\x0c\
+\x01\xc9\x40\xae\xb1\x17\x2a\x9b\x23\x11\x90\x08\x48\x04\x96\x0a\
+\x01\xc9\x40\x96\x0a\x69\x59\x8e\x44\x40\x22\x20\x11\xb8\xc6\x10\
+\x90\x0c\xe4\x1a\x7b\xa1\xb2\x39\x12\x01\x89\x80\x44\x60\xa9\x10\
+\x10\x2b\xd1\x79\x41\x88\x0c\x12\x01\x89\x80\x44\x40\x22\x20\x11\
+\xf0\x04\x81\xff\x0f\xdc\x68\x04\x14\x8a\xb3\x10\x5f\x00\x00\x00\
+\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
+\x00\x00\x0b\x3d\
+\x89\
+\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
+\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
+\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
+\x20\x63\x48\x52\x4d\x00\x00\x7a\x26\x00\x00\x80\x84\x00\x00\xfa\
+\x00\x00\x00\x80\xe8\x00\x00\x75\x30\x00\x00\xea\x60\x00\x00\x3a\
+\x98\x00\x00\x17\x70\x9c\xba\x51\x3c\x00\x00\x00\x09\x70\x48\x59\
+\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\
+\x01\x59\x69\x54\x58\x74\x58\x4d\x4c\x3a\x63\x6f\x6d\x2e\x61\x64\
+\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\x00\x00\x00\x3c\x78\x3a\x78\
+\x6d\x70\x6d\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\
+\x61\x64\x6f\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\
+\x78\x3a\x78\x6d\x70\x74\x6b\x3d\x22\x58\x4d\x50\x20\x43\x6f\x72\
+\x65\x20\x35\x2e\x34\x2e\x30\x22\x3e\x0a\x20\x20\x20\x3c\x72\x64\
+\x66\x3a\x52\x44\x46\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\
+\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
+\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\
+\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x3e\x0a\x20\
+\x20\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\
+\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\
+\x22\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x78\
+\x6d\x6c\x6e\x73\x3a\x74\x69\x66\x66\x3d\x22\x68\x74\x74\x70\x3a\
+\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x74\
+\x69\x66\x66\x2f\x31\x2e\x30\x2f\x22\x3e\x0a\x20\x20\x20\x20\x20\
+\x20\x20\x20\x20\x3c\x74\x69\x66\x66\x3a\x4f\x72\x69\x65\x6e\x74\
+\x61\x74\x69\x6f\x6e\x3e\x31\x3c\x2f\x74\x69\x66\x66\x3a\x4f\x72\
+\x69\x65\x6e\x74\x61\x74\x69\x6f\x6e\x3e\x0a\x20\x20\x20\x20\x20\
+\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\
+\x6f\x6e\x3e\x0a\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\
+\x3e\x0a\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x0a\x4c\
+\xc2\x27\x59\x00\x00\x09\x51\x49\x44\x41\x54\x58\x09\x95\x57\x6b\
+\x70\x55\x57\x15\xfe\xce\xb9\xe7\xbe\xef\x4d\x72\x13\xf2\x7e\x11\
+\xf2\x20\x21\x29\xd0\x41\x5b\xa4\xe0\x04\xc1\xc1\xc1\xa1\xb5\x3f\
+\x88\xd5\x19\x3b\x76\x70\xa0\x33\xd5\xb6\xda\xd1\x3f\xda\xe1\xf2\
+\x43\x7f\xd8\x56\x07\x6a\xb5\xb6\xe0\x58\x47\xb4\x93\x5a\x67\x1c\
+\x3a\xa3\x8e\x55\x62\x2b\x4a\xb1\xad\x14\xcb\x9b\x24\x84\x67\x13\
+\x48\xc8\xe3\xbe\xcf\xb9\xe7\xf8\xad\x7d\x72\x12\xc0\x41\x70\xe7\
+\x71\x5e\x6b\xaf\xf5\xad\x6f\x7d\x7b\xed\x73\x80\x3b\x1c\x8e\xe3\
+\x68\xc9\xe4\x7e\xc3\x33\xe7\xb5\xfe\xf2\xab\x7f\xdd\xba\x7b\xef\
+\xc0\xd7\xfb\xfb\xfb\xc3\xde\x7d\xb1\x11\x5b\xef\xfa\x76\xc7\xdb\
+\x1a\x8a\xb3\x1d\x03\x03\xbe\xe4\xda\xb5\x96\x38\xeb\xef\xff\x7b\
+\x78\xca\xcc\x7d\xc9\x76\xb4\x27\x43\x91\x68\x97\xa6\x69\xc8\xa6\
+\x67\x86\x75\x68\xbb\xf4\xf8\xf4\x9e\x2d\x0f\x3c\x30\x23\x76\xc9\
+\xfd\xfb\x8d\xed\xbd\xbd\x45\x3e\x77\xe4\xfa\x56\xe3\x96\x00\x54\
+\xe0\x1d\x0c\x9c\x9c\x0f\x3c\x6d\xe6\xb7\x14\x1d\x3c\x11\x0a\x47\
+\xda\x8a\x45\x0b\x05\x33\x4f\x50\x1a\x0c\xc3\x6f\xf8\xfd\x01\xe4\
+\x32\xa9\x73\x9a\x8e\x9d\xb5\xf1\xf4\x8b\x9b\x36\x6d\xca\x48\x50\
+\x61\x64\xfb\xf6\x5b\x03\xf9\x2f\x00\x37\x07\xde\xb7\xef\xdd\xc8\
+\x58\x6a\xe6\x11\xcb\x76\xbe\x11\x0e\xc7\x16\x99\x66\x01\x56\xd1\
+\xb4\x34\x30\x14\xa0\x3b\x8e\x0d\x38\xb0\x1d\x5d\xb3\x0d\xdd\x30\
+\x02\x81\x10\xb2\xd9\xd4\x59\xdd\xc1\xb3\xa5\x81\xe0\xcf\xfa\xfa\
+\x56\x65\xff\x17\x90\x39\x00\x37\x07\xee\xff\xd3\xbb\xa5\x33\x57\
+\x52\xcc\xd8\xf9\x6a\x30\x1c\x6d\x31\xcd\x3c\x98\xb5\x05\x47\xd3\
+\x2d\xcb\xd2\x4d\xd3\x82\xcf\xa7\x21\x18\x0c\xc1\xb6\x8b\x12\x43\
+\x86\x4d\x42\x6c\xbf\x11\x24\x90\x00\x32\x99\xf4\x90\x66\x3b\xbb\
+\xca\x82\xd8\xd3\xd7\xb7\x36\x25\x06\x52\x9a\xe4\xda\x5e\x4e\x70\
+\x4b\xe3\x01\x90\xa3\xaa\xd5\x1b\x6f\x1f\x49\x8c\x5d\x9a\x7c\xd4\
+\x2a\x16\x1f\x0b\x85\xa2\xf5\x12\xb8\x60\x31\x63\x07\x3a\xeb\xa9\
+\x4f\x4d\x65\x30\x39\x95\x47\x26\x3d\x83\x73\x17\x4e\xa1\xb9\xb1\
+\x06\x4b\x96\x2c\x05\x41\x89\x7f\x88\x26\x38\x6c\x5a\xdb\x7e\x9f\
+\x5b\x9a\x6c\x26\x35\xa2\x39\xfa\xae\x90\x93\xff\xe9\xc3\x0f\x6f\
+\x48\x8b\x81\x37\x68\x2d\x8a\xd5\x9c\xa3\x47\xa7\xca\x3f\x38\x7d\
+\x7c\x6b\x2a\x9d\x7d\x3c\x18\x8a\xd6\x16\x0a\x39\xf2\x5a\xb4\x1c\
+\x92\x4b\x63\x6a\x4c\xaa\xad\xe1\xca\x95\x69\x1c\x38\xf8\x37\x9c\
+\x1a\xfa\x10\x99\x7c\x06\x4b\x17\xf7\xe0\xb3\x9f\xf9\x1c\x42\xa1\
+\x00\x7c\xba\x0e\x93\xda\x80\xc3\x5c\x04\x08\xeb\xa3\x11\x88\x8f\
+\xa5\xf1\x07\x82\xa2\x91\x11\xc3\xe7\x7b\xa6\xa1\xbc\xec\xe7\x1b\
+\x36\x2c\x13\x20\x9a\x0f\xd8\x21\x60\x74\x5f\x49\xc5\x9b\x89\xf2\
+\xda\x2f\x07\x82\xa1\x78\xc1\xcc\x59\x8a\x6d\x80\x24\x4b\x52\x12\
+\x9a\x67\x1c\x91\x48\x00\xa5\xf1\x10\xba\xda\x17\xa3\x2a\x51\x86\
+\x54\xc6\x44\x30\x50\x81\xb1\xd1\x51\x4c\x4e\x8e\x22\x5e\x52\xca\
+\xd2\xd0\xad\x0b\x42\xe3\x51\x2f\x16\x8b\x76\xd1\xb6\x8a\x14\x6a\
+\x39\xdd\x6f\x3c\x39\x7c\xa1\xe5\xcf\xbf\xff\xd5\xeb\x52\x76\xb5\
+\xae\x93\xc9\x7e\xe3\x95\x7d\x3f\x5e\xf0\xe6\x5b\x7f\x41\xef\x7d\
+\x6b\x8b\x9d\x9d\x3d\x46\x2c\x5e\x02\x01\xc1\xc9\x2a\xb0\x80\xe0\
+\x04\x75\xde\xd4\xd4\x0a\x9d\xd9\x2e\x6a\x5d\x8c\xd1\xd1\x4b\xb8\
+\x3a\x9e\x45\x38\x1c\x22\x38\x1d\x86\xcf\x50\x76\xca\x96\xe6\xba\
+\xae\x89\x2d\x75\x53\xd4\x27\xc6\x27\xcd\x74\xf6\xaa\x3f\x9b\xcb\
+\x2e\x10\x47\xf4\xe9\x28\x00\x47\x71\x0c\x95\x89\x0a\x73\x72\x66\
+\x06\xbb\x7f\xfb\x6b\xad\xa7\xb1\x19\xab\x57\xae\x41\x7b\xc7\x12\
+\x44\xa3\x71\x55\x5f\x11\x9a\x62\x82\x40\x4c\xab\x00\x96\x46\x5d\
+\x57\x55\xd5\xa2\xbc\x5c\x04\xa9\x2b\x50\x9e\x16\x04\xa0\xd8\x33\
+\x30\x66\xa6\xb3\xb8\x36\x95\xa5\x96\x2c\x3d\x14\x0a\x53\x1e\xf0\
+\x54\xeb\x32\x20\x68\x48\x91\x26\xd4\x75\xd6\xd6\x23\x9d\x49\xe3\
+\x27\xaf\xed\xc5\xdd\x0b\x5b\xb0\x8a\x40\xda\xda\x3a\x99\x5d\x4c\
+\x56\x01\x15\x4f\xa1\xd3\xf1\x6c\x45\xd4\x3d\xc9\x52\xee\x0b\x5b\
+\x54\x2a\xeb\xa9\x33\xb0\x8d\xe9\x19\x0a\xf6\x5a\x1a\x79\xb5\x62\
+\x84\x1d\x25\x25\xc9\x5d\x74\x25\xc3\x65\x40\x9d\x72\xa2\xd0\x26\
+\x19\x18\x86\x81\x9e\x86\x06\x4c\xce\x4c\xe1\x85\x57\x7f\x81\x8f\
+\xb7\xb5\x61\xd5\xbd\x6b\x14\xe5\xa1\x50\x84\x36\xa6\xb2\x55\x40\
+\x38\x59\x82\x4b\xc6\xf2\x57\x64\xe0\xa9\xe9\x34\x57\x4a\x06\xb9\
+\xbc\xa9\x98\xf1\xfb\xa5\x2c\x36\x45\xed\xb8\x4b\xed\xba\x56\xad\
+\x4a\xd0\x4d\x27\x43\xb3\xf5\x15\xa7\x85\x42\x1e\xac\x13\x22\xe1\
+\x08\xba\xeb\xea\x31\x36\x3e\x8e\xe7\x7e\xb9\x07\xab\x3b\xbb\x08\
+\xe4\x93\x58\xd8\xd2\xae\xd6\xbf\x34\x25\x19\x42\xbf\xcd\x16\x39\
+\x95\x62\xc6\x93\x0c\x5c\xb0\x48\x90\x03\xbf\xdf\xa7\x80\xe6\x0b\
+\xa6\x9b\xbd\x94\x45\x26\x78\xc8\x79\x3a\xab\x01\x72\x21\x0f\xf8\
+\x38\xcf\x75\xdf\xc1\xda\x4b\xcd\x4f\x9e\x38\xa6\x6c\x23\x91\x08\
+\x96\xd5\x37\xe2\x12\x95\xfe\xdc\x2b\x2f\x61\x4d\xcf\x5d\x58\x79\
+\xcf\x6a\xb8\x62\xd4\x98\x71\x06\xd7\x26\xb3\xc8\xe5\x0a\x64\x41\
+\x43\x60\x36\x70\x3a\x53\x80\xdf\xf0\xa1\xae\x3a\xa1\x18\x29\xb0\
+\x14\x0a\x81\xb7\xa4\x3c\x00\x6e\x68\x2a\x96\xd9\x9b\xa6\x89\x78\
+\x69\x02\x75\x0d\x4d\xa8\xaa\xa9\xc7\x85\x91\x41\x8c\x8c\x0c\x8b\
+\x09\x75\x10\x45\x37\xc1\x9c\xbd\x70\x1e\x03\x47\x9e\xc7\xc6\x7b\
+\x57\x63\xc5\x8a\xf5\xd4\x4c\x5e\x01\x95\x8c\x65\x64\x18\x58\x08\
+\xed\x6c\xab\xc3\xd2\x9e\x16\x34\x35\x56\xe3\xc4\xc9\x11\xbc\xf1\
+\xc7\xf7\xb8\x5a\xc2\xc2\x8a\xbb\xa6\x69\xab\x18\x50\xb3\xf8\x4f\
+\x98\x91\x1f\x11\x1b\x1b\x01\xca\x2a\x2a\x91\x28\xaf\x44\x5d\x63\
+\x0b\xce\x9d\x1d\xc4\x85\xf3\x23\x8a\xee\x18\x9d\x48\x69\x86\xcf\
+\x8d\xb0\x0b\x16\xa8\x19\x09\xec\xb0\x74\xb2\x41\x15\xd1\xde\x52\
+\x83\xe5\x4b\x19\xb8\xa1\x06\xfe\x80\xa1\x74\x15\x8f\x45\x60\xf0\
+\x5c\x74\xc0\x38\x37\x02\x10\x0d\x0c\xcf\xdf\x53\x2c\x89\xa0\xc6\
+\x2e\x5f\x42\x98\x59\x0b\x13\xe5\x0b\xaa\xd0\xd4\xbc\x08\x83\xa7\
+\x8f\x63\x62\x62\x9c\x4e\x80\x28\xd9\xf0\x7c\x59\x45\x1b\x95\x15\
+\x25\xf8\xc4\x3d\x5d\xcc\xb8\x8a\x1a\xf1\x63\x9a\x62\x3c\x79\xfa\
+\x3c\xda\x5a\xeb\x15\x40\x87\x3a\x91\xc8\xd7\x55\x80\x2d\x96\xe3\
+\xa8\x7a\x2c\x39\xf0\x47\xb8\x13\x26\xb8\x52\x6c\xdb\xc2\x91\xf7\
+\x0f\xe2\xf4\xb1\x23\xac\x6f\x96\x4c\x2c\x44\xf3\xc2\x56\xcc\xa4\
+\x52\x4a\xf1\x0e\xd5\x2f\x1e\xa5\xee\x52\xef\xf6\xd6\x5a\xb4\xb7\
+\x35\x90\x09\x13\xff\xfe\x70\x08\xaf\xff\xee\x00\xf6\xfd\xe1\x9f\
+\x8a\x01\x85\x58\x45\x50\xc1\xbc\x65\x78\x5d\x09\x24\x30\x7f\xc5\
+\x59\x81\xc1\xc4\x79\x4d\x5d\x13\xca\x12\x0b\xa8\x83\x21\xbc\xf7\
+\x8f\xb7\xd0\xda\xd1\x05\x9b\xcb\x54\xd8\x11\x73\x15\x5d\xe1\x15\
+\xc0\xec\x05\x64\x41\x1e\x5c\xfe\x68\x1c\xbf\xd9\x77\x88\x2d\x3b\
+\x88\xb2\x52\xb2\x34\x97\x33\xd1\xba\x13\x65\xb2\x1a\xf3\xcb\x90\
+\x46\xc2\x40\x28\x18\xc6\x08\xeb\x3d\x35\x7d\x0d\x8d\x6c\xb9\x89\
+\x8a\x2a\x74\x74\x2f\x53\xd9\x5f\x24\x90\x8b\x17\xcf\x21\x1e\x8b\
+\xd1\x0f\x83\x09\x62\x06\xf6\xda\xae\xb0\x21\x40\xa4\x4b\xc6\xa3\
+\x01\xee\x11\x06\x32\x59\x0a\x52\x59\xba\xec\xba\x25\xbb\x49\x84\
+\x52\x02\x71\x26\xb4\x67\x72\x69\xf4\x2c\x5f\xc1\xdd\x2d\x8c\x33\
+\x27\x8f\xb1\x96\x83\x68\x6a\x69\x43\x49\x59\x39\x16\xdf\x75\x37\
+\x22\xd1\x18\x0e\xbd\x73\x00\xb1\x58\x54\xcd\x72\x33\x12\x06\xdc\
+\x4b\xef\x7f\x26\x67\x2a\x41\x76\x77\xd6\x23\x40\x20\xc2\x96\x6e\
+\xb8\x60\x09\x65\xce\xfa\x86\x55\x20\x20\xe4\x47\x46\x75\x5d\x23\
+\x4a\x12\xe5\x18\x1f\xfd\x08\xa7\x8e\x1f\xc1\xa2\x8e\x6e\x54\xb3\
+\x4d\xab\x6e\x27\xb5\x57\xc3\xf5\x23\x14\x2b\x66\xd5\xce\xce\xce\
+\xc8\x8b\xfa\x9a\x04\xd6\xac\xec\x42\x23\x05\x29\x5b\xf5\xe4\x54\
+\x0a\x66\x81\xfb\x49\x54\xac\xe5\xd7\x1d\xf3\x00\x1c\x6e\x07\x74\
+\x1c\x61\xab\x1d\xa2\xd2\xc5\xe3\x82\xea\x5a\xd4\xb2\x1f\x54\x54\
+\x55\xab\xee\x28\xba\x10\x78\xd2\x2f\xdc\xe1\x6e\x48\x02\x5a\x6e\
+\x49\x87\xb7\xb8\x0c\xab\x2a\x13\xe8\x7b\x70\x35\x4a\xe2\x51\x8a\
+\x33\x47\x41\x0e\xe2\xc0\x3b\xc7\x09\xc4\x50\xfb\x05\xad\xdd\x2c\
+\xe9\x44\x01\x48\xd4\xd6\x12\xfb\xe1\x32\x4d\x5e\x0f\xb8\x7b\xcf\
+\x70\x57\x3c\x74\xf0\x6d\xbd\xa6\xa6\x4e\xd1\x5f\xc9\x63\x98\x6d\
+\xd9\x9b\x25\x19\x5e\x3f\x14\x03\xb3\x37\x6c\x6a\xa3\xb4\x34\xca\
+\x86\x66\xe1\xd8\x89\xb3\x38\x7c\x64\x08\xc3\xe7\xaf\xb2\xad\xfb\
+\xf9\xce\xa8\x17\x03\x81\xb0\x8f\x2d\xbc\xc4\x9b\xaf\x00\xbc\xb4\
+\x6d\x9b\xb9\xa2\xf7\xfe\xef\xc0\xb1\x9e\xd6\x7d\x46\x6b\x80\x2a\
+\x0f\xf8\xfd\xd6\xc4\xc4\x84\x3e\x36\x76\x40\xaf\xaa\xae\x86\xf4\
+\xff\xda\x86\x66\x36\x22\xb7\x99\x78\x10\x44\x80\xaa\x6c\xbc\x21\
+\x0d\x49\x4a\x34\x34\x7c\x09\xff\x3a\x3c\x88\x33\x23\x63\xdc\x0f\
+\x74\xbb\x24\x16\xb2\x75\xbe\x39\x53\x63\x81\x4c\x7a\xfa\x7d\xaa\
+\xe1\x69\x17\x80\x33\xc7\xa5\xba\xee\xde\xbc\x39\x60\x5f\x4c\x6d\
+\x21\xfd\xdf\xf2\x19\xc6\x42\x9b\xdb\x2b\x99\xb5\xf2\x85\x82\x9e\
+\xcb\xe7\xf5\xb6\xd6\x36\x3a\x0c\x62\x70\xf0\x14\x8f\x7e\x55\xae\
+\xf5\xeb\x1f\x52\x41\xb3\x54\x7b\x57\x47\x2d\xed\x75\x7c\x70\xfc\
+\x3c\x13\xd0\xed\x70\x30\x60\x33\x21\x83\x7f\xc8\xe7\x32\xc7\x35\
+\x1b\xdf\xfd\xfe\xf6\x87\xf6\x7a\xc1\x59\xb4\x79\x35\xf6\xf6\xf6\
+\x1a\x03\x03\x03\xea\xcd\x72\xc5\xa6\x4d\x91\xcc\x95\xfc\x36\x16\
+\xf6\x49\xdd\xe7\x6f\xe2\xba\xe2\x1c\xc7\xca\x66\x73\xb2\x9d\xe9\
+\x21\xbe\x09\xcb\x96\x1c\x8d\xc4\xb1\x7e\xdd\xe7\x95\x3f\x59\x5e\
+\xd2\x8a\x45\x48\xd1\x70\x90\x81\x7d\x0c\x1c\x40\x21\x9f\x39\x43\
+\x55\x3e\xf3\xb1\x4e\x9d\x6f\xc6\x7d\xea\x45\x64\x73\x7f\xbf\xef\
+\xb5\xd9\x73\x4f\x4d\x2e\x28\x42\x22\x10\x9f\x07\x64\xf1\x7d\xf7\
+\xc7\x75\x14\xbe\xc2\x77\xdc\x27\xe8\xaf\x59\xea\x4b\x30\x16\x59\
+\xf7\x71\xbf\xd0\xa2\xe1\x59\x00\xf4\xc2\x0a\x38\x7c\x29\x2d\xea\
+\x92\xb2\xc1\x8f\x94\x5c\x66\x98\xe5\xf9\x41\xcc\xce\xee\x4e\x26\
+\x1f\xc9\x49\x00\xf9\x48\xf1\x3e\x74\xe6\x02\x7a\x27\x37\x1d\x6f\
+\x00\xd2\xdd\xdb\x1b\x2b\xe6\x42\x5b\xf9\x0a\xf7\x14\xfd\xd7\x49\
+\xa3\xb1\xac\x82\x15\x09\xc7\x7c\xeb\xd7\x49\x09\x34\x06\x66\x95\
+\xfd\xfc\x28\xc9\x65\x2e\xb3\x84\xcf\xc6\xec\xe8\x8b\xc9\xe4\xfc\
+\xd7\x11\x03\x4b\xf6\x9e\x74\xe6\xc2\xdd\xcc\xc0\xdc\x83\xd9\x93\
+\x1b\x80\x2c\xef\xed\x2d\xcb\x17\xc2\x8f\x71\x93\xf8\x1a\xa3\x56\
+\x87\xf9\x15\xf4\xe9\xf5\x5f\x40\x80\x4d\x8b\x35\x1e\x27\xb0\x17\
+\x2c\x5b\xdf\xf9\xc3\x64\xdf\x84\xcc\x77\x33\x9e\xff\x08\xb9\xd9\
+\xb9\x5c\xdf\x0e\x80\x37\xe7\x06\x20\x9d\xeb\x1e\xac\xc8\x5f\x1b\
+\x7f\xb4\xbc\xb4\xf2\xf1\x75\x9f\xda\x1c\xa4\xd1\x8f\xf2\x59\xfb\
+\xf9\x9d\xdf\xfb\xe2\xa8\x4c\xb8\x93\xc0\x9e\xe3\xff\xf7\x28\x40\
+\xe6\x9a\xd7\xc6\xcd\xdf\xac\x79\xea\xdb\x7b\x9b\x3d\x27\x12\x98\
+\x75\xbf\xd3\xa4\xd4\xb4\xff\x00\x83\x16\x5e\xdc\xfa\x0f\x1d\x34\
+\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
+"
+
+qt_resource_name = b"\
+\x00\x06\
+\x07\x03\x7d\xc3\
+\x00\x69\
+\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
+\x00\x06\
+\x07\xaa\x8a\xf3\
+\x00\x73\
+\x00\x74\x00\x61\x00\x74\x00\x69\x00\x63\
+\x00\x0a\
+\x04\xc8\x47\xe7\
+\x00\x62\
+\x00\x61\x00\x6e\x00\x6e\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
+\x00\x13\
+\x0b\xa6\xbd\x87\
+\x00\x73\
+\x00\x65\x00\x63\x00\x75\x00\x72\x00\x65\x00\x64\x00\x72\x00\x6f\x00\x70\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\
+\x00\x6e\x00\x67\
+"
+
+qt_resource_struct_v1 = b"\
+\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
+\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
+\x00\x00\x00\x12\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\
+\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
+\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x27\xce\
+"
+
+qt_resource_struct_v2 = b"\
+\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
+\x00\x00\x00\x00\x00\x00\x00\x00\
+\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
+\x00\x00\x00\x00\x00\x00\x00\x00\
+\x00\x00\x00\x12\x00\x02\x00\x00\x00\x02\x00\x00\x00\x03\
+\x00\x00\x00\x00\x00\x00\x00\x00\
+\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
+\x00\x00\x01\x62\xc0\xd0\x26\x29\
+\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x27\xce\
+\x00\x00\x01\x62\xd9\x46\xbd\x12\
+"
+
+qt_version = QtCore.qVersion().split('.')
+if qt_version < ['5', '8', '0']:
+ rcc_version = 1
+ qt_resource_struct = qt_resource_struct_v1
+else:
+ rcc_version = 2
+ qt_resource_struct = qt_resource_struct_v2
+
+def qInitResources():
+ QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
+
+def qCleanupResources():
+ QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
+
+qInitResources()
diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py
new file mode 100644
--- /dev/null
+++ b/journalist_gui/journalist_gui/strings.py
@@ -0,0 +1,44 @@
+window_title = 'SecureDrop Workstation Updater'
+update_in_progress = ("SecureDrop workstation updates are available! "
+ "It is recommended to install them now. \n\n"
+ "If you don\'t want to install them now, "
+ "you can install them the next time you reboot.\n\n"
+ "You will need to have set a Tails Administration "
+ "password in "
+ "the Tails Greeter on boot to complete the update.\n\n"
+ "When you start your workstation, this window will "
+ "automatically appear if you have not "
+ "completed any required updates.\n")
+fetching_update = ('Fetching and verifying latest update...'
+ ' (5 mins remaining)')
+updating_tails_env = ('Configuring local Tails environment...'
+ ' (1 min remaining)')
+finished = 'Update successfully completed!'
+finished_dialog_message = 'Updates completed successfully. '
+finished_dialog_title = 'SecureDrop Workstation is up to date!'
+missing_sudo_password = 'Missing Tails Administrator password'
+update_failed_dialog_title = 'Error Updating SecureDrop Workstation'
+update_failed_generic_reason = ("Update failed. "
+ "Please contact your SecureDrop "
+ "administrator.")
+update_failed_sig_failure = ("WARNING: Signature verification failed. "
+ "Contact your SecureDrop administrator "
+ "or [email protected] immediately.")
+tailsconfig_failed_sudo_password = ('Administrator password incorrect. '
+ 'Exiting upgrade - '
+ 'click Update Now to try again.')
+tailsconfig_failed_generic_reason = ("Tails workstation configuration failed. "
+ "Contact your administrator. "
+ "If you are an administrator, contact "
+ "[email protected].")
+install_update_button = 'Update Now'
+install_later_button = 'Update Later'
+sudo_password_text = ("Enter the Tails Administrator password you "
+ "entered in the Tails Greeter.\n If you did not "
+ "set an Administrator password, click Cancel "
+ "and reboot. ")
+main_tab = 'Updates Available'
+output_tab = 'Detailed Update Progress'
+initial_text_box = ("When the update begins, this area will populate with "
+ "output.\n")
+doing_setup = "Checking dependencies are up to date... (2 mins remaining)"
diff --git a/journalist_gui/journalist_gui/updaterUI.py b/journalist_gui/journalist_gui/updaterUI.py
new file mode 100644
--- /dev/null
+++ b/journalist_gui/journalist_gui/updaterUI.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+
+# Form implementation generated from reading ui file 'journalist_gui/mainwindow.ui'
+#
+# Created by: PyQt5 UI code generator 5.10
+#
+# WARNING! All changes made in this file will be lost!
+
+from PyQt5 import QtCore, QtGui, QtWidgets
+
+class Ui_MainWindow(object):
+ def setupUi(self, MainWindow):
+ MainWindow.setObjectName("MainWindow")
+ MainWindow.resize(400, 500)
+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
+ sizePolicy.setHorizontalStretch(0)
+ sizePolicy.setVerticalStretch(0)
+ sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
+ MainWindow.setSizePolicy(sizePolicy)
+ MainWindow.setMinimumSize(QtCore.QSize(400, 500))
+ MainWindow.setMaximumSize(QtCore.QSize(400, 500))
+ self.centralwidget = QtWidgets.QWidget(MainWindow)
+ self.centralwidget.setObjectName("centralwidget")
+ self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.centralwidget)
+ self.verticalLayout_3.setObjectName("verticalLayout_3")
+ self.label_2 = QtWidgets.QLabel(self.centralwidget)
+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
+ sizePolicy.setHorizontalStretch(0)
+ sizePolicy.setVerticalStretch(0)
+ sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
+ self.label_2.setSizePolicy(sizePolicy)
+ self.label_2.setText("")
+ self.label_2.setPixmap(QtGui.QPixmap("static/banner.png"))
+ self.label_2.setScaledContents(True)
+ self.label_2.setObjectName("label_2")
+ self.verticalLayout_3.addWidget(self.label_2)
+ self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
+ self.tabWidget.setObjectName("tabWidget")
+ self.tab = QtWidgets.QWidget()
+ self.tab.setObjectName("tab")
+ self.verticalLayout = QtWidgets.QVBoxLayout(self.tab)
+ self.verticalLayout.setObjectName("verticalLayout")
+ self.label = QtWidgets.QLabel(self.tab)
+ sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
+ sizePolicy.setHorizontalStretch(0)
+ sizePolicy.setVerticalStretch(0)
+ sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
+ self.label.setSizePolicy(sizePolicy)
+ self.label.setTextFormat(QtCore.Qt.PlainText)
+ self.label.setScaledContents(False)
+ self.label.setWordWrap(True)
+ self.label.setObjectName("label")
+ self.verticalLayout.addWidget(self.label)
+ self.tabWidget.addTab(self.tab, "")
+ self.tab_2 = QtWidgets.QWidget()
+ self.tab_2.setObjectName("tab_2")
+ self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.tab_2)
+ self.verticalLayout_2.setObjectName("verticalLayout_2")
+ self.plainTextEdit = QtWidgets.QPlainTextEdit(self.tab_2)
+ self.plainTextEdit.setReadOnly(True)
+ self.plainTextEdit.setObjectName("plainTextEdit")
+ self.verticalLayout_2.addWidget(self.plainTextEdit)
+ self.tabWidget.addTab(self.tab_2, "")
+ self.verticalLayout_3.addWidget(self.tabWidget)
+ self.progressBar = QtWidgets.QProgressBar(self.centralwidget)
+ self.progressBar.setProperty("value", 24)
+ self.progressBar.setObjectName("progressBar")
+ self.verticalLayout_3.addWidget(self.progressBar)
+ self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
+ self.horizontalLayout_2.setObjectName("horizontalLayout_2")
+ self.pushButton = QtWidgets.QPushButton(self.centralwidget)
+ self.pushButton.setObjectName("pushButton")
+ self.horizontalLayout_2.addWidget(self.pushButton)
+ self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
+ self.pushButton_2.setObjectName("pushButton_2")
+ self.horizontalLayout_2.addWidget(self.pushButton_2)
+ self.verticalLayout_3.addLayout(self.horizontalLayout_2)
+ MainWindow.setCentralWidget(self.centralwidget)
+ self.menubar = QtWidgets.QMenuBar(MainWindow)
+ self.menubar.setGeometry(QtCore.QRect(0, 0, 400, 22))
+ self.menubar.setObjectName("menubar")
+ MainWindow.setMenuBar(self.menubar)
+ self.statusbar = QtWidgets.QStatusBar(MainWindow)
+ self.statusbar.setObjectName("statusbar")
+ MainWindow.setStatusBar(self.statusbar)
+
+ self.retranslateUi(MainWindow)
+ self.tabWidget.setCurrentIndex(0)
+ QtCore.QMetaObject.connectSlotsByName(MainWindow)
+
+ def retranslateUi(self, MainWindow):
+ _translate = QtCore.QCoreApplication.translate
+ MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
+ self.label.setText(_translate("MainWindow", "SecureDrop workstation updates are available! You should install them now. If you don\'t want to, you can install them the next time your system boots."))
+ self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("MainWindow", "SecureDrop"))
+ self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Command Output"))
+ self.pushButton.setText(_translate("MainWindow", "Install Later"))
+ self.pushButton_2.setText(_translate("MainWindow", "Install Now"))
+
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -177,10 +177,11 @@ def test_update_signature_does_not_verify(self, tmpdir, caplog):
with mock.patch('subprocess.check_call'):
with mock.patch('subprocess.check_output',
return_value=git_output):
- securedrop_admin.update(args)
- assert "Applying SecureDrop updates..." in caplog.text
- assert "Signature verification failed." in caplog.text
- assert "Updated to SecureDrop" not in caplog.text
+ with pytest.raises(SystemExit):
+ securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
class TestSiteConfig(object):
diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
new file mode 100644
--- /dev/null
+++ b/journalist_gui/test_gui.py
@@ -0,0 +1,206 @@
+import unittest
+import subprocess
+import pexpect
+from unittest import mock
+from unittest.mock import MagicMock
+from PyQt5.QtCore import Qt
+from PyQt5.QtWidgets import (QApplication, QSizePolicy, QInputDialog)
+from PyQt5.QtTest import QTest
+
+from journalist_gui.SecureDropUpdater import UpdaterApp, strings, FLAG_LOCATION
+
+
+class AppTestCase(unittest.TestCase):
+ def setUp(self):
+ qApp = QApplication.instance()
+ if qApp is None:
+ self.app = QApplication([''])
+ else:
+ self.app = qApp
+
+
+class WindowTestCase(AppTestCase):
+ def setUp(self):
+ super(WindowTestCase, self).setUp()
+ self.window = UpdaterApp()
+ self.window.show()
+ QTest.qWaitForWindowExposed(self.window)
+
+ def test_window_is_a_fixed_size(self):
+ # Verify the size policy is fixed
+ expected_sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
+ assert self.window.sizePolicy() == expected_sizePolicy
+
+ # Verify the maximum and minimum sizes are the same as the current size
+ current_size = self.window.size()
+ assert self.window.minimumSize() == current_size
+ assert self.window.maximumSize() == current_size
+
+ def test_clicking_install_later_exits_the_application(self):
+ QTest.mouseClick(self.window.pushButton, Qt.LeftButton)
+ self.assertFalse(self.window.isVisible())
+
+ def test_progress_bar_begins_at_zero(self):
+ self.assertEqual(self.window.progressBar.value(), 0)
+
+ def test_default_tab(self):
+ self.assertEqual(self.window.tabWidget.currentIndex(), 0)
+
+ def test_output_tab(self):
+
+ tab = self.window.tabWidget.tabBar()
+ QTest.mouseClick(tab, Qt.LeftButton)
+ self.assertEqual(self.window.tabWidget.currentIndex(),
+ self.window.tabWidget.indexOf(self.window.tab_2))
+
+ @mock.patch('subprocess.check_output',
+ return_value=b'Python dependencies for securedrop-admin')
+ def test_setupThread(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=MagicMock()):
+ with mock.patch('builtins.open') as mock_open:
+ self.window.setup_thread.run() # Call run directly
+
+ mock_open.assert_called_once_with(FLAG_LOCATION, 'a')
+ self.assertEqual(self.window.update_success, True)
+ self.assertEqual(self.window.progressBar.value(), 70)
+
+ @mock.patch('subprocess.check_output',
+ return_value=b'Failed to install pip dependencies')
+ def test_setupThread_failure(self, check_output):
+ with mock.patch.object(self.window, "call_tailsconfig",
+ return_value=MagicMock()):
+ with mock.patch('builtins.open') as mock_open:
+ self.window.setup_thread.run() # Call run directly
+
+ mock_open.assert_called_once_with(FLAG_LOCATION, 'a')
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.progressBar.value(), 0)
+ self.assertEqual(self.window.failure_reason,
+ strings.update_failed_generic_reason)
+
+ @mock.patch('subprocess.check_output',
+ return_value=b'Signature verification successful')
+ def test_updateThread(self, check_output):
+ with mock.patch.object(self.window, "setup_thread",
+ return_value=MagicMock()):
+ self.window.update_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, True)
+ self.assertEqual(self.window.progressBar.value(), 50)
+
+ @mock.patch('subprocess.check_output',
+ side_effect=subprocess.CalledProcessError(
+ 1, 'cmd', b'Signature verification failed'))
+ def test_updateThread_failure(self, check_output):
+ with mock.patch.object(self.window, "setup_thread",
+ return_value=MagicMock()):
+ self.window.update_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.update_failed_sig_failure)
+
+ @mock.patch('subprocess.check_output',
+ side_effect=subprocess.CalledProcessError(
+ 1, 'cmd', b'Generic other failure'))
+ def test_updateThread_generic_failure(self, check_output):
+ with mock.patch.object(self.window, "setup_thread",
+ return_value=MagicMock()):
+ self.window.update_thread.run() # Call run directly
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.update_failed_generic_reason)
+
+ def test_get_sudo_password_when_password_provided(self):
+ expected_password = "password"
+
+ with mock.patch.object(QInputDialog, 'getText',
+ return_value=[expected_password, True]):
+ sudo_password = self.window.get_sudo_password()
+
+ self.assertEqual(sudo_password, expected_password)
+
+ def test_get_sudo_password_when_password_not_provided(self):
+ test_password = ""
+
+ with mock.patch.object(QInputDialog, 'getText',
+ return_value=[test_password, False]):
+ self.assertIsNone(self.window.get_sudo_password())
+
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_no_failures(self, pt):
+ child = pt()
+ before = MagicMock()
+
+ before.decode.side_effect = ["SUDO: ", "Update successful. failed=0"]
+ child.before = before
+ child.exitstatus = 0
+ with mock.patch('os.remove') as mock_remove:
+ self.window.tails_thread.run()
+
+ mock_remove.assert_called_once_with(FLAG_LOCATION)
+ self.assertIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, True)
+
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_generic_failure(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = ["SUDO: ", "failed=10 ERROR!!!!!"]
+ child.before = before
+ self.window.tails_thread.run()
+ self.assertNotIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.tailsconfig_failed_generic_reason)
+
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_sudo_password_is_wrong(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = ["some data",
+ pexpect.exceptions.TIMEOUT(1)]
+ child.before = before
+ self.window.tails_thread.run()
+ self.assertNotIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.tailsconfig_failed_sudo_password)
+
+ @mock.patch('pexpect.spawn')
+ def test_tailsconfigThread_some_other_subprocess_error(self, pt):
+ child = pt()
+ before = MagicMock()
+ before.decode.side_effect = subprocess.CalledProcessError(
+ 1, 'cmd', b'Generic other failure')
+ child.before = before
+ self.window.tails_thread.run()
+ self.assertNotIn("failed=0", self.window.output)
+ self.assertEqual(self.window.update_success, False)
+ self.assertEqual(self.window.failure_reason,
+ strings.tailsconfig_failed_generic_reason)
+
+ def test_tails_status_success(self):
+ result = {'status': True, "output": "successful.",
+ 'failure_reason': ''}
+
+ with mock.patch('os.remove') as mock_remove:
+ self.window.tails_status(result)
+
+ # We do remove the flag file if the update does finish
+ mock_remove.assert_called_once_with(FLAG_LOCATION)
+ self.assertEqual(self.window.progressBar.value(), 100)
+
+ def test_tails_status_failure(self):
+ result = {'status': False, "output": "successful.",
+ 'failure_reason': '42'}
+
+ with mock.patch('os.remove') as mock_remove:
+ self.window.tails_status(result)
+
+ # We do not remove the flag file if the update does not finish
+ mock_remove.assert_not_called()
+ self.assertEqual(self.window.progressBar.value(), 0)
+
+
+if __name__ == '__main__':
+ unittest.main()
| [RFE] Remove the sizegrip from the statusbar of the GUI tool
# Feature request
## Description
There should not be any sizegrip available in the statusbar of the GUI tool as suggested in https://github.com/freedomofpress/securedrop/pull/3300#discussion_r184066922
| 2018-04-24T01:30:43Z | [] | [] |
|
freedomofpress/securedrop | 3,302 | freedomofpress__securedrop-3302 | [
"3301"
] | b5286b94b8cc47c7f1388897ef11358a96daf6ab | diff --git a/journalist_gui/journalist_gui/SecureDropUpdater.py b/journalist_gui/journalist_gui/SecureDropUpdater.py
--- a/journalist_gui/journalist_gui/SecureDropUpdater.py
+++ b/journalist_gui/journalist_gui/SecureDropUpdater.py
@@ -1,7 +1,6 @@
#!/usr/bin/python
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtCore import QThread, pyqtSignal
-import sys
import subprocess
import os
import pexpect
@@ -212,16 +211,17 @@ def call_tailsconfig(self):
if self.update_success:
# Get sudo password and add an enter key as tailsconfig command
# expects
- sudo_password = self.get_sudo_password() + '\n'
- self.tails_thread.sudo_password = sudo_password
+ sudo_password = self.get_sudo_password()
+ if not sudo_password:
+ self.update_success = False
+ self.failure_reason = strings.missing_sudo_password
+ self.on_failure()
+ return
+ self.tails_thread.sudo_password = sudo_password + '\n'
self.update_status_bar_and_output(strings.updating_tails_env)
self.tails_thread.start()
else:
- self.pushButton.setEnabled(True)
- self.pushButton_2.setEnabled(True)
- self.update_status_bar_and_output(self.failure_reason)
- self.progressBar.setProperty("value", 0)
- self.alert_failure(self.failure_reason)
+ self.on_failure()
def tails_status(self, result):
"This is the slot for Tailsconfig thread"
@@ -237,12 +237,15 @@ def tails_status(self, result):
self.progressBar.setProperty("value", 100)
self.alert_success()
else:
- self.update_status_bar_and_output(self.failure_reason)
- self.alert_failure(self.failure_reason)
- # Now everything is done, enable the button.
- self.pushButton.setEnabled(True)
- self.pushButton_2.setEnabled(True)
- self.progressBar.setProperty("value", 0)
+ self.on_failure()
+
+ def on_failure(self):
+ self.update_status_bar_and_output(self.failure_reason)
+ self.alert_failure(self.failure_reason)
+ # Now everything is done, enable the button.
+ self.pushButton.setEnabled(True)
+ self.pushButton_2.setEnabled(True)
+ self.progressBar.setProperty("value", 0)
def update_securedrop(self):
self.pushButton_2.setEnabled(False)
@@ -272,4 +275,4 @@ def get_sudo_password(self):
if ok_is_pressed and sudo_password:
return sudo_password
else:
- sys.exit(0)
+ return None
diff --git a/journalist_gui/journalist_gui/strings.py b/journalist_gui/journalist_gui/strings.py
--- a/journalist_gui/journalist_gui/strings.py
+++ b/journalist_gui/journalist_gui/strings.py
@@ -16,6 +16,7 @@
finished = 'Update successfully completed!'
finished_dialog_message = 'Updates completed successfully. '
finished_dialog_title = 'SecureDrop Workstation is up to date!'
+missing_sudo_password = 'Missing Tails Administrator password'
update_failed_dialog_title = 'Error Updating SecureDrop Workstation'
update_failed_generic_reason = ("Update failed. "
"Please contact your SecureDrop "
| diff --git a/journalist_gui/test_gui.py b/journalist_gui/test_gui.py
--- a/journalist_gui/test_gui.py
+++ b/journalist_gui/test_gui.py
@@ -123,10 +123,7 @@ def test_get_sudo_password_when_password_not_provided(self):
with mock.patch.object(QInputDialog, 'getText',
return_value=[test_password, False]):
- # If the user does not provide a sudo password, we exit
- # as we cannot update.
- with self.assertRaises(SystemExit):
- self.window.get_sudo_password()
+ self.assertIsNone(self.window.get_sudo_password())
@mock.patch('pexpect.spawn')
def test_tailsconfigThread_no_failures(self, pt):
| [qt-journalist-updater] Update UI properly if Tails Administrator password is not provided
# Feature request
## Description
If the user does not provide the `Tails Administrator` password, the tool should show that in the UI and does not close the application.
| 2018-04-24T20:12:45Z | [] | [] |
|
freedomofpress/securedrop | 3,305 | freedomofpress__securedrop-3305 | [
"3304"
] | 670c8baf18f84b15c0481f82c11e06cfc97da28f | diff --git a/securedrop/store.py b/securedrop/store.py
--- a/securedrop/store.py
+++ b/securedrop/store.py
@@ -131,7 +131,7 @@ def save_file_submission(self, filesystem_id, count, journalist_filename,
encrypted_file_path = self.path(filesystem_id, encrypted_file_name)
with SecureTemporaryFile("/tmp") as stf: # nosec
with gzip.GzipFile(filename=sanitized_filename,
- mode='wb', fileobj=stf) as gzf:
+ mode='wb', fileobj=stf, mtime=0) as gzf:
# Buffer the stream into the gzip file to avoid excessive
# memory consumption
while True:
| diff --git a/securedrop/tests/test_source.py b/securedrop/tests/test_source.py
--- a/securedrop/tests/test_source.py
+++ b/securedrop/tests/test_source.py
@@ -428,7 +428,8 @@ def test_submit_sanitizes_filename(source_app):
assert resp.status_code == 200
gzipfile.assert_called_with(filename=sanitized_filename,
mode=ANY,
- fileobj=ANY)
+ fileobj=ANY,
+ mtime=0)
def test_tor2web_warning_headers(source_app):
| Reduce time information leakage of file uploads and file downloads
Current implementation of the file upload handler seems to be subject of time information leakage in relation to the zip implementation used in order to optimize the download time.
As the file is uploaded, the file is read as SecureTemporaryFile and wrapped in a GZIP to offer an offline single compression mechanism of the file and reduce further download overhead; During the gzip process the local time (as approximation of the time of the effective file upload) seems to be included in the archive.
The described issue affects `save_file_submission` in store.py https://github.com/freedomofpress/securedrop/blob/670c8baf18f84b15c0481f82c11e06cfc97da28f/securedrop/store.py#L133-L134
Proposed solutions for this is the use of the `mtime` that could be passed to `gzip.GzipFile` setting a fake date.
A reasonable easy approach, easy understandable by end users, could probably be to set the date to the the Unix epoch (0:00:00 UTC of 1 January 1970)
Similar conditions seems to affect the download `get_bulk_archive`:
https://github.com/freedomofpress/securedrop/blob/670c8baf18f84b15c0481f82c11e06cfc97da28f/securedrop/store.py#L89
When a Journalist download a bulk archive the stored gzip files are wrapped in a new gzipped file that will include:
- the internal timestamp saved inside the gzip file (as part of each included file);
- the local timestamp of the gzip file saved on the disk (as metadata for each single gzip file included in the gzip archive);
- the local time of the download is attached to the zip file disclosing in the zip file the time of the server as the time of the download. (inside the header of the wrapping gzip archive, eventually leaking the timezone of the server).
Solution for this second problem is more difficult because the ```ZipFile``` implementation requires to be patched.
In GlobaLeaks this issue is addressed by including a modified version of the spideroak zipstream implementation: https://github.com/globaleaks/GlobaLeaks/blob/0acf994b1a7e64cbaf35d950f6c76cc5c22ac7b9/backend/globaleaks/utils/zipstream.py
Inclusion of such a streaming implementation offers the additional value of solving a possible DoS vector when a big amount of files zipped at once.
The issues here described probably affects the solution of ticket https://github.com/freedomofpress/securedrop/issues/301 where an implementation based on `touch` was considering to change the timestamp of previously uploaded files at the arrival of a new file. Due to the reported bug on the Gzip implementation, the compressed archive will continue to maintain internally the original timestamp.
| 2018-04-25T12:33:58Z | [] | [] |
|
freedomofpress/securedrop | 3,327 | freedomofpress__securedrop-3327 | [
"3303"
] | 4734dd68999f0bed3878117517606459c2784f19 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -497,9 +497,9 @@ def install_securedrop(args):
"servers.")
sdlog.info("The sudo password is only necessary during initial "
"installation.")
- subprocess.check_call([os.path.join(args.ansible_path,
- 'securedrop-prod.yml'),
- '--ask-become-pass'], cwd=args.ansible_path)
+ return subprocess.check_call([os.path.join(args.ansible_path,
+ 'securedrop-prod.yml'), '--ask-become-pass'],
+ cwd=args.ansible_path)
def backup_securedrop(args):
@@ -512,7 +512,7 @@ def backup_securedrop(args):
'ansible-playbook',
os.path.join(args.ansible_path, 'securedrop-backup.yml'),
]
- subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
+ return subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
def restore_securedrop(args):
@@ -531,7 +531,7 @@ def restore_securedrop(args):
'-e',
"restore_file='{}'".format(restore_file_basename),
]
- subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
+ return subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
def run_tails_config(args):
@@ -546,8 +546,8 @@ def run_tails_config(args):
# inventory script, which fails if no site vars are configured.
'-i', '/dev/null',
]
- subprocess.check_call(ansible_cmd,
- cwd=args.ansible_path)
+ return subprocess.check_call(ansible_cmd,
+ cwd=args.ansible_path)
def check_for_updates(args):
@@ -622,10 +622,11 @@ def update(args):
if 'Good signature' not in sig_result:
sdlog.info("Signature verification failed.")
- sys.exit(1)
+ return -1
sdlog.info("Signature verification successful.")
sdlog.info("Updated to SecureDrop {}.".format(latest_tag))
+ return 0
def get_logs(args):
@@ -638,6 +639,7 @@ def get_logs(args):
subprocess.check_call(ansible_cmd, cwd=args.ansible_path)
sdlog.info("Encrypt logs and send to [email protected] or upload "
"to the SecureDrop support portal.")
+ return 0
def set_default_paths(args):
@@ -713,17 +715,18 @@ def main(argv):
args = parse_argv(argv)
setup_logger(args.v)
if args.v:
- args.func(args)
+ return_code = args.func(args)
+ sys.exit(return_code)
else:
try:
- args.func(args)
+ return_code = args.func(args)
except KeyboardInterrupt:
- sys.exit(0)
+ sys.exit(-1)
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
else:
- sys.exit(0)
+ sys.exit(return_code)
if __name__ == "__main__":
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -102,9 +102,10 @@ def test_update_exits_if_not_needed(self, tmpdir, caplog):
with mock.patch('securedrop_admin.check_for_updates',
return_value=(False, "0.6.1")):
- securedrop_admin.update(args)
+ ret_code = securedrop_admin.update(args)
assert "Applying SecureDrop updates..." in caplog.text
assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code == 0
def test_update_gpg_recv_primary_key_failure(self, tmpdir, caplog):
"""We should try a secondary keyserver if for some reason the primary
@@ -131,10 +132,11 @@ def test_update_gpg_recv_primary_key_failure(self, tmpdir, caplog):
patcher.start()
try:
- securedrop_admin.update(args)
+ ret_code = securedrop_admin.update(args)
assert "Applying SecureDrop updates..." in caplog.text
assert "Signature verification successful." in caplog.text
assert "Updated to SecureDrop" in caplog.text
+ assert ret_code == 0
finally:
for patcher in patchers:
patcher.stop()
@@ -161,10 +163,11 @@ def test_update_signature_verifies(self, tmpdir, caplog):
with mock.patch('subprocess.check_call'):
with mock.patch('subprocess.check_output',
return_value=git_output):
- securedrop_admin.update(args)
+ ret_code = securedrop_admin.update(args)
assert "Applying SecureDrop updates..." in caplog.text
assert "Signature verification successful." in caplog.text
assert "Updated to SecureDrop" in caplog.text
+ assert ret_code == 0
def test_update_signature_does_not_verify(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
@@ -177,11 +180,11 @@ def test_update_signature_does_not_verify(self, tmpdir, caplog):
with mock.patch('subprocess.check_call'):
with mock.patch('subprocess.check_output',
return_value=git_output):
- with pytest.raises(SystemExit):
- securedrop_admin.update(args)
- assert "Applying SecureDrop updates..." in caplog.text
- assert "Signature verification failed." in caplog.text
- assert "Updated to SecureDrop" not in caplog.text
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code != 0
class TestSiteConfig(object):
| Improve return codes in securedrop-admin
## Description
I noticed while making a change in #3300 that the `return 1` in `securedrop-admin update` did not produce an eventual `sys.exit(1)` as I expected - it always returned `sys.exit(0)`. Looking at the logic in `securedrop-admin/__init__.py`:
```
try:
args.func(args)
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
else:
sys.exit(0)
```
it looks like we `sys.exit(0)` unless there is an exception, i.e. we're not passing the return codes through to `sys.exit()`. Also, we `sys.exit(0)` for a `KeyboardInterrupt`.
A suggested improvement is to exit non-zero for a `KeyboardInterrupt` and to pass the return codes through to `sys.exit()` (@kushaldas let me know if you have additional thoughts on this). I think this is a small but worthwhile change (I expect most of the diff will be in the unit tests), and I advocate that we resolve it during the extended (2 week) QA period.
| 2018-04-30T22:16:44Z | [] | [] |
|
freedomofpress/securedrop | 3,339 | freedomofpress__securedrop-3339 | [
"3324"
] | cfaa82e7171b65fc70c243caa3709b6b5df5a397 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -71,6 +71,13 @@ def validate(self, document):
raise ValidationError(
message="Must not be root, amnesia or an empty string")
+ class ValidateSSH(Validator):
+ def validate(self, document):
+ text = document.text
+ if text.lower() == 'tor' or text.lower() == 'lan':
+ return True
+ raise ValidationError(message="Must be Tor (recommended) or LAN")
+
class ValidateIP(Validator):
def validate(self, document):
if re.match('((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$',
@@ -339,9 +346,9 @@ def __init__(self, args):
SiteConfig.ValidateOSSECPassword(),
None],
['enable_ssh_over_tor', True, bool,
- u'Enable SSH over Tor',
- SiteConfig.ValidateYesNo(),
- lambda x: x.lower() == 'yes'],
+ u'Enable SSH over Tor (recommended) or LAN',
+ SiteConfig.ValidateSSH(),
+ self.sanitize_ssh_over_tor_or_lan],
['securedrop_supported_locales', [], types.ListType,
u'Space separated list of additional locales to support '
'(' + translations + ')',
@@ -401,6 +408,12 @@ def validated_input(self, prompt, default, validator, transform):
def sanitize_fingerprint(self, value):
return value.upper().replace(' ', '')
+ def sanitize_ssh_over_tor_or_lan(self, value):
+ if value.lower() == 'tor':
+ return True
+ elif value.lower() == 'lan':
+ return False
+
def validate_gpg_keys(self):
keys = (('securedrop_app_gpg_public_key',
'securedrop_app_gpg_fingerprint'),
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -363,6 +363,17 @@ def test_validate_yes_no(self):
assert validator.validate(Document("no"))
assert validator.validate(Document("NO"))
+ def test_validate_ssh_tor_or_lan(self):
+ validator = securedrop_admin.SiteConfig.ValidateSSH()
+ with pytest.raises(ValidationError):
+ validator.validate(Document("not Tor or LAN"))
+ with pytest.raises(ValidationError):
+ validator.validate(Document("yes"))
+ with pytest.raises(ValidationError):
+ validator.validate(Document("no"))
+ assert validator.validate(Document("Tor"))
+ assert validator.validate(Document("LAN"))
+
def test_validate_fingerprint(self):
validator = securedrop_admin.SiteConfig.ValidateFingerprint()
assert validator.validate(Document(
@@ -602,7 +613,6 @@ def verify_prompt_boolean(
verify_prompt_securedrop_app_https_on_source_interface = \
verify_prompt_boolean
- verify_prompt_enable_ssh_over_tor = verify_prompt_boolean
verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency
@@ -639,6 +649,7 @@ def verify_prompt_fingerprint(self, site_config, desc):
verify_prompt_sasl_domain = verify_desc_consistency_allow_empty
verify_prompt_sasl_username = verify_prompt_not_empty
verify_prompt_sasl_password = verify_prompt_not_empty
+ verify_prompt_enable_ssh_over_tor = verify_prompt_not_empty
def verify_prompt_securedrop_supported_locales(self, site_config, desc):
(var, default, etype, prompt, validator, transform) = desc
| SSH over LAN sdconfig prompt could be clearer
## Description
This isn't a bug, but I think there's an opportunity for a slight improvement to Admin UX in this prompt.
The current prompt for SSH over LAN in `securedrop-admin sdconfig`:
```
Enable SSH over Tor: yes
```
A suggestion for improved prompt for SSH over LAN in `securedrop-admin sdconfig`:
```
Enable SSH over Tor or LAN [Tor (Recommended) or LAN]: Tor
```
where Tor is the default
## User Research Evidence
Two people QAing the release asked Mike or asked Gitter about what they should set in the prompt to set LAN only ;)
| 2018-05-01T23:00:41Z | [] | [] |
|
freedomofpress/securedrop | 3,357 | freedomofpress__securedrop-3357 | [
"3354",
"3354"
] | 640f56f44ebe29f26ec6889b0560ad8686c5d4c3 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -71,13 +71,6 @@ def validate(self, document):
raise ValidationError(
message="Must not be root, amnesia or an empty string")
- class ValidateSSH(Validator):
- def validate(self, document):
- text = document.text
- if text.lower() == 'tor' or text.lower() == 'lan':
- return True
- raise ValidationError(message="Must be Tor (recommended) or LAN")
-
class ValidateIP(Validator):
def validate(self, document):
if re.match('((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$',
@@ -347,9 +340,10 @@ def __init__(self, args):
SiteConfig.ValidateOSSECPassword(),
None],
['enable_ssh_over_tor', True, bool,
- u'Enable SSH over Tor (recommended) or LAN',
- SiteConfig.ValidateSSH(),
- self.sanitize_ssh_over_tor_or_lan],
+ u'Enable SSH over Tor (recommended, disables SSH over LAN). '
+ u'If you respond no, SSH will be available over LAN only',
+ SiteConfig.ValidateYesNo(),
+ lambda x: x.lower() == 'yes'],
['securedrop_supported_locales', [], types.ListType,
u'Space separated list of additional locales to support '
'(' + translations + ')',
@@ -418,12 +412,6 @@ def validated_input(self, prompt, default, validator, transform):
def sanitize_fingerprint(self, value):
return value.upper().replace(' ', '')
- def sanitize_ssh_over_tor_or_lan(self, value):
- if value.lower() == 'tor':
- return True
- elif value.lower() == 'lan':
- return False
-
def validate_gpg_keys(self):
keys = (('securedrop_app_gpg_public_key',
'securedrop_app_gpg_fingerprint'),
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -363,17 +363,6 @@ def test_validate_yes_no(self):
assert validator.validate(Document("no"))
assert validator.validate(Document("NO"))
- def test_validate_ssh_tor_or_lan(self):
- validator = securedrop_admin.SiteConfig.ValidateSSH()
- with pytest.raises(ValidationError):
- validator.validate(Document("not Tor or LAN"))
- with pytest.raises(ValidationError):
- validator.validate(Document("yes"))
- with pytest.raises(ValidationError):
- validator.validate(Document("no"))
- assert validator.validate(Document("Tor"))
- assert validator.validate(Document("LAN"))
-
def test_validate_fingerprint(self):
validator = securedrop_admin.SiteConfig.ValidateFingerprint()
assert validator.validate(Document(
@@ -613,6 +602,7 @@ def verify_prompt_boolean(
verify_prompt_securedrop_app_https_on_source_interface = \
verify_prompt_boolean
+ verify_prompt_enable_ssh_over_tor = verify_prompt_boolean
verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency
@@ -649,7 +639,6 @@ def verify_prompt_fingerprint(self, site_config, desc):
verify_prompt_sasl_domain = verify_desc_consistency_allow_empty
verify_prompt_sasl_username = verify_prompt_not_empty
verify_prompt_sasl_password = verify_prompt_not_empty
- verify_prompt_enable_ssh_over_tor = verify_prompt_not_empty
def verify_prompt_securedrop_supported_locales(self, site_config, desc):
(var, default, etype, prompt, validator, transform) = desc
| [0.7.0] Enable ssh over tor config is automatically populated with invalid response
# Bug
## Description
'LAN' setting is not persisted to `site-specific` and must be re-entered at every `./securedrop-admin sdconfig` run
## Steps to Reproduce
* run `./securedrop-admin sdconfig`
* when prompted for `Enable SSH over Tor (recommended) or LAN`, set `LAN`
* inspect `install_files/ansible-base/group_vars/all/site-specific` and observe `enable_ssh_over_tor: false`
* run `./securedrop-admin sdconfig` and observe that the default response for the `Enable SSH over Tor (recommended) or LAN` is no, and requires user input to change back to `LAN`
## Expected Behavior
`./securedrop-admin sdconfig` should populate default responses based on current SecureDrop instance configuration.
## Actual Behavior
The `Enable SSH over Tor (recommended) or LAN` field is automatically populated with an invalid response.
[0.7.0] Enable ssh over tor config is automatically populated with invalid response
# Bug
## Description
'LAN' setting is not persisted to `site-specific` and must be re-entered at every `./securedrop-admin sdconfig` run
## Steps to Reproduce
* run `./securedrop-admin sdconfig`
* when prompted for `Enable SSH over Tor (recommended) or LAN`, set `LAN`
* inspect `install_files/ansible-base/group_vars/all/site-specific` and observe `enable_ssh_over_tor: false`
* run `./securedrop-admin sdconfig` and observe that the default response for the `Enable SSH over Tor (recommended) or LAN` is no, and requires user input to change back to `LAN`
## Expected Behavior
`./securedrop-admin sdconfig` should populate default responses based on current SecureDrop instance configuration.
## Actual Behavior
The `Enable SSH over Tor (recommended) or LAN` field is automatically populated with an invalid response.
| Suggestion for a minimal/quick fix given the time constraints for 0.7.0 release:
* Revert the changes introduced in https://github.com/freedomofpress/securedrop/issues/3324
* Replace the prompt with clearer language such as: "Would you like to enable SSH over local network? This will disable SSH over Tor (If not, SSH will be only available over Tor)"
I think that's a great idea @emkll. That a minimal solution that will be clear for admins. Note that we'll need to update the pre-release announcement.
What is the expected effort on the less minimal solution of persisting the current config if no user input is provided?
It would unfortunately involve either a hack or broader code changes that we should probably avoid so close to release unless necessary
Thanks for the clarification. I agree the bug is severe enough to justify the revert/language change. Will update release announcement once that's merged.
Suggestion for a minimal/quick fix given the time constraints for 0.7.0 release:
* Revert the changes introduced in https://github.com/freedomofpress/securedrop/issues/3324
* Replace the prompt with clearer language such as: "Would you like to enable SSH over local network? This will disable SSH over Tor (If not, SSH will be only available over Tor)"
I think that's a great idea @emkll. That a minimal solution that will be clear for admins. Note that we'll need to update the pre-release announcement.
What is the expected effort on the less minimal solution of persisting the current config if no user input is provided?
It would unfortunately involve either a hack or broader code changes that we should probably avoid so close to release unless necessary
Thanks for the clarification. I agree the bug is severe enough to justify the revert/language change. Will update release announcement once that's merged. | 2018-05-03T18:34:41Z | [] | [] |
freedomofpress/securedrop | 3,358 | freedomofpress__securedrop-3358 | [
"3354"
] | fc244beb78596cc0c53a62912defdb76f2f4712e | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -71,13 +71,6 @@ def validate(self, document):
raise ValidationError(
message="Must not be root, amnesia or an empty string")
- class ValidateSSH(Validator):
- def validate(self, document):
- text = document.text
- if text.lower() == 'tor' or text.lower() == 'lan':
- return True
- raise ValidationError(message="Must be Tor (recommended) or LAN")
-
class ValidateIP(Validator):
def validate(self, document):
if re.match('((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4}$',
@@ -347,9 +340,10 @@ def __init__(self, args):
SiteConfig.ValidateOSSECPassword(),
None],
['enable_ssh_over_tor', True, bool,
- u'Enable SSH over Tor (recommended) or LAN',
- SiteConfig.ValidateSSH(),
- self.sanitize_ssh_over_tor_or_lan],
+ u'Enable SSH over Tor (recommended, disables SSH over LAN). '
+ u'If you respond no, SSH will be available over LAN only',
+ SiteConfig.ValidateYesNo(),
+ lambda x: x.lower() == 'yes'],
['securedrop_supported_locales', [], types.ListType,
u'Space separated list of additional locales to support '
'(' + translations + ')',
@@ -418,12 +412,6 @@ def validated_input(self, prompt, default, validator, transform):
def sanitize_fingerprint(self, value):
return value.upper().replace(' ', '')
- def sanitize_ssh_over_tor_or_lan(self, value):
- if value.lower() == 'tor':
- return True
- elif value.lower() == 'lan':
- return False
-
def validate_gpg_keys(self):
keys = (('securedrop_app_gpg_public_key',
'securedrop_app_gpg_fingerprint'),
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -363,17 +363,6 @@ def test_validate_yes_no(self):
assert validator.validate(Document("no"))
assert validator.validate(Document("NO"))
- def test_validate_ssh_tor_or_lan(self):
- validator = securedrop_admin.SiteConfig.ValidateSSH()
- with pytest.raises(ValidationError):
- validator.validate(Document("not Tor or LAN"))
- with pytest.raises(ValidationError):
- validator.validate(Document("yes"))
- with pytest.raises(ValidationError):
- validator.validate(Document("no"))
- assert validator.validate(Document("Tor"))
- assert validator.validate(Document("LAN"))
-
def test_validate_fingerprint(self):
validator = securedrop_admin.SiteConfig.ValidateFingerprint()
assert validator.validate(Document(
@@ -613,6 +602,7 @@ def verify_prompt_boolean(
verify_prompt_securedrop_app_https_on_source_interface = \
verify_prompt_boolean
+ verify_prompt_enable_ssh_over_tor = verify_prompt_boolean
verify_prompt_securedrop_app_gpg_public_key = verify_desc_consistency
@@ -649,7 +639,6 @@ def verify_prompt_fingerprint(self, site_config, desc):
verify_prompt_sasl_domain = verify_desc_consistency_allow_empty
verify_prompt_sasl_username = verify_prompt_not_empty
verify_prompt_sasl_password = verify_prompt_not_empty
- verify_prompt_enable_ssh_over_tor = verify_prompt_not_empty
def verify_prompt_securedrop_supported_locales(self, site_config, desc):
(var, default, etype, prompt, validator, transform) = desc
| [0.7.0] Enable ssh over tor config is automatically populated with invalid response
# Bug
## Description
'LAN' setting is not persisted to `site-specific` and must be re-entered at every `./securedrop-admin sdconfig` run
## Steps to Reproduce
* run `./securedrop-admin sdconfig`
* when prompted for `Enable SSH over Tor (recommended) or LAN`, set `LAN`
* inspect `install_files/ansible-base/group_vars/all/site-specific` and observe `enable_ssh_over_tor: false`
* run `./securedrop-admin sdconfig` and observe that the default response for the `Enable SSH over Tor (recommended) or LAN` is no, and requires user input to change back to `LAN`
## Expected Behavior
`./securedrop-admin sdconfig` should populate default responses based on current SecureDrop instance configuration.
## Actual Behavior
The `Enable SSH over Tor (recommended) or LAN` field is automatically populated with an invalid response.
| Suggestion for a minimal/quick fix given the time constraints for 0.7.0 release:
* Revert the changes introduced in https://github.com/freedomofpress/securedrop/issues/3324
* Replace the prompt with clearer language such as: "Would you like to enable SSH over local network? This will disable SSH over Tor (If not, SSH will be only available over Tor)"
I think that's a great idea @emkll. That a minimal solution that will be clear for admins. Note that we'll need to update the pre-release announcement.
What is the expected effort on the less minimal solution of persisting the current config if no user input is provided?
It would unfortunately involve either a hack or broader code changes that we should probably avoid so close to release unless necessary
Thanks for the clarification. I agree the bug is severe enough to justify the revert/language change. Will update release announcement once that's merged. | 2018-05-03T20:36:42Z | [] | [] |
freedomofpress/securedrop | 3,366 | freedomofpress__securedrop-3366 | [
"3299"
] | 46ed4fbb22e800946329902bc62916a1166e94a8 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -287,15 +287,30 @@ def __init__(self, args):
u'DNS server specified during installation',
SiteConfig.ValidateNotEmpty(),
None],
+ ['securedrop_app_gpg_public_key', 'SecureDrop.asc', str,
+ u'Local filepath to public key for '
+ 'SecureDrop Application GPG public key',
+ SiteConfig.ValidatePath(self.args.ansible_path),
+ None],
['securedrop_app_https_on_source_interface', False, bool,
u'Whether HTTPS should be enabled on '
'Source Interface (requires EV cert)',
SiteConfig.ValidateYesNo(),
lambda x: x.lower() == 'yes'],
- ['securedrop_app_gpg_public_key', 'SecureDrop.asc', str,
- u'Local filepath to public key for '
- 'SecureDrop Application GPG public key',
- SiteConfig.ValidatePath(self.args.ansible_path),
+ ['securedrop_app_https_certificate_cert_src', '', str,
+ u'Local filepath to HTTPS certificate '
+ '(optional, only if using HTTPS on source interface)',
+ SiteConfig.ValidateOptionalPath(self.args.ansible_path),
+ None],
+ ['securedrop_app_https_certificate_key_src', '', str,
+ u'Local filepath to HTTPS certificate key '
+ '(optional, only if using HTTPS on source interface)',
+ SiteConfig.ValidateOptionalPath(self.args.ansible_path),
+ None],
+ ['securedrop_app_https_certificate_chain_src', '', str,
+ u'Local filepath to HTTPS certificate chain file '
+ '(optional, only if using HTTPS on source interface)',
+ SiteConfig.ValidateOptionalPath(self.args.ansible_path),
None],
['securedrop_app_gpg_fingerprint', '', str,
u'Full fingerprint for the SecureDrop Application GPG Key',
| diff --git a/admin/tests/files/ca.crt b/admin/tests/files/ca.crt
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/ca.crt
@@ -0,0 +1 @@
+TEST FILE ONLY
diff --git a/admin/tests/files/sd.crt b/admin/tests/files/sd.crt
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/sd.crt
@@ -0,0 +1 @@
+TEST FILE ONLY
diff --git a/admin/tests/files/sd.key b/admin/tests/files/sd.key
new file mode 100644
--- /dev/null
+++ b/admin/tests/files/sd.key
@@ -0,0 +1 @@
+TEST FILE ONLY
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -643,6 +643,12 @@ def verify_prompt_fingerprint(self, site_config, desc):
verify_desc_consistency_optional)
verify_prompt_journalist_gpg_fpr = verify_prompt_fingerprint_optional
verify_prompt_journalist_alert_email = verify_desc_consistency_optional
+ verify_prompt_securedrop_app_https_certificate_chain_src = (
+ verify_desc_consistency_optional)
+ verify_prompt_securedrop_app_https_certificate_key_src = (
+ verify_desc_consistency_optional)
+ verify_prompt_securedrop_app_https_certificate_cert_src = (
+ verify_desc_consistency_optional)
verify_prompt_smtp_relay = verify_prompt_not_empty
verify_prompt_smtp_relay_port = verify_desc_consistency
verify_prompt_daily_reboot_time = verify_desc_consistency
| securedrop-admin sdconfig does not set https-related config variables
# Bug
## Description
`securedrop-admin sdconfig` does not set `securedrop_app_https_certificate_key_src`, `securedrop_app_https_certificate_cert_src`, or `securedrop_app_https_certificate_chain_src` as explained in https://docs.securedrop.org/en/stable/https_source_interface.html
## Steps to Reproduce
In Tails:
1. `securedrop-admin sdconfig`
2. At the "Whether HTTPS should be enabled on Source Interface (requires EV cert)" prompt, enter "yes"
## Expected Behavior
I am shown prompts to fill out `securedrop_app_https_certificate_key_src`, `securedrop_app_https_certificate_cert_src`, `securedrop_app_https_certificate_chain_src`.
## Actual Behavior
I am _not_ shown these prompts.
## Comments
As a temporary workaround, admins _can_ fill out `site-specific` manually, though that is a bit tricky for some admins. As such, we should fix this issue in 0.7 if possible.
| Instead of asking people for the cert names to set variables. I think should just go with the YES/NO on https enabled. And if yes, require everyone to name their files securedrop_source_onion.crt/key by default and maybe CA.crt and everyone just needs to use those names
Good call @darrow - let's do that - it's easier for users and it's a simpler implementation.
Given @darrow's suggestion, this strikes me as a docs-issue now. Happy to prepare something quick that should suffice.
Well, I think it's docs + Ansible (or docs + sdconfig) no? Since we also need the three variables above to get set in `site-specific` (or elsewhere)?
Roger that, I'll take a look and confirm both are covered. If we update the docs to require specific names, then we don't need to write the values via `sdconfig` config. Will shoot for that and fall back to prompting with defaults. | 2018-05-04T18:16:50Z | [] | [] |
freedomofpress/securedrop | 3,379 | freedomofpress__securedrop-3379 | [
"3316"
] | e10378fd303920f4a31c98e785cfdb44ba42a057 | diff --git a/securedrop/journalist_app/admin.py b/securedrop/journalist_app/admin.py
--- a/securedrop/journalist_app/admin.py
+++ b/securedrop/journalist_app/admin.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-from PIL import Image
-
import os
from flask import (Blueprint, render_template, request, url_for, redirect, g,
@@ -33,19 +31,16 @@ def manage_config():
form = LogoForm()
if form.validate_on_submit():
f = form.logo.data
- custom_logo_filepath = os.path.join(config.SECUREDROP_ROOT,
- "static/i/custom_logo.png")
+ custom_logo_filepath = os.path.join(current_app.static_folder, 'i',
+ 'custom_logo.png')
try:
- with Image.open(f) as im:
- im.thumbnail((500, 450), resample=3)
- im.save(custom_logo_filepath, "PNG")
+ f.save(custom_logo_filepath)
flash(gettext("Image updated."), "logo-success")
except Exception:
flash("Unable to process the image file."
" Try another one.", "logo-error")
finally:
return redirect(url_for("admin.manage_config"))
-
else:
for field, errors in form.errors.items():
for error in errors:
diff --git a/securedrop/journalist_app/forms.py b/securedrop/journalist_app/forms.py
--- a/securedrop/journalist_app/forms.py
+++ b/securedrop/journalist_app/forms.py
@@ -58,7 +58,6 @@ class ReplyForm(FlaskForm):
class LogoForm(FlaskForm):
logo = FileField(validators=[
FileRequired(message=gettext('File required.')),
- FileAllowed(['jpg', 'png', 'jpeg'],
- message=gettext("You can only upload JPG/JPEG"
- " or PNG image files."))
+ FileAllowed(['png'],
+ message=gettext("Upload images only."))
])
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -1042,10 +1042,9 @@ def test_logo_upload_with_invalid_filetype_fails(self):
resp = self.client.post(url_for('admin.manage_config'),
data=form.data,
follow_redirects=True)
- self.assertMessageFlashed("You can only upload JPG/JPEG"
- " or PNG image files.", "logo-error")
- self.assertIn("You can only upload JPG/JPEG"
- " or PNG image files.", resp.data)
+ self.assertMessageFlashed("Upload images only.",
+ "logo-error")
+ self.assertIn("Upload images only.", resp.data)
def test_logo_upload_with_empty_input_field_fails(self):
self._login_admin()
| Debian package securedrop-app-code is not getting upgraded
# Bug
## Description
In my prod vm `securedrop-app-code` package is not getting upgraded.
## Steps to Reproduce
Follow the QA steps to fetch the latest packages from apt-test repo.
## Expected Behavior
We should get the 0.7~RC1 packages.
## Actual Behavior
```
$ dpkg -al | grep secure
ii openssh-client 1:6.6p1-2ubuntu2.10 amd64 secure shell (SSH) client, for secure access to remote machines
ii openssh-server 1:6.6p1-2ubuntu2.10 amd64 secure shell (SSH) server, for secure access from remote machines
ii openssh-sftp-server 1:6.6p1-2ubuntu2.10 amd64 secure shell (SSH) sftp server module, for SFTP access from remote machines
ii secure-delete 3.1-6 amd64 tools to wipe files, free disk space, swap and memory
ii securedrop-app-code 0.6 amd64 Packages the SecureDrop application code pip dependencies and apparmor profiles. This package will put the apparmor profiles in enforce mode. This package does use pip to install the pip wheelhouse
ii securedrop-config 0.1.1+0.7.0~rc1 all Establishes baseline system state for running SecureDrop.
ii securedrop-grsec 4.4.115+r1 amd64 Metapackage providing a grsecurity-patched Linux kernel for use
ii securedrop-keyring 0.1.1+0.7.0~rc1 amd64 Provides an apt keyring for SecureDrop-related packages, so the master signing key used for SecureDrop packages can be updated via apt.
ii securedrop-ossec-agent 2.8.2+0.7.0~rc1 amd64 Installs the securedrop pre-configured OSSEC agent
ii ssh-import-id 3.21-0ubuntu1 all securely retrieve an SSH public key and install it locally
```
```
$ sudo apt-get upgrade
Reading package lists... Done
Building dependency tree
Reading state information... Done
Calculating upgrade... Done
The following packages have been kept back:
securedrop-app-code
0 upgraded, 0 newly installed, 0 to remove and 1 not upgraded.
```
| It seems if I manually do a `sudo apt-get update` and then `sudo apt upgrade`, it pulls in the right dependencies and the upgraded package.
Did you figure out why your manual intervention was necessary @kushaldas?
In my latest testing I could not reproduce this error. I am closing this issue.
Reopening because I also see this same behavior while upgrade testing `0.7.0~rc3` on hardware - the `securedrop-app-code` package was held back at 0.6. All other packages upgraded without intervention. But if so it's unclear why you didn't see this in further testing @kushaldas. Hypothesis: this is due to the addition of the `libjpeg-dev` dependency in 9880a30c8d58048ce33e02cdc940a26369a1d601
It looks like @emkll and @dachary also performed upgrade testing - did you experience this issue?
@redshiftzero yes, it is missing libjpeg-dev package dependency and that is why it is holding back. In my last testing `sudo cron-apt -s -i` actually did a `apt-get update`, so it found the proper dependencies and upgraded to the right `securedrop-app-code` package.
Do you understand why this only happens in some situations? `cron-apt -s -i` always does an `apt-get update` first, no?
>Do you understand why this only happens in some situations? cron-apt -s -i always does an apt-get update first, no?
Yes (about doing that `apt-get update` by `cron-apt -s -i`), but, for some reason it failed for me once, I am guessing the same happened to you too.
> did you experience this issue?
I did not. Could it be that something installs libjpeg-dev when running ``vagrant up app-prod``?
yeah im able to also reproduce this on my instance... digging
Sooo I think this might be by design on debian's side ... I'm having trouble finding links explaining this behavior but as an analogy it seems to be similar to how permissions work on Android apps:
* You have an app `Foo` that you previously installed
* New updates to `Foo` come in without new permission changes and can be automatically updated
* Say one day `Foo` updates but is now requiring access to your `Camera` ... Android wants you to manually approve that change in underlying app dependency.
[This article](https://debian-administration.org/article/69/Some_upgrades_show_packages_being_kept_back) had a really good quote summing this up:
> If I may try to rephrase: 'Apt-get dist-upgrade'ing also installs new packages brought in the chain of dependencies, whereas 'apt-get upgrade'ing only install newer versions of packages already installed. After a couple of years working with Debian, I never managed to exactly understand that difference. Would you believe I never found documentation who states this as simple?
Still diggin into solutions here
Hmm interesting - but then how did we add dependencies before? For example, when we added the [`securedrop-keyring`](https://github.com/freedomofpress/securedrop/commit/75961546e0078ed0c3ad47e8d67bb4a94c78bc2a#diff-8f4b8b485ba54d091bf0bd9e34f6b86b) package as a dependency of the `securedrop-app-code`?
To re-cap conversation that was conducted out of band.. the gist is this ..
* In order to add any dependency to an existing installed package, a `dist-upgrade` needs to be run
* We do in-fact run a `dist-upgrade` as part of `cron-apt` but its very narrow in scope: `dist-upgrade -y -o APT::Get::Show-Upgraded=true -o Dir::Etc::SourceList=/etc/apt/security.list -o Dpkg::Options::=--force-confdef -o Dpkg::Options::=--force-confold`.
* That `security.list` file from above has a very select number of repos that are whitelisted:
```
root@app-staging:/home/vagrant# cat /etc/apt/security.list
deb http://security.ubuntu.com/ubuntu trusty-security main
deb-src http://security.ubuntu.com/ubuntu trusty-security main
deb http://security.ubuntu.com/ubuntu trusty-security universe
deb-src http://security.ubuntu.com/ubuntu trusty-security universe
deb [arch=amd64] https://apt.freedom.press trusty main
deb https://tor-apt.freedom.press trusty main
```
the problem is that the newly added dependency, `libjpeg-dev` is not in one of those repos, its under `archive.ubuntu.com/ubuntu trusty main` ... so apt-cron won't upgrade here.
* This worked previously for new depdendencies like `securedrop-keyring` because those were hosted under `apt.freedom.press` which is on the `security.list`.
Now theoretically we could add those new libjpeg-dev deps to our internal repo and things should work good -- but the consensus, this close to release, is to roll back the necessary changes that required this new dependency and punt solving this issue to a later release. | 2018-05-08T20:08:14Z | [] | [] |
freedomofpress/securedrop | 3,401 | freedomofpress__securedrop-3401 | [
"3391",
"3391"
] | 323c228a2e307f5d58bf4c5990cb86ddc545ea3d | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -255,115 +255,141 @@ def __init__(self, args):
['ssh_users', 'sd', str,
u'Username for SSH access to the servers',
SiteConfig.ValidateUser(),
- None],
+ None,
+ lambda config: True],
['daily_reboot_time', 4, int,
u'Daily reboot time of the server (24-hour clock)',
SiteConfig.ValidateTime(),
- int],
+ int,
+ lambda config: True],
['app_ip', '10.20.2.2', str,
u'Local IPv4 address for the Application Server',
SiteConfig.ValidateIP(),
- None],
+ None,
+ lambda config: True],
['monitor_ip', '10.20.3.2', str,
u'Local IPv4 address for the Monitor Server',
SiteConfig.ValidateIP(),
- None],
+ None,
+ lambda config: True],
['app_hostname', 'app', str,
u'Hostname for Application Server',
SiteConfig.ValidateNotEmpty(),
- None],
+ None,
+ lambda config: True],
['monitor_hostname', 'mon', str,
u'Hostname for Monitor Server',
SiteConfig.ValidateNotEmpty(),
- None],
+ None,
+ lambda config: True],
['dns_server', '8.8.8.8', str,
u'DNS server specified during installation',
SiteConfig.ValidateNotEmpty(),
- None],
+ None,
+ lambda config: True],
['securedrop_app_gpg_public_key', 'SecureDrop.asc', str,
u'Local filepath to public key for '
'SecureDrop Application GPG public key',
SiteConfig.ValidatePath(self.args.ansible_path),
- None],
+ None,
+ lambda config: True],
['securedrop_app_https_on_source_interface', False, bool,
u'Whether HTTPS should be enabled on '
'Source Interface (requires EV cert)',
SiteConfig.ValidateYesNo(),
- lambda x: x.lower() == 'yes'],
+ lambda x: x.lower() == 'yes',
+ lambda config: True],
['securedrop_app_https_certificate_cert_src', '', str,
- u'Local filepath to HTTPS certificate '
- '(optional, only if using HTTPS on source interface)',
+ u'Local filepath to HTTPS certificate',
SiteConfig.ValidateOptionalPath(self.args.ansible_path),
- None],
+ None,
+ lambda config: config.get(
+ 'securedrop_app_https_on_source_interface')],
['securedrop_app_https_certificate_key_src', '', str,
- u'Local filepath to HTTPS certificate key '
- '(optional, only if using HTTPS on source interface)',
+ u'Local filepath to HTTPS certificate key',
SiteConfig.ValidateOptionalPath(self.args.ansible_path),
- None],
+ None,
+ lambda config: config.get(
+ 'securedrop_app_https_on_source_interface')],
['securedrop_app_https_certificate_chain_src', '', str,
- u'Local filepath to HTTPS certificate chain file '
- '(optional, only if using HTTPS on source interface)',
+ u'Local filepath to HTTPS certificate chain file',
SiteConfig.ValidateOptionalPath(self.args.ansible_path),
- None],
+ None,
+ lambda config: config.get(
+ 'securedrop_app_https_on_source_interface')],
['securedrop_app_gpg_fingerprint', '', str,
u'Full fingerprint for the SecureDrop Application GPG Key',
SiteConfig.ValidateFingerprint(),
- self.sanitize_fingerprint],
+ self.sanitize_fingerprint,
+ lambda config: True],
['ossec_alert_gpg_public_key', 'ossec.pub', str,
u'Local filepath to OSSEC alerts GPG public key',
SiteConfig.ValidatePath(self.args.ansible_path),
- None],
+ None,
+ lambda config: True],
['ossec_gpg_fpr', '', str,
u'Full fingerprint for the OSSEC alerts GPG public key',
SiteConfig.ValidateFingerprint(),
- self.sanitize_fingerprint],
+ self.sanitize_fingerprint,
+ lambda config: True],
['ossec_alert_email', '', str,
u'Admin email address for receiving OSSEC alerts',
SiteConfig.ValidateOSSECEmail(),
- None],
+ None,
+ lambda config: True],
['journalist_alert_gpg_public_key', '', str,
u'Local filepath to journalist alerts GPG public key (optional)',
SiteConfig.ValidateOptionalPath(self.args.ansible_path),
- None],
+ None,
+ lambda config: True],
['journalist_gpg_fpr', '', str,
u'Full fingerprint for the journalist alerts '
u'GPG public key (optional)',
SiteConfig.ValidateOptionalFingerprint(),
- self.sanitize_fingerprint],
+ self.sanitize_fingerprint,
+ lambda config: config.get('journalist_alert_gpg_public_key')],
['journalist_alert_email', '', str,
u'Email address for receiving journalist alerts (optional)',
SiteConfig.ValidateOptionalEmail(),
- None],
+ None,
+ lambda config: config.get('journalist_alert_gpg_public_key')],
['smtp_relay', "smtp.gmail.com", str,
u'SMTP relay for sending OSSEC alerts',
SiteConfig.ValidateNotEmpty(),
- None],
+ None,
+ lambda config: True],
['smtp_relay_port', 587, int,
u'SMTP port for sending OSSEC alerts',
SiteConfig.ValidateInt(),
- int],
+ int,
+ lambda config: True],
['sasl_domain', "gmail.com", str,
u'SASL domain for sending OSSEC alerts',
None,
- None],
+ None,
+ lambda config: True],
['sasl_username', '', str,
u'SASL username for sending OSSEC alerts',
SiteConfig.ValidateOSSECUsername(),
- None],
+ None,
+ lambda config: True],
['sasl_password', '', str,
u'SASL password for sending OSSEC alerts',
SiteConfig.ValidateOSSECPassword(),
- None],
+ None,
+ lambda config: True],
['enable_ssh_over_tor', True, bool,
u'Enable SSH over Tor (recommended, disables SSH over LAN). '
u'If you respond no, SSH will be available over LAN only',
SiteConfig.ValidateYesNo(),
- lambda x: x.lower() == 'yes'],
+ lambda x: x.lower() == 'yes',
+ lambda config: True],
['securedrop_supported_locales', [], types.ListType,
u'Space separated list of additional locales to support '
'(' + translations + ')',
SiteConfig.ValidateLocales(self.args.app_path),
- string.split],
+ string.split,
+ lambda config: True],
]
def load_and_update_config(self):
@@ -382,23 +408,17 @@ def update_config(self):
def user_prompt_config(self):
config = {}
for desc in self.desc:
- (var, default, type, prompt, validator, transform) = desc
- if var == 'journalist_gpg_fpr':
- if not config.get('journalist_alert_gpg_public_key',
- None):
- config[var] = ''
- continue
- if var == 'journalist_alert_email':
- if not config.get('journalist_alert_gpg_public_key',
- None):
- config[var] = ''
- continue
+ (var, default, type, prompt, validator, transform,
+ condition) = desc
+ if not condition(config):
+ config[var] = ''
+ continue
config[var] = self.user_prompt_config_one(desc,
self.config.get(var))
return config
def user_prompt_config_one(self, desc, from_config):
- (var, default, type, prompt, validator, transform) = desc
+ (var, default, type, prompt, validator, transform, condition) = desc
if from_config is not None:
default = from_config
prompt += ': '
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -573,26 +573,66 @@ def get_desc(self, site_config, var):
return desc
def verify_desc_consistency_optional(self, site_config, desc):
- (var, default, etype, prompt, validator, transform) = desc
+ (var, default, etype, prompt, validator, transform, condition) = desc
# verify the default passes validation
assert site_config.user_prompt_config_one(desc, None) == default
assert type(default) == etype
def verify_desc_consistency(self, site_config, desc):
self.verify_desc_consistency_optional(site_config, desc)
- (var, default, etype, prompt, validator, transform) = desc
+ (var, default, etype, prompt, validator, transform, condition) = desc
with pytest.raises(ValidationError):
site_config.user_prompt_config_one(desc, '')
def verify_prompt_boolean(
self, site_config, desc):
self.verify_desc_consistency(site_config, desc)
- (var, default, etype, prompt, validator, transform) = desc
+ (var, default, etype, prompt, validator, transform, condition) = desc
assert site_config.user_prompt_config_one(desc, True) is True
assert site_config.user_prompt_config_one(desc, False) is False
assert site_config.user_prompt_config_one(desc, 'YES') is True
assert site_config.user_prompt_config_one(desc, 'NO') is False
+ def test_desc_conditional(self):
+ """Ensure that conditional prompts behave correctly.
+
+ Prompts which depend on another question should only be
+ asked if the prior question was answered appropriately."""
+
+ questions = [
+ ['first_question',
+ False,
+ bool,
+ u'Test Question 1',
+ None,
+ lambda x: x.lower() == 'yes',
+ lambda config: True],
+ ['dependent_question',
+ 'default_value',
+ str,
+ u'Test Question 2',
+ None,
+ None,
+ lambda config: config.get('first_question', False)]
+ ]
+ args = argparse.Namespace(site_config='tests/files/site-specific',
+ ansible_path='tests/files',
+ app_path=dirname(__file__))
+ site_config = securedrop_admin.SiteConfig(args)
+ site_config.desc = questions
+
+ def auto_prompt(prompt, default, **kwargs):
+ return default
+
+ with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt):
+ config = site_config.user_prompt_config()
+ assert config['dependent_question'] != 'default_value'
+
+ site_config.desc[0][1] = True
+
+ config = site_config.user_prompt_config()
+ assert config['dependent_question'] == 'default_value'
+
verify_prompt_ssh_users = verify_desc_consistency
verify_prompt_app_ip = verify_desc_consistency
verify_prompt_monitor_ip = verify_desc_consistency
@@ -616,7 +656,7 @@ def verify_prompt_fingerprint_optional(self, site_config, desc):
assert site_config.user_prompt_config_one(desc, fpr) == clean_fpr
def verify_desc_consistency_allow_empty(self, site_config, desc):
- (var, default, etype, prompt, validator, transform) = desc
+ (var, default, etype, prompt, validator, transform, condition) = desc
# verify the default passes validation
assert site_config.user_prompt_config_one(desc, None) == default
assert type(default) == etype
@@ -647,7 +687,7 @@ def verify_prompt_fingerprint(self, site_config, desc):
verify_prompt_sasl_password = verify_prompt_not_empty
def verify_prompt_securedrop_supported_locales(self, site_config, desc):
- (var, default, etype, prompt, validator, transform) = desc
+ (var, default, etype, prompt, validator, transform, condition) = desc
# verify the default passes validation
assert site_config.user_prompt_config_one(desc, None) == default
assert type(default) == etype
@@ -672,7 +712,8 @@ def auto_prompt(prompt, default, **kwargs):
with mock.patch('prompt_toolkit.prompt', side_effect=auto_prompt):
for desc in site_config.desc:
- (var, default, etype, prompt, validator, transform) = desc
+ (var, default, etype, prompt, validator, transform,
+ condition) = desc
method = 'verify_prompt_' + var
print("checking " + method)
getattr(self, method)(site_config, desc)
| Support context-dependent behavior in securedrop-admin sdconfig
## Description
Currently, the default behavior in `securedrop-admin sdconfig` is that each question will be displayed, even if the question is not relevant for the SecureDrop instance. An enhancement would be to support asking optional questions only when necessary. For example, only an instance that answers "yes" to wanting to configure HTTPS on their source interface needs to answer the three questions added in #3366. This functionality can then replace the approach taken in #3340, which worked around this limitation to implement this behavior for the journalist notifications.
## User Stories
As a SecureDrop administrator, I want to only answer necessary questions such that it is faster to install SecureDrop.
Support context-dependent behavior in securedrop-admin sdconfig
## Description
Currently, the default behavior in `securedrop-admin sdconfig` is that each question will be displayed, even if the question is not relevant for the SecureDrop instance. An enhancement would be to support asking optional questions only when necessary. For example, only an instance that answers "yes" to wanting to configure HTTPS on their source interface needs to answer the three questions added in #3366. This functionality can then replace the approach taken in #3340, which worked around this limitation to implement this behavior for the journalist notifications.
## User Stories
As a SecureDrop administrator, I want to only answer necessary questions such that it is faster to install SecureDrop.
| I intend to work on this issue, <s>following a similar approach to what was taken here:
https://github.com/freedomofpress/securedrop/blob/develop/admin/securedrop_admin/__init__.py#L386</s>
**Edit:** Discussing other approaches with Kushal.
**Edit 2:** I thought it would be nice if `prompt_toolkit` offered some way to display prompts conditionally, but this doesn't seem to be supported. So I'm considering a custom approach to conditional/dependent prompts.
I intend to work on this issue, <s>following a similar approach to what was taken here:
https://github.com/freedomofpress/securedrop/blob/develop/admin/securedrop_admin/__init__.py#L386</s>
**Edit:** Discussing other approaches with Kushal.
**Edit 2:** I thought it would be nice if `prompt_toolkit` offered some way to display prompts conditionally, but this doesn't seem to be supported. So I'm considering a custom approach to conditional/dependent prompts. | 2018-05-14T19:33:22Z | [] | [] |
freedomofpress/securedrop | 3,406 | freedomofpress__securedrop-3406 | [
"3405"
] | ece99cd5e468788e095d788d4bfa07905177c752 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -37,6 +37,7 @@
import yaml
sdlog = logging.getLogger(__name__)
+RELEASE_KEY = '22245C81E3BAEB4138B36061310F561200F4AD77'
class FingerprintException(Exception):
@@ -612,7 +613,7 @@ def check_for_updates(args):
def get_release_key_from_keyserver(args, keyserver=None, timeout=45):
gpg_recv = ['timeout', str(timeout), 'gpg', '--recv-key']
- release_key = ['22245C81E3BAEB4138B36061310F561200F4AD77']
+ release_key = [RELEASE_KEY]
# We construct the gpg --recv-key command based on optional keyserver arg.
if keyserver:
@@ -633,9 +634,6 @@ def update(args):
# Exit if we're up to date
return 0
- git_checkout_cmd = ['git', 'checkout', latest_tag]
- subprocess.check_call(git_checkout_cmd, cwd=args.root)
-
sdlog.info("Verifying signature on latest update...")
try:
@@ -648,14 +646,35 @@ def update(args):
keyserver=secondary_keyserver)
git_verify_tag_cmd = ['git', 'tag', '-v', latest_tag]
- sig_result = subprocess.check_output(git_verify_tag_cmd,
- stderr=subprocess.STDOUT,
- cwd=args.root)
+ try:
+ sig_result = subprocess.check_output(git_verify_tag_cmd,
+ stderr=subprocess.STDOUT,
+ cwd=args.root)
+
+ good_sig_text = 'Good signature from "SecureDrop Release Signing Key"'
+ bad_sig_text = 'BAD signature'
+ # To ensure that an adversary cannot name a malicious key good_sig_text
+ # we check that bad_sig_text does not appear and that the release key
+ # appears on the second line of the output.
+ gpg_lines = sig_result.split('\n')
+ if RELEASE_KEY in gpg_lines[1] and \
+ sig_result.count(good_sig_text) == 1 and \
+ bad_sig_text not in sig_result:
+ sdlog.info("Signature verification successful.")
+ else: # If anything else happens, fail and exit 1
+ sdlog.info("Signature verification failed.")
+ return 1
- if 'Good signature' not in sig_result:
+ except subprocess.CalledProcessError:
+ # If there is no signature, or if the signature does not verify,
+ # then git tag -v exits subprocess.check_output will exit 1
+ # and subprocess.check_output will throw a CalledProcessError
sdlog.info("Signature verification failed.")
- return -1
- sdlog.info("Signature verification successful.")
+ return 1
+
+ # Only if the proper signature verifies do we check out the latest
+ git_checkout_cmd = ['git', 'checkout', latest_tag]
+ subprocess.check_call(git_checkout_cmd, cwd=args.root)
sdlog.info("Updated to SecureDrop {}.".format(latest_tag))
return 0
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -114,7 +114,11 @@ def test_update_gpg_recv_primary_key_failure(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
args = argparse.Namespace(root=git_repo_path)
- git_output = 'Good signature from "SecureDrop Release Signing Key"'
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '22245C81E3BAEB4138B36061310F561200F4AD77\n'
+ 'gpg: Good signature from "SecureDrop Release '
+ 'Signing Key" [unknown]\n')
patchers = [
mock.patch('securedrop_admin.check_for_updates',
@@ -156,7 +160,11 @@ def test_update_signature_verifies(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
args = argparse.Namespace(root=git_repo_path)
- git_output = 'Good signature from "SecureDrop Release Signing Key"'
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '22245C81E3BAEB4138B36061310F561200F4AD77\n'
+ 'gpg: Good signature from "SecureDrop Release '
+ 'Signing Key" [unknown]\n')
with mock.patch('securedrop_admin.check_for_updates',
return_value=(True, "0.6.1")):
@@ -173,7 +181,11 @@ def test_update_signature_does_not_verify(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
args = argparse.Namespace(root=git_repo_path)
- git_output = 'Bad signature from "SecureDrop Release Signing Key"'
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '22245C81E3BAEB4138B36061310F561200F4AD77\n'
+ 'gpg: BAD signature from "SecureDrop Release '
+ 'Signing Key" [unknown]\n')
with mock.patch('securedrop_admin.check_for_updates',
return_value=(True, "0.6.1")):
@@ -186,6 +198,88 @@ def test_update_signature_does_not_verify(self, tmpdir, caplog):
assert "Updated to SecureDrop" not in caplog.text
assert ret_code != 0
+ def test_update_malicious_key_named_fingerprint(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '1234567812345678123456781234567812345678\n'
+ 'gpg: Good signature from "22245C81E3BAEB4138'
+ 'B36061310F561200F4AD77" [unknown]\n')
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")):
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ return_value=git_output):
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code != 0
+
+ def test_update_malicious_key_named_good_sig(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '1234567812345678123456781234567812345678\n'
+ 'gpg: Good signature from Good signature from '
+ '"SecureDrop Release Signing Key" [unknown]\n')
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")):
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ return_value=git_output):
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code != 0
+
+ def test_update_malicious_key_named_good_sig_fingerprint(self, tmpdir,
+ caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '1234567812345678123456781234567812345678\n'
+ 'gpg: Good signature from 22245C81E3BAEB4138'
+ 'B36061310F561200F4AD77 Good signature from '
+ '"SecureDrop Release Signing Key" [unknown]\n')
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")):
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ return_value=git_output):
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code != 0
+
+ def test_no_signature_on_update(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ with mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")):
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ side_effect=subprocess.CalledProcessError(
+ 1, 'git', 'error: no signature found')
+ ):
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification failed." in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code != 0
+
class TestSiteConfig(object):
| Securedrop-admin update should verify tag signature prior to checking out the tag
# Bug
## Description
SecureDrop admin update logic introduced in 0.6 checks out a tag before verifying that it was properly signed with the SecureDrop release signing key. Should an incorrect (non-production-ready) or malicious tag be selected, the error will appear to an administrator once. Should the admin ignore this error, the SecureDrop repository may no longer update to production-ready code.
As a SecureDrop administrator, I would like to check out and run only code that has been certified by the SecureDrop team as production ready.
## Expected Behavior
updater should only checkout code if the signature is valid. The order should be as follows:
1- fetch
2- tag -v
3- checkout
## Actual Behavior
The order is:
1- fetch
2- checkout
3- tag -v
## Comments
All upgrades to 0.7 should use the manual method of validating tags in the order described as above in the blog post, and can safely use the GUI updater in subsequent SecureDrop releases.
| 2018-05-14T20:53:13Z | [] | [] |
|
freedomofpress/securedrop | 3,425 | freedomofpress__securedrop-3425 | [
"3071"
] | fa1421c06e27145278ef1900450612aac268d988 | diff --git a/securedrop/i18n_tool.py b/securedrop/i18n_tool.py
--- a/securedrop/i18n_tool.py
+++ b/securedrop/i18n_tool.py
@@ -5,6 +5,7 @@
import io
import logging
import os
+import glob
import re
import signal
import subprocess
@@ -14,52 +15,12 @@
from os.path import dirname, join, realpath
+from sh import git, pybabel, sed, msgmerge, xgettext, msgfmt
+
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s')
log = logging.getLogger(__name__)
-def sh(command, input=None):
- """Run the *command* which must be a shell snippet. The stdin is
- either /dev/null or the *input* argument string.
-
- The stderr/stdout of the snippet are captured and logged via
- logging.debug(), one line at a time.
- """
- log.debug(":sh: " + command)
- if input is None:
- stdin = None
- else:
- stdin = subprocess.PIPE
- proc = subprocess.Popen(
- args=command,
- stdin=stdin,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=True,
- bufsize=1)
- if stdin is not None:
- proc.stdin.write(input)
- proc.stdin.close()
- lines_of_command_output = []
- loggable_line_list = []
- with proc.stdout:
- for line in iter(proc.stdout.readline, b''):
- line = line.decode('utf-8')
- lines_of_command_output.append(line)
- loggable_line = line.strip().encode('ascii', 'ignore')
- log.debug(loggable_line)
- loggable_line_list.append(loggable_line)
- if proc.wait() != 0:
- if log.getEffectiveLevel() > logging.DEBUG:
- for loggable_line in loggable_line_list:
- log.error(loggable_line)
- raise subprocess.CalledProcessError(
- returncode=proc.returncode,
- cmd=command
- )
- return "".join(lines_of_command_output)
-
-
class I18NTool(object):
#
@@ -92,114 +53,86 @@ def file_is_modified(self, path):
return subprocess.call(['git', '-C', dir, 'diff', '--quiet', path])
def ensure_i18n_remote(self, args):
- sh("""
- set -ex
- cd {root}
- if ! git remote | grep --quiet i18n ; then
- git remote add i18n {url}
- fi
- git fetch i18n
- """.format(root=args.root,
- url=args.url))
+ k = {'_cwd': args.root}
+ if 'i18n' not in git.remote(**k).stdout:
+ git.remote.add('i18n', args.url, **k)
+ git.fetch('i18n', **k)
def translate_messages(self, args):
messages_file = os.path.join(args.translations_dir, 'messages.pot')
if args.extract_update:
- sh("""
- set -xe
-
- mkdir -p {translations_dir}
-
- pybabel extract \
- --charset=utf-8 \
- --mapping={mapping} \
- --output={messages_file} \
- --project=SecureDrop \
- --version={version} \
- --msgid-bugs-address='[email protected]' \
- --copyright-holder='Freedom of the Press Foundation' \
- {sources}
-
- # remove this line so the file does not change if no
- # strings are modified
- sed -i '/^"POT-Creation-Date/d' {messages_file}
- """.format(translations_dir=args.translations_dir,
- mapping=args.mapping,
- messages_file=messages_file,
- version=args.version,
- sources=" ".join(args.sources.split(','))))
+ if not os.path.exists(args.translations_dir):
+ os.makedirs(args.translations_dir)
+ sources = args.sources.split(',')
+ pybabel.extract(
+ '--charset=utf-8',
+ '--mapping', args.mapping,
+ '--output', messages_file,
+ '--project=SecureDrop',
+ '--version', args.version,
+ "[email protected]",
+ "--copyright-holder=Freedom of the Press Foundation",
+ *sources)
+ sed('-i', '-e', '/^"POT-Creation-Date/d', messages_file)
if (self.file_is_modified(messages_file) and
len(os.listdir(args.translations_dir)) > 1):
- sh("""
- set -xe
- for translation in {translations_dir}/*/LC_MESSAGES/*.po ; do
- msgmerge --previous --update $translation {messages_file}
- done
- """.format(translations_dir=args.translations_dir,
- messages_file=messages_file))
- log.warning("messages translations updated in " +
- messages_file)
+ tglob = '{}/*/LC_MESSAGES/*.po'.format(args.translations_dir)
+ for translation in glob.iglob(tglob):
+ msgmerge('--previous', '--update', translation,
+ messages_file)
+ log.warning("messages translations updated in {}".format(
+ messages_file))
else:
log.warning("messages translations are already up to date")
if args.compile and len(os.listdir(args.translations_dir)) > 1:
- sh("""
- set -x
- pybabel compile --directory {translations_dir}
- """.format(translations_dir=args.translations_dir))
+ pybabel.compile('--directory', args.translations_dir)
def translate_desktop(self, args):
messages_file = os.path.join(args.translations_dir, 'desktop.pot')
if args.extract_update:
- sh("""
- set -xe
- cd {translations_dir}
- xgettext \
- --output=desktop.pot \
- --language=Desktop \
- --keyword \
- --keyword=Name \
- --package-version={version} \
- --msgid-bugs-address='[email protected]' \
- --copyright-holder='Freedom of the Press Foundation' \
- {sources}
-
- # remove this line so the file does not change if no
- # strings are modified
- sed -i '/^"POT-Creation-Date/d' {messages_file}
- """.format(translations_dir=args.translations_dir,
- messages_file=messages_file,
- version=args.version,
- sources=" ".join(args.sources.split(','))))
+ sources = args.sources.split(',')
+ k = {'_cwd': args.translations_dir}
+ xgettext(
+ "--output=desktop.pot",
+ "--language=Desktop",
+ "--keyword",
+ "--keyword=Name",
+ "--package-version", args.version,
+ "[email protected]",
+ "--copyright-holder=Freedom of the Press Foundation",
+ *sources,
+ **k)
+ sed('-i', '-e', '/^"POT-Creation-Date/d', messages_file, **k)
if self.file_is_modified(messages_file):
for f in os.listdir(args.translations_dir):
if not f.endswith('.po'):
continue
po_file = os.path.join(args.translations_dir, f)
- sh("""
- msgmerge --update {po_file} {messages_file}
- """.format(po_file=po_file,
- messages_file=messages_file))
+ msgmerge('--update', po_file, messages_file)
log.warning("messages translations updated in " +
messages_file)
else:
log.warning("desktop translations are already up to date")
if args.compile:
- sh("""
- set -ex
- cd {translations_dir}
- find *.po | sed -e 's/\.po$//' > LINGUAS
- for source in {sources} ; do
- target=$(basename $source .in)
- msgfmt --desktop --template $source -o $target -d .
- done
- """.format(translations_dir=args.translations_dir,
- sources=" ".join(args.sources.split(','))))
+ pos = filter(lambda f: f.endswith('.po'),
+ os.listdir(args.translations_dir))
+ linguas = map(lambda l: l.rstrip('.po'), pos)
+ content = "\n".join(linguas) + "\n"
+ open(join(args.translations_dir, 'LINGUAS'), 'w').write(content)
+
+ for source in args.sources.split(','):
+ target = source.rstrip('.in')
+ msgfmt('--desktop',
+ '--template', source,
+ '-o', target,
+ '-d', '.',
+ _cwd=args.translations_dir)
def set_translate_parser(self,
subps,
@@ -280,16 +213,13 @@ def update_docs(self, args):
io.open(l10n_txt, mode='w').write(l10n_content)
self.require_git_email_name(includes)
if self.file_is_modified(l10n_txt):
- sh("""
- set -ex
- cd {includes}
- git add l10n.txt
- git commit \
- -m 'docs: update the list of supported languages' \
- l10n.txt
- """.format(includes=includes))
+ k = {'_cwd': includes}
+ git.add('l10n.txt', **k)
+ msg = 'docs: update the list of supported languages'
+ git.commit('-m', msg, 'l10n.txt', **k)
log.warning(l10n_txt + " updated")
- log.warning(sh("cd " + includes + "; git show"))
+ git_show_out = git.show(**k)
+ log.warning(git_show_out)
else:
log.warning(l10n_txt + " already up to date")
@@ -314,19 +244,17 @@ def update_from_weblate(self, args):
def need_update(p):
exists = os.path.exists(join(args.root, p))
- sh("""
- set -ex
- cd {r}
- git checkout i18n/i18n -- {p}
- git reset HEAD -- {p}
- """.format(r=args.root, p=p))
+ k = {'_cwd': args.root}
+ git.checkout('i18n/i18n', '--', p, **k)
+ git.reset('HEAD', '--', p, **k)
if not exists:
return True
else:
return self.file_is_modified(join(args.root, p))
def add(p):
- sh("git -C {r} add {p}".format(r=args.root, p=p))
+ git('-C', args.root, 'add', p)
+
updated = False
#
# Update messages
@@ -353,20 +281,24 @@ def add(p):
def upstream_commit(self, args, code):
self.require_git_email_name(args.root)
authors = set()
- for path in sh("git -C {r} diff --name-only --cached".format(
- r=args.root)).split():
- previous_message = sh("git -C {r} log -n 1 {p}".format(
- r=args.root, p=path))
- m = re.search('copied from (\w+)', previous_message)
+ diffs = git('--no-pager', '-C', args.root,
+ 'diff', '--name-only', '--cached').stdout
+ for path in diffs.strip().split('\n'):
+ previous_message = unicode(git(
+ '--no-pager', '-C', args.root, 'log', '-n', '1', path,
+ _encoding='utf-8'))
+ m = re.search(u'copied from (\w+)', previous_message)
if m:
origin = m.group(1)
else:
origin = ''
- authors |= set(sh("""
- git -C {r} log --format=%aN {o}..i18n/i18n -- {p}
- """.format(r=args.root, o=origin, p=path)).strip().split('\n'))
- current = sh("git -C {r} rev-parse i18n/i18n".format(
- r=args.root)).strip()
+ git_authors = unicode(git(
+ '--no-pager', '-C', args.root, 'log', '--format=%aN',
+ '{}..i18n/i18n'.format(origin), '--',
+ path, _encoding='utf-8'))
+ git_authors = git_authors.strip().split(u'\n')
+ authors |= set(git_authors)
+ current = git('-C', args.root, 'rev-parse', 'i18n/i18n').stdout
info = I18NTool.SUPPORTED_LANGUAGES[code]
message = textwrap.dedent(u"""
l10n: updated {code} {name}
@@ -377,11 +309,10 @@ def upstream_commit(self, args, code):
copied from {current}
""".format(remote=args.url,
name=info['name'],
- authors=", ".join(authors),
+ authors=u", ".join(authors),
code=code,
current=current))
- sh(u'git -C {r} commit -m "{message}"'.format(
- r=args.root, message=message.replace('"', '\"')).encode('utf-8'))
+ git('-C', args.root, 'commit', '-m', message)
def set_update_from_weblate_parser(self, subps):
parser = subps.add_parser('update-from-weblate',
@@ -419,6 +350,7 @@ def get_args(self):
def setup_verbosity(self, args):
if args.verbose:
+ logging.getLogger('sh.command').setLevel(logging.INFO)
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.INFO)
| diff --git a/securedrop/tests/test_i18n.py b/securedrop/tests/test_i18n.py
--- a/securedrop/tests/test_i18n.py
+++ b/securedrop/tests/test_i18n.py
@@ -31,6 +31,8 @@
import pytest
import source_app
+from sh import sed, pybabel
+
def verify_i18n(app):
not_translated = 'code hello i18n'
@@ -185,29 +187,19 @@ def test_i18n(journalist_app, config):
'--extract-update',
])
- i18n_tool.sh("""
- pybabel init -i {d}/messages.pot -d {d} -l en_US
-
- pybabel init -i {d}/messages.pot -d {d} -l fr_FR
- sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code bonjour"/' \
- {d}/fr_FR/LC_MESSAGES/messages.po
-
- pybabel init -i {d}/messages.pot -d {d} -l zh_Hans_CN
- sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code chinese"/' \
- {d}/zh_Hans_CN/LC_MESSAGES/messages.po
-
- pybabel init -i {d}/messages.pot -d {d} -l ar
- sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code arabic"/' \
- {d}/ar/LC_MESSAGES/messages.po
-
- pybabel init -i {d}/messages.pot -d {d} -l nb_NO
- sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code norwegian"/' \
- {d}/nb_NO/LC_MESSAGES/messages.po
-
- pybabel init -i {d}/messages.pot -d {d} -l es_ES
- sed -i -e '/code hello i18n/,+1s/msgstr ""/msgstr "code spanish"/' \
- {d}/es_ES/LC_MESSAGES/messages.po
- """.format(d=config.TEMP_DIR))
+ pot = os.path.join(config.TEMP_DIR, 'messages.pot')
+ pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', 'en_US')
+
+ for (l, s) in (('fr_FR', 'code bonjour'),
+ ('zh_Hans_CN', 'code chinese'),
+ ('ar', 'code arabic'),
+ ('nb_NO', 'code norwegian'),
+ ('es_ES', 'code spanish')):
+ pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', l)
+ po = os.path.join(config.TEMP_DIR, l, 'LC_MESSAGES/messages.po')
+ sed('-i', '-e',
+ '/code hello i18n/,+1s/msgstr ""/msgstr "{}"/'.format(s),
+ po)
i18n_tool.I18NTool().main([
'--verbose',
diff --git a/securedrop/tests/test_i18n_tool.py b/securedrop/tests/test_i18n_tool.py
--- a/securedrop/tests/test_i18n_tool.py
+++ b/securedrop/tests/test_i18n_tool.py
@@ -4,15 +4,15 @@
import os
from os.path import abspath, dirname, exists, getmtime, join, realpath
os.environ['SECUREDROP_ENV'] = 'test' # noqa
-import logging
import i18n_tool
from mock import patch
import pytest
import shutil
import signal
-import subprocess
import time
+from sh import sed, msginit, pybabel, git, touch
+
class TestI18NTool(object):
@@ -32,21 +32,6 @@ def test_main(self, tmpdir, caplog):
'--translations-dir', str(tmpdir)
]) == signal.SIGINT
- assert tool.main([
- 'translate-messages',
- '--translations-dir', str(tmpdir),
- '--extract-update'
- ]) is None
- assert 'pybabel extract' not in caplog.text
-
- assert tool.main([
- '--verbose',
- 'translate-messages',
- '--translations-dir', str(tmpdir),
- '--extract-update'
- ]) is None
- assert 'pybabel extract' in caplog.text
-
def test_translate_desktop_l10n(self, tmpdir):
in_files = {}
for what in ('source', 'journalist'):
@@ -89,17 +74,15 @@ def test_translate_desktop_l10n(self, tmpdir):
locale = 'fr_FR'
po_file = join(str(tmpdir), locale + ".po")
- i18n_tool.sh("""
- msginit --no-translator \
- --locale {locale} \
- --output {po_file} \
- --input {messages_file}
- sed -i -e '/{source}/,+1s/msgstr ""/msgstr "SOURCE FR"/' \
- {po_file}
- """.format(source='SecureDrop Source Interfaces',
- messages_file=messages_file,
- po_file=po_file,
- locale=locale))
+ msginit(
+ '--no-translator',
+ '--locale', locale,
+ '--output', po_file,
+ '--input', messages_file)
+ source = 'SecureDrop Source Interfaces'
+ sed('-i', '-e',
+ '/{}/,+1s/msgstr ""/msgstr "SOURCE FR"/'.format(source),
+ po_file)
assert exists(po_file)
#
@@ -146,11 +129,7 @@ def test_translate_messages_l10n(self, tmpdir):
locale = 'en_US'
locale_dir = join(str(tmpdir), locale)
- i18n_tool.sh("pybabel init -i {} -d {} -l {}".format(
- messages_file,
- str(tmpdir),
- locale,
- ))
+ pybabel('init', '-i', messages_file, '-d', str(tmpdir), '-l', locale)
mo_file = join(locale_dir, 'LC_MESSAGES/messages.mo')
assert not exists(mo_file)
i18n_tool.I18NTool().main(args)
@@ -180,11 +159,7 @@ def test_translate_messages_compile_arg(self, tmpdir):
locale = 'en_US'
locale_dir = join(str(tmpdir), locale)
po_file = join(locale_dir, 'LC_MESSAGES/messages.po')
- i18n_tool.sh("pybabel init -i {} -d {} -l {}".format(
- messages_file,
- str(tmpdir),
- locale,
- ))
+ pybabel(['init', '-i', messages_file, '-d', str(tmpdir), '-l', locale])
assert exists(po_file)
# pretend this happened a few seconds ago
few_seconds_ago = time.time() - 60
@@ -224,32 +199,26 @@ def test_translate_messages_compile_arg(self, tmpdir):
assert 'template hello i18n' not in mo
def test_require_git_email_name(self, tmpdir):
- i18n_tool.sh("""
- cd {dir}
- git init
- """.format(dir=str(tmpdir)))
+ k = {'_cwd': str(tmpdir)}
+ git('init', **k)
with pytest.raises(Exception) as excinfo:
i18n_tool.I18NTool.require_git_email_name(str(tmpdir))
assert 'please set name' in excinfo.value.message
- i18n_tool.sh("""
- cd {dir}
- git config user.email "[email protected]"
- git config user.name "Your Name"
- """.format(dir=str(tmpdir)))
+
+ git.config('user.email', "[email protected]", **k)
+ git.config('user.name', "Your Name", **k)
assert i18n_tool.I18NTool.require_git_email_name(str(tmpdir))
def test_update_docs(self, tmpdir, caplog):
- os.makedirs(join(str(tmpdir), 'includes'))
- i18n_tool.sh("""
- cd {dir}
- git init
- git config user.email "[email protected]"
- git config user.name "Your Name"
- mkdir includes
- touch includes/l10n.txt
- git add includes/l10n.txt
- git commit -m 'init'
- """.format(dir=str(tmpdir)))
+ k = {'_cwd': str(tmpdir)}
+ git.init(**k)
+ git.config('user.email', "[email protected]", **k)
+ git.config('user.name', "Your Name", **k)
+ os.mkdir(join(str(tmpdir), 'includes'))
+ touch('includes/l10n.txt', **k)
+ git.add('includes/l10n.txt', **k)
+ git.commit('-m', 'init', **k)
+
i18n_tool.I18NTool().main([
'--verbose',
'update-docs',
@@ -264,25 +233,25 @@ def test_update_docs(self, tmpdir, caplog):
def test_update_from_weblate(self, tmpdir, caplog):
d = str(tmpdir)
- i18n_tool.sh("""
- set -ex
- for r in i18n securedrop ; do
- mkdir {d}/$r
- cd {d}/$r
- git init
- git config user.email "[email protected]"
- git config user.name "LoΓ―c NordhΓΈy"
- touch README.md
- git add README.md
- git commit -m 'README' README.md
- done
- cp -a {o}/i18n/* {d}/i18n
- cd {d}/i18n
- git add securedrop install_files
- git commit -m 'init' -a
- git checkout -b i18n master
- """.format(o=self.dir,
- d=d))
+ for repo in ('i18n', 'securedrop'):
+ os.mkdir(join(d, repo))
+ k = {'_cwd': join(d, repo)}
+ git.init(**k)
+ git.config('user.email', '[email protected]', **k)
+ git.config('user.name', u'LoΓ―c NordhΓΈy', **k)
+ touch('README.md', **k)
+ git.add('README.md', **k)
+ git.commit('-m', 'README', 'README.md', **k)
+ for o in os.listdir(join(self.dir, 'i18n')):
+ f = join(self.dir, 'i18n', o)
+ if os.path.isfile(f):
+ shutil.copyfile(f, join(d, 'i18n', o))
+ else:
+ shutil.copytree(f, join(d, 'i18n', o))
+ k = {'_cwd': join(d, 'i18n')}
+ git.add('securedrop', 'install_files', **k)
+ git.commit('-m', 'init', '-a', **k)
+ git.checkout('-b', 'i18n', 'master', **k)
def r():
return "".join([str(l) for l in caplog.records])
@@ -330,26 +299,24 @@ def r():
])
assert 'l10n: updated nl' not in r()
assert 'l10n: updated de_DE' not in r()
- message = i18n_tool.sh("git -C {d}/securedrop show".format(d=d))
+ message = unicode(git('--no-pager', '-C', 'securedrop', 'show',
+ _cwd=d, _encoding='utf-8'))
assert u"LoΓ―c" in message
#
# an update is done to nl in weblate
#
- i18n_tool.sh("""
- set -ex
- cd {d}/i18n
- f=securedrop/translations/nl/LC_MESSAGES/messages.po
- sed -i -e 's/inactiviteit/INACTIVITEIT/' $f
- git add $f
- git config user.email "[email protected]"
- git config user.name "Someone Else"
- git commit -m 'translation change' $f
-
- cd {d}/securedrop
- git config user.email "[email protected]"
- git config user.name "Someone Else"
- """.format(d=d))
+ k = {'_cwd': join(d, 'i18n')}
+ f = 'securedrop/translations/nl/LC_MESSAGES/messages.po'
+ sed('-i', '-e', 's/inactiviteit/INACTIVITEIT/', f, **k)
+ git.add(f, **k)
+ git.config('user.email', '[email protected]', **k)
+ git.config('user.name', 'Someone Else', **k)
+ git.commit('-m', 'translation change', f, **k)
+
+ k = {'_cwd': join(d, 'securedrop')}
+ git.config('user.email', '[email protected]', **k)
+ git.config('user.name', 'Someone Else', **k)
#
# the nl translation update from weblate is copied
@@ -365,42 +332,7 @@ def r():
])
assert 'l10n: updated nl' in r()
assert 'l10n: updated de_DE' not in r()
- message = i18n_tool.sh("git -C {d}/securedrop show".format(d=d))
+ message = unicode(git('--no-pager', '-C', 'securedrop', 'show',
+ _cwd=d))
assert "Someone Else" in message
assert u"LoΓ―c" not in message
-
-
-class TestSh(object):
-
- def test_sh(self):
- assert 'A' == i18n_tool.sh("echo -n A")
- with pytest.raises(Exception) as excinfo:
- i18n_tool.sh("exit 123")
- assert excinfo.value.returncode == 123
-
- def test_sh_progress(self, caplog):
- i18n_tool.sh("echo AB ; sleep 5 ; echo C")
- records = caplog.records
- assert ':sh: ' in records[0].message
- assert records[0].levelname == 'DEBUG'
- assert 'AB' == records[1].message
- assert records[1].levelname == 'DEBUG'
- assert 'C' == records[2].message
- assert records[2].levelname == 'DEBUG'
-
- def test_sh_input(self, caplog):
- assert 'abc' == i18n_tool.sh("cat", 'abc')
-
- def test_sh_fail(self, caplog):
- level = i18n_tool.log.getEffectiveLevel()
- i18n_tool.log.setLevel(logging.INFO)
- assert i18n_tool.log.getEffectiveLevel() == logging.INFO
- with pytest.raises(subprocess.CalledProcessError) as excinfo:
- i18n_tool.sh("echo AB ; echo C ; exit 111")
- i18n_tool.log.setLevel(level)
- assert excinfo.value.returncode == 111
- records = caplog.records
- assert 'AB' == records[0].message
- assert records[0].levelname == 'ERROR'
- assert 'C' == records[1].message
- assert records[1].levelname == 'ERROR'
diff --git a/securedrop/tests/test_template_filters.py b/securedrop/tests/test_template_filters.py
--- a/securedrop/tests/test_template_filters.py
+++ b/securedrop/tests/test_template_filters.py
@@ -11,6 +11,8 @@
import source_app
import template_filters
+from sh import pybabel
+
def verify_rel_datetime_format(app):
with app.test_client() as c:
@@ -103,10 +105,9 @@ def do_test(config, create_app):
'--compile',
])
- i18n_tool.sh("""
- pybabel init -i {d}/messages.pot -d {d} -l en_US
- pybabel init -i {d}/messages.pot -d {d} -l fr_FR
- """.format(d=config.TEMP_DIR))
+ for l in ('en_US', 'fr_FR'):
+ pot = os.path.join(config.TEMP_DIR, 'messages.pot')
+ pybabel('init', '-i', pot, '-d', config.TEMP_DIR, '-l', l)
app = create_app(config)
| Improve input validation in `manage.py`
# Feature request
## Description
* The [sh](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/manage.py:L35) function as well as the translate function it [helps](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/manage.py:L35) could benefit from further validation of the [messages.pot](https://github.com/freedomofpress/securedrop/blob/develop/securedrop/translations/messages.pot) file before invoking `sh`.
* Two commands appear to be invoked with an unneeded shell
https://github.com/freedomofpress/securedrop/blob/develop/securedrop/manage.py:L315
https://github.com/freedomofpress/securedrop/blob/develop/securedrop/manage.py:L362
However, removing the `shell=True` causes `tests/test_i18n.py::TestI18N::test_i18n` to fail with `OSError: [Errno 2] No such file or directory.`
While these features are only used by translation maintainers, they require maintainers to be diligent in handling translation files while merging them into the codebase (see https://github.com/freedomofpress/securedrop/pull/3042).
## User Stories
As a translations maintainer, I was to ensure that malicious code within a translations file does not not execute unwanted code.
| Marking this as `easy` since this is relatively contained to a single part of the code even though solving it might be a bit of a challenge.
I would like to work on this issue.
@usmanmuhd98 consider yourself assigned!
I was going through the code. It is that the shell is invoked for the git operations which do not run when shell=False. To avoid that problem, I think the git library in python may help.
If not, how else should I validate the input?
I would personally reject a PR that pulled in the whole git lib for this because that's too much for this simple thing. I think we can do some manual validation ourselves and (ideally) yank the `shell=True` line from the subprocess call.
@usmanmuhd98 what is the status of this issue? Let me know If you are not working then i would like to give it a shot.
@Anonymous26 I think you can give it a shot since there has not been activity in the past two weeks. @usmanmuhd98 if you're also working on it, please speak up now :-)
@Anonymous26 You can go ahead.
@dachary I am caught up with quite a lot of college work. Maybe I can take up a different issue once I am free.
Presently working on this during PyCon 2018 Sprints!
End of day update on this issue:
Basically, the `sh()` method is referenced in a number of places (including the tests), so swapping to `shell=False` isn't nearly as easy as it would seem on the surface. I've started re-writing a few commands at a time to some success (eg, the `pybabel` call in `translate_messages()`), but a better approach may be to refactor the commands to use `os` python functions for file manipulation and only call `subprocess.Popen` in specific instances.
An even more robust approach would involve directly calling the [`babel.messages` low-level extraction interface](http://babel.pocoo.org/en/latest/api/messages/extract.html) instead of the command-line interface for babel. The API to directly call babel's `extract` isn't as nice as the command-line interface, so it's not as big of a deal.
I also noted that babel has some explicit documentation for adding it to a distutils setup: http://babel.pocoo.org/en/latest/setup.html#extract-messages which may be an alternative approach to importing translations.
I've only started looking at securedrop's code today, so I may be missing out on some important historic reasons for adding a separate util for handling translations.
One last note: In my humble opinion I don't think this should be labeled as `easy`. There are a few hidden moving parts which complicate this issue.
I'll take another crack at this tomorrow (EDT) and see what I can come up with, but I figured I'd get these observations down in case someone else starts down this issue.
@octaflop I agree this is not easy. The i18n_tool.py helper is meant to abstract the gory details of i18n for scripting. Back when @emkll created this issue, this script was part of manage.py which is a script deployed with SecureDrop and used in production. It deserves a lot of attention and my guess is that it is the primary reason why avoiding using shell snippets was suggested.
However i18n_tool.py was later extracted from manage.py and is never used in production, only for development. It may be worth reconsidering the benefits of removing the shell snippets in favor of more complicated and possibly more secure python equivalents. | 2018-05-15T20:08:40Z | [] | [] |
freedomofpress/securedrop | 3,429 | freedomofpress__securedrop-3429 | [
"3426"
] | f6e62d6285c633eba7295bed376419d0b7b7c54e | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -588,7 +588,8 @@ def check_for_updates(args):
sdlog.info("Checking for SecureDrop updates...")
# Determine what branch we are on
- current_tag = subprocess.check_output(['git', 'describe'], cwd=args.root)
+ current_tag = subprocess.check_output(['git', 'describe'],
+ cwd=args.root).rstrip('\n')
# Fetch all branches
git_fetch_cmd = ['git', 'fetch', '--all']
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -66,6 +66,21 @@ def test_check_for_updates_update_needed(self, tmpdir, caplog):
assert update_status is True
assert tag == '0.6.1'
+ def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog):
+ """Regression test for #3426"""
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+ current_tag = "0.6.1\n"
+ tags_available = "0.6\n0.6-rc1\n0.6.1\n"
+
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ side_effect=[current_tag, tags_available]):
+ update_status, tag = securedrop_admin.check_for_updates(args)
+ assert "All updates applied" in caplog.text
+ assert update_status is False
+ assert tag == '0.6.1'
+
def test_check_for_updates_update_not_needed(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
args = argparse.Namespace(root=git_repo_path)
| [QA] Tails GUI updater reporting new versions
# Bug
## Description
The 0.7.0 GUI updater reports new versions even when it's running the latest. Just ran a pre-flight check with the 0.7.0 tag, checked out inside a Tails VM. The install portion completed fine. So did `./securedrop-admin tailsconfig`. However, after finishing, it popped up the GUI declaring there were new updates. Which there definitely should not be, given that 0.7.0 is the latest release.
After a reboot of the Tails VM, the GUI updater displayed again, prompting to install updates. I accepted. Here's the detailed output: https://gist.github.com/conorsch/2e2da8fb909df067b693949474ef945c
## Steps to Reproduce
See above.
## Expected Behavior
0.7.0 is determined to be latest release; no further prompting.
## Actual Behavior
Prompts for updates even though 0.7.0 is latest release.
## Comments
| The GUI only appears when `./securedrop-admin check_for_updates` reports that updates are needed, so there is something wrong with that logic.. | 2018-05-15T22:07:28Z | [] | [] |
freedomofpress/securedrop | 3,430 | freedomofpress__securedrop-3430 | [
"3429"
] | 931f6d7556bf4ff88f1638efb7e9853b708eb26d | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -588,7 +588,8 @@ def check_for_updates(args):
sdlog.info("Checking for SecureDrop updates...")
# Determine what branch we are on
- current_tag = subprocess.check_output(['git', 'describe'], cwd=args.root)
+ current_tag = subprocess.check_output(['git', 'describe'],
+ cwd=args.root).rstrip('\n')
# Fetch all branches
git_fetch_cmd = ['git', 'fetch', '--all']
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -66,6 +66,21 @@ def test_check_for_updates_update_needed(self, tmpdir, caplog):
assert update_status is True
assert tag == '0.6.1'
+ def test_check_for_updates_ensure_newline_stripped(self, tmpdir, caplog):
+ """Regression test for #3426"""
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+ current_tag = "0.6.1\n"
+ tags_available = "0.6\n0.6-rc1\n0.6.1\n"
+
+ with mock.patch('subprocess.check_call'):
+ with mock.patch('subprocess.check_output',
+ side_effect=[current_tag, tags_available]):
+ update_status, tag = securedrop_admin.check_for_updates(args)
+ assert "All updates applied" in caplog.text
+ assert update_status is False
+ assert tag == '0.6.1'
+
def test_check_for_updates_update_not_needed(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
args = argparse.Namespace(root=git_repo_path)
| Fix check_for_updates
## Status
Ready for review
## Description of Changes
Fixes #3426.
Changes proposed in this pull request:
* Strips newline from `current_tag` in `securedrop-admin check_for_updates`
## Test plan
All of the following should be done in Tails.
### No updates needed case
0. Check out 0.6
1. Apply the diff in `admin/securedrop_admin/__init__.py` to 0.6
2. Run `./securedrop-admin check_for_updates`
You should see that updates are not needed: `INFO: All updates applied`
### Updates needed case
0. Check out a random branch that is not this one
1. Add a new tag locally that has a higher version number than the current prod version of SecureDrop, e.g. 0.7.0
2. Switch back to this branch
3. Run `./securedrop-admin check_for_updates`
You should see that updates are needed: `INFO: Update needed`
## Deployment
Will go out in workstation update
## Checklist
### If you made changes to `securedrop-admin`:
- [x] Linting and tests (`make -C admin test`) pass in the admin development container
| 2018-05-15T23:06:25Z | [] | [] |
|
freedomofpress/securedrop | 3,448 | freedomofpress__securedrop-3448 | [
"3303"
] | 2dd7bbe177fb4c99c8b9ad9cc6efc59c9b3574df | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -23,6 +23,7 @@
instances.
"""
+from __future__ import print_function
import argparse
import logging
import os
@@ -38,6 +39,9 @@
sdlog = logging.getLogger(__name__)
RELEASE_KEY = '22245C81E3BAEB4138B36061310F561200F4AD77'
+EXIT_SUCCESS = 0
+EXIT_SUBPROCESS_ERROR = 1
+EXIT_INTERRUPT = 2
class FingerprintException(Exception):
@@ -788,17 +792,23 @@ def main(argv):
setup_logger(args.v)
if args.v:
return_code = args.func(args)
- sys.exit(return_code)
+ if return_code != 0:
+ sys.exit(EXIT_SUBPROCESS_ERROR)
else:
try:
return_code = args.func(args)
except KeyboardInterrupt:
- sys.exit(-1)
+ print('Process was interrupted.')
+ sys.exit(EXIT_INTERRUPT)
+ except subprocess.CalledProcessError as e:
+ print('ERROR (run with -v for more): {msg}'.format(msg=e),
+ file=sys.stderr)
+ sys.exit(EXIT_SUBPROCESS_ERROR)
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
else:
- sys.exit(return_code)
+ sys.exit(EXIT_SUCCESS)
if __name__ == "__main__":
| diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -295,6 +295,33 @@ def test_no_signature_on_update(self, tmpdir, caplog):
assert "Updated to SecureDrop" not in caplog.text
assert ret_code != 0
+ def test_exit_codes(self, tmpdir):
+ """Ensure that securedrop-admin returns the correct
+ exit codes for success or failure."""
+ with mock.patch(
+ 'securedrop_admin.install_securedrop',
+ return_value=True):
+ with pytest.raises(SystemExit) as e:
+ securedrop_admin.main(
+ ['--root', str(tmpdir), 'install'])
+ assert e.value.code == securedrop_admin.EXIT_SUCCESS
+
+ with mock.patch(
+ 'securedrop_admin.install_securedrop',
+ side_effect=subprocess.CalledProcessError(1, 'TestError')):
+ with pytest.raises(SystemExit) as e:
+ securedrop_admin.main(
+ ['--root', str(tmpdir), 'install'])
+ assert e.value.code == securedrop_admin.EXIT_SUBPROCESS_ERROR
+
+ with mock.patch(
+ 'securedrop_admin.install_securedrop',
+ side_effect=KeyboardInterrupt):
+ with pytest.raises(SystemExit) as e:
+ securedrop_admin.main(
+ ['--root', str(tmpdir), 'install'])
+ assert e.value.code == securedrop_admin.EXIT_INTERRUPT
+
class TestSiteConfig(object):
| Improve return codes in securedrop-admin
## Description
I noticed while making a change in #3300 that the `return 1` in `securedrop-admin update` did not produce an eventual `sys.exit(1)` as I expected - it always returned `sys.exit(0)`. Looking at the logic in `securedrop-admin/__init__.py`:
```
try:
args.func(args)
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
else:
sys.exit(0)
```
it looks like we `sys.exit(0)` unless there is an exception, i.e. we're not passing the return codes through to `sys.exit()`. Also, we `sys.exit(0)` for a `KeyboardInterrupt`.
A suggested improvement is to exit non-zero for a `KeyboardInterrupt` and to pass the return codes through to `sys.exit()` (@kushaldas let me know if you have additional thoughts on this). I think this is a small but worthwhile change (I expect most of the diff will be in the unit tests), and I advocate that we resolve it during the extended (2 week) QA period.
| Reopening for more improvements in 0.8 as described in [this comment](https://github.com/freedomofpress/securedrop/pull/3327#discussion_r185136483) | 2018-05-16T23:58:55Z | [] | [] |
freedomofpress/securedrop | 3,458 | freedomofpress__securedrop-3458 | [
"3399"
] | aee07c8f5bf7789381155d461673f2c69147cc50 | diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -6,9 +6,9 @@
import io
import scrypt
import subprocess
+from random import SystemRandom
from base64 import b32encode
-from Cryptodome.Random import random
from flask import current_app
from gnupg._util import _is_stream, _make_binary_stream
@@ -24,6 +24,10 @@
# to fix gpg error #78 on production
os.environ['USERNAME'] = 'www-data'
+# SystemRandom sources from the system rand (e.g. urandom, CryptGenRandom, etc)
+# It supplies a CSPRNG but with an interface that supports methods like choice
+random = SystemRandom()
+
class CryptoException(Exception):
pass
diff --git a/securedrop/secure_tempfile.py b/securedrop/secure_tempfile.py
--- a/securedrop/secure_tempfile.py
+++ b/securedrop/secure_tempfile.py
@@ -5,9 +5,11 @@
from tempfile import _TemporaryFileWrapper
from gnupg._util import _STREAMLIKE_TYPES
-from Cryptodome.Cipher import AES
-from Cryptodome.Random import random
-from Cryptodome.Util import Counter
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import CTR
+from cryptography.hazmat.primitives.ciphers import Cipher
class SecureTemporaryFile(_TemporaryFileWrapper, object):
@@ -60,17 +62,16 @@ def create_key(self):
https://github.com/freedomofpress/securedrop/pull/477#issuecomment-168445450).
"""
self.key = os.urandom(self.AES_key_size / 8)
- self.iv = random.getrandbits(self.AES_block_size)
+ self.iv = os.urandom(self.AES_block_size / 8)
self.initialize_cipher()
def initialize_cipher(self):
"""Creates the cipher-related objects needed for AES-CTR
encryption and decryption.
"""
- self.ctr_e = Counter.new(self.AES_block_size, initial_value=self.iv)
- self.ctr_d = Counter.new(self.AES_block_size, initial_value=self.iv)
- self.encryptor = AES.new(self.key, AES.MODE_CTR, counter=self.ctr_e)
- self.decryptor = AES.new(self.key, AES.MODE_CTR, counter=self.ctr_d)
+ self.cipher = Cipher(AES(self.key), CTR(self.iv), default_backend())
+ self.encryptor = self.cipher.encryptor()
+ self.decryptor = self.cipher.decryptor()
def write(self, data):
"""Write `data` to the secure temporary file. This method may be
@@ -85,7 +86,7 @@ def write(self, data):
if isinstance(data, unicode): # noqa
data = data.encode('utf-8')
- self.file.write(self.encryptor.encrypt(data))
+ self.file.write(self.encryptor.update(data))
def read(self, count=None):
"""Read `data` from the secure temporary file. This method may
@@ -112,9 +113,23 @@ def read(self, count=None):
self.last_action = 'read'
if count:
- return self.decryptor.decrypt(self.file.read(count))
+ return self.decryptor.update(self.file.read(count))
else:
- return self.decryptor.decrypt(self.file.read())
+ return self.decryptor.update(self.file.read())
+
+ def close(self):
+ """The __del__ method in tempfile._TemporaryFileWrapper (which
+ SecureTemporaryFile class inherits from) calls close() when the
+ temporary file is deleted.
+ """
+ try:
+ self.decryptor.finalize()
+ except AlreadyFinalized:
+ pass
+
+ # Since tempfile._TemporaryFileWrapper.close() does other cleanup,
+ # (i.e. deleting the temp file on disk), we need to call it also.
+ super(SecureTemporaryFile, self).close()
# python-gnupg will not recognize our SecureTemporaryFile as a stream-like type
| diff --git a/securedrop/tests/functional/functional_test.py b/securedrop/tests/functional/functional_test.py
--- a/securedrop/tests/functional/functional_test.py
+++ b/securedrop/tests/functional/functional_test.py
@@ -9,7 +9,6 @@
import traceback
import requests
-from Cryptodome import Random
from datetime import datetime
from multiprocessing import Process
from os.path import abspath, dirname, join, realpath
@@ -113,14 +112,6 @@ def setup(self, session_expiration=30):
self.journalist_app = journalist_app.create_app(config)
def start_source_server(app):
- # We call Random.atfork() here because we fork the source and
- # journalist server from the main Python process we use to drive
- # our browser with multiprocessing.Process() below. These child
- # processes inherit the same RNG state as the parent process, which
- # is a problem because they would produce identical output if we
- # didn't re-seed them after forking.
- Random.atfork()
-
config.SESSION_EXPIRATION_MINUTES = self.session_expiration
app.run(
@@ -130,7 +121,6 @@ def start_source_server(app):
threaded=True)
def start_journalist_server(app):
- Random.atfork()
app.run(
port=journalist_port,
debug=True,
| Use python-cryptography in SecureDrop
# Feature request
## Description
[Cryptography](https://cryptography.io/en/latest/) is most used cryptographic module. During PyCon development sprints 2018, many security folks in the Python community suggested to use it inside SecureDrop than any other module.
## User Stories
I am a `SecureDrop` developer and I want to use properly audited code for cryptography in SecureDrop.
| 2018-05-18T23:07:42Z | [] | [] |
|
freedomofpress/securedrop | 3,506 | freedomofpress__securedrop-3506 | [
"2918"
] | 300915d8a2be8ed936f0de2359580357824aabea | diff --git a/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py b/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py
new file mode 100644
--- /dev/null
+++ b/securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py
@@ -0,0 +1,53 @@
+"""added passphrase_hash column to journalists table
+
+Revision ID: 2d0ce3ee5bdc
+Revises: fccf57ceef02
+Create Date: 2018-06-08 15:08:37.718268
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '2d0ce3ee5bdc'
+down_revision = 'fccf57ceef02'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ op.add_column('journalists', sa.Column('passphrase_hash', sa.String(length=256), nullable=True))
+
+
+def downgrade():
+ # sqlite has no `drop column` command, so we recreate the original table
+ # then load it from a temp table
+
+ op.rename_table('journalists', 'journalists_tmp')
+
+ op.create_table('journalists',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('username', sa.String(length=255), nullable=False),
+ sa.Column('pw_salt', sa.Binary(), nullable=True),
+ sa.Column('pw_hash', sa.Binary(), nullable=True),
+ sa.Column('is_admin', sa.Boolean(), nullable=True),
+ sa.Column('otp_secret', sa.String(length=16), nullable=True),
+ sa.Column('is_totp', sa.Boolean(), nullable=True),
+ sa.Column('hotp_counter', sa.Integer(), nullable=True),
+ sa.Column('last_token', sa.String(length=6), nullable=True),
+ sa.Column('created_on', sa.DateTime(), nullable=True),
+ sa.Column('last_access', sa.DateTime(), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('username')
+ )
+
+ conn = op.get_bind()
+ conn.execute('''
+ INSERT INTO journalists
+ SELECT id, username, pw_salt, pw_hash, is_admin, otp_secret, is_totp,
+ hotp_counter, last_token, created_on, last_access
+ FROM journalists_tmp
+ ''')
+
+ op.drop_table('journalists_tmp')
diff --git a/securedrop/create-dev-data.py b/securedrop/create-dev-data.py
--- a/securedrop/create-dev-data.py
+++ b/securedrop/create-dev-data.py
@@ -14,15 +14,12 @@
def add_test_user(username, password, otp_secret, is_admin=False):
context = journalist_app.create_app(config).app_context()
context.push()
- valid_password = "correct horse battery staple profanity oil chewy"
try:
user = Journalist(username=username,
- password=valid_password,
+ password=password,
is_admin=is_admin)
user.otp_secret = otp_secret
- user.pw_salt = user._gen_salt()
- user.pw_hash = user._scrypt_hash(password, user.pw_salt)
db.session.add(user)
db.session.commit()
print('Test user successfully added: '
@@ -72,7 +69,7 @@ def create_source_and_submissions(num_submissions=2):
if __name__ == "__main__": # pragma: no cover
# Add two test users
- test_password = "WEjwn8ZyczDhQSK24YKM8C9a"
+ test_password = "correct horse battery staple profanity oil chewy"
test_otp_secret = "JHCOGO7VCER3EJ4L"
add_test_user("journalist",
diff --git a/securedrop/models.py b/securedrop/models.py
--- a/securedrop/models.py
+++ b/securedrop/models.py
@@ -19,6 +19,7 @@
from flask import current_app, url_for
from itsdangerous import TimedJSONWebSignatureSerializer, BadData
from jinja2 import Markup
+from passlib.hash import argon2
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Binary
@@ -31,6 +32,8 @@
if os.environ.get('SECUREDROP_ENV') == 'test':
LOGIN_HARDENING = False
+ARGON2_PARAMS = dict(memory_cost=2**16, rounds=4, parallelism=2)
+
def get_one_or_else(query, logger, failure_method):
try:
@@ -269,13 +272,13 @@ class InvalidPasswordLength(PasswordError):
password length.
"""
- def __init__(self, password):
- self.pw_len = len(password)
+ def __init__(self, passphrase):
+ self.passphrase_len = len(passphrase)
def __str__(self):
- if self.pw_len > Journalist.MAX_PASSWORD_LEN:
- return "Password too long (len={})".format(self.pw_len)
- if self.pw_len < Journalist.MIN_PASSWORD_LEN:
+ if self.passphrase_len > Journalist.MAX_PASSWORD_LEN:
+ return "Password too long (len={})".format(self.passphrase_len)
+ if self.passphrase_len < Journalist.MIN_PASSWORD_LEN:
return "Password needs to be at least {} characters".format(
Journalist.MIN_PASSWORD_LEN
)
@@ -302,6 +305,7 @@ class Journalist(db.Model):
created_on = Column(DateTime, default=datetime.datetime.utcnow)
last_access = Column(DateTime)
+ passphrase_hash = Column(String(256))
login_attempts = relationship(
"JournalistLoginAttempt",
backref="journalist")
@@ -322,28 +326,31 @@ def __repr__(self):
self.username,
" [admin]" if self.is_admin else "")
- def _gen_salt(self, salt_bytes=32):
- return os.urandom(salt_bytes)
-
- _SCRYPT_PARAMS = dict(N=2**14, r=8, p=1)
+ _LEGACY_SCRYPT_PARAMS = dict(N=2**14, r=8, p=1)
- def _scrypt_hash(self, password, salt, params=None):
- if not params:
- params = self._SCRYPT_PARAMS
- return scrypt.hash(str(password), salt, **params)
+ def _scrypt_hash(self, password, salt):
+ return scrypt.hash(str(password), salt, **self._LEGACY_SCRYPT_PARAMS)
MAX_PASSWORD_LEN = 128
MIN_PASSWORD_LEN = 14
- def set_password(self, password):
- self.check_password_acceptable(password)
+ def set_password(self, passphrase):
+ self.check_password_acceptable(passphrase)
+
+ # "migrate" from the legacy case
+ if not self.passphrase_hash:
+ self.passphrase_hash = \
+ argon2.using(**ARGON2_PARAMS).hash(passphrase)
+ # passlib creates one merged field that embeds randomly generated
+ # salt in the output like $alg$salt$hash
+ self.pw_hash = None
+ self.pw_salt = None
# Don't do anything if user's password hasn't changed.
- if self.pw_hash and self.valid_password(password):
+ if self.passphrase_hash and self.valid_password(passphrase):
return
- self.pw_salt = self._gen_salt()
- self.pw_hash = self._scrypt_hash(password, self.pw_salt)
+ self.passphrase_hash = argon2.using(**ARGON2_PARAMS).hash(passphrase)
@classmethod
def check_username_acceptable(cls, username):
@@ -366,15 +373,35 @@ def check_password_acceptable(cls, password):
if len(password.split()) < 7:
raise NonDicewarePassword()
- def valid_password(self, password):
+ def valid_password(self, passphrase):
# Avoid hashing passwords that are over the maximum length
- if len(password) > self.MAX_PASSWORD_LEN:
- raise InvalidPasswordLength(password)
+ if len(passphrase) > self.MAX_PASSWORD_LEN:
+ raise InvalidPasswordLength(passphrase)
+
# No check on minimum password length here because some passwords
- # may have been set prior to setting the minimum password length.
- return pyotp.utils.compare_digest(
- self._scrypt_hash(password, self.pw_salt),
- self.pw_hash)
+ # may have been set prior to setting the mininum password length.
+
+ if self.passphrase_hash:
+ # default case
+ is_valid = argon2.verify(passphrase, self.passphrase_hash)
+ else:
+ # legacy support
+ is_valid = pyotp.utils.compare_digest(
+ self._scrypt_hash(passphrase, self.pw_salt),
+ self.pw_hash)
+
+ # migrate new passwords
+ if is_valid and not self.passphrase_hash:
+ self.passphrase_hash = \
+ argon2.using(**ARGON2_PARAMS).hash(passphrase)
+ # passlib creates one merged field that embeds randomly generated
+ # salt in the output like $alg$salt$hash
+ self.pw_salt = None
+ self.pw_hash = None
+ db.session.add(self)
+ db.session.commit()
+
+ return is_valid
def regenerate_totp_shared_secret(self):
self.otp_secret = pyotp.random_base32()
diff --git a/securedrop/qa_loader.py b/securedrop/qa_loader.py
--- a/securedrop/qa_loader.py
+++ b/securedrop/qa_loader.py
@@ -64,6 +64,12 @@ def new_journalist():
nullable=False),
pw,
random_bool())
+ if random_bool():
+ # to add legacy passwords back in
+ journalist.passphrase_hash = None
+ journalist.pw_salt = random_chars(32, nullable=False)
+ journalist.pw_hash = random_chars(64, nullable=False)
+
journalist.is_admin = bool_or_none()
journalist.is_totp = bool_or_none()
| diff --git a/securedrop/tests/migrations/migration_2d0ce3ee5bdc.py b/securedrop/tests/migrations/migration_2d0ce3ee5bdc.py
new file mode 100644
--- /dev/null
+++ b/securedrop/tests/migrations/migration_2d0ce3ee5bdc.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+
+import random
+import string
+
+from sqlalchemy import text
+from uuid import uuid4
+
+from db import db
+from journalist_app import create_app
+from .helpers import (random_bool, random_chars, random_username, random_bytes,
+ random_datetime, bool_or_none)
+
+random.seed('ᕕ( ᐛ )ᕗ')
+
+
+class Helper():
+
+ @staticmethod
+ def add_source():
+ filesystem_id = random_chars(96) if random_bool() else None
+ params = {
+ 'uuid': str(uuid4()),
+ 'filesystem_id': filesystem_id,
+ 'journalist_designation': random_chars(50),
+ 'flagged': bool_or_none(),
+ 'last_updated': random_datetime(nullable=True),
+ 'pending': bool_or_none(),
+ 'interaction_count': random.randint(0, 1000),
+ }
+ sql = '''INSERT INTO sources (uuid, filesystem_id,
+ journalist_designation, flagged, last_updated, pending,
+ interaction_count)
+ VALUES (:uuid, :filesystem_id, :journalist_designation,
+ :flagged, :last_updated, :pending, :interaction_count)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_journalist_login_attempt(journalist_id):
+ params = {
+ 'timestamp': random_datetime(nullable=True),
+ 'journalist_id': journalist_id,
+ }
+ sql = '''INSERT INTO journalist_login_attempt (timestamp,
+ journalist_id)
+ VALUES (:timestamp, :journalist_id)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def add_reply(journalist_id, source_id):
+ params = {
+ 'journalist_id': journalist_id,
+ 'source_id': source_id,
+ 'filename': random_chars(50),
+ 'size': random.randint(0, 1024 * 1024 * 500),
+ }
+ sql = '''INSERT INTO replies (journalist_id, source_id, filename,
+ size)
+ VALUES (:journalist_id, :source_id, :filename, :size)
+ '''
+ db.engine.execute(text(sql), **params)
+
+ @staticmethod
+ def extract(app):
+ with app.app_context():
+ sql = '''SELECT j.id, count(distinct a.id), count(distinct r.id)
+ FROM journalists AS j
+ LEFT OUTER JOIN journalist_login_attempt AS a
+ ON a.journalist_id = j.id
+ LEFT OUTER JOIN replies AS r
+ ON r.journalist_id = j.id
+ GROUP BY j.id
+ ORDER BY j.id
+ '''
+ res = list(db.session.execute(text(sql)))
+ return res
+
+
+class UpgradeTester(Helper):
+
+ JOURNO_NUM = 100
+
+ def __init__(self, config):
+ self.config = config
+ self.app = create_app(config)
+ self.initial_data = None
+
+ def load_data(self):
+ with self.app.app_context():
+ for _ in range(self.JOURNO_NUM):
+ self.add_journalist()
+
+ self.add_source()
+
+ for jid in range(1, self.JOURNO_NUM):
+ for _ in range(random.randint(1, 3)):
+ self.add_journalist_login_attempt(jid)
+
+ for jid in range(1, self.JOURNO_NUM):
+ self.add_reply(jid, 1)
+
+ db.session.commit()
+ self.initial_data = self.extract(self.app)
+
+ def check_upgrade(self):
+ extracted = self.extract(self.app)
+ assert len(extracted) == self.JOURNO_NUM
+ assert extracted == self.initial_data
+
+ @staticmethod
+ def add_journalist():
+ if random_bool():
+ otp_secret = random_chars(16, string.ascii_uppercase + '234567')
+ else:
+ otp_secret = None
+
+ is_totp = random_bool()
+ if is_totp:
+ hotp_counter = 0 if random_bool() else None
+ else:
+ hotp_counter = random.randint(0, 10000) if random_bool() else None
+
+ last_token = random_chars(6, string.digits) if random_bool() else None
+
+ params = {
+ 'username': random_username(),
+ 'pw_salt': random_bytes(1, 64, nullable=True),
+ 'pw_hash': random_bytes(32, 64, nullable=True),
+ 'is_admin': bool_or_none(),
+ 'otp_secret': otp_secret,
+ 'is_totp': is_totp,
+ 'hotp_counter': hotp_counter,
+ 'last_token': last_token,
+ 'created_on': random_datetime(nullable=True),
+ 'last_access': random_datetime(nullable=True),
+ }
+ sql = '''INSERT INTO journalists (username, pw_salt, pw_hash,
+ is_admin, otp_secret, is_totp, hotp_counter, last_token,
+ created_on, last_access)
+ VALUES (:username, :pw_salt, :pw_hash, :is_admin,
+ :otp_secret, :is_totp, :hotp_counter, :last_token,
+ :created_on, :last_access);
+ '''
+ db.engine.execute(text(sql), **params)
+
+
+class DowngradeTester(Helper):
+
+ JOURNO_NUM = 100
+
+ def __init__(self, config):
+ self.config = config
+ self.app = create_app(config)
+ self.initial_data = None
+
+ def load_data(self):
+ with self.app.app_context():
+ for _ in range(self.JOURNO_NUM):
+ self.add_journalist()
+
+ self.add_source()
+
+ for jid in range(1, self.JOURNO_NUM):
+ for _ in range(random.randint(1, 3)):
+ self.add_journalist_login_attempt(jid)
+
+ for jid in range(1, self.JOURNO_NUM):
+ self.add_reply(jid, 1)
+
+ db.session.commit()
+ self.initial_data = self.extract(self.app)
+
+ def check_downgrade(self):
+ extracted = self.extract(self.app)
+ assert len(extracted) == self.JOURNO_NUM
+ assert extracted == self.initial_data
+
+ @staticmethod
+ def add_journalist():
+ if random_bool():
+ otp_secret = random_chars(16, string.ascii_uppercase + '234567')
+ else:
+ otp_secret = None
+
+ is_totp = random_bool()
+ if is_totp:
+ hotp_counter = 0 if random_bool() else None
+ else:
+ hotp_counter = random.randint(0, 10000) if random_bool() else None
+
+ last_token = random_chars(6, string.digits) if random_bool() else None
+
+ params = {
+ 'username': random_username(),
+ 'pw_salt': random_bytes(1, 64, nullable=True),
+ 'pw_hash': random_bytes(32, 64, nullable=True),
+ 'is_admin': bool_or_none(),
+ 'otp_secret': otp_secret,
+ 'is_totp': is_totp,
+ 'hotp_counter': hotp_counter,
+ 'last_token': last_token,
+ 'created_on': random_datetime(nullable=True),
+ 'last_access': random_datetime(nullable=True),
+ 'passphrase_hash': random_bytes(32, 64, nullable=True)
+ }
+ sql = '''INSERT INTO journalists (username, pw_salt, pw_hash,
+ is_admin, otp_secret, is_totp, hotp_counter, last_token,
+ created_on, last_access, passphrase_hash)
+ VALUES (:username, :pw_salt, :pw_hash, :is_admin,
+ :otp_secret, :is_totp, :hotp_counter, :last_token,
+ :created_on, :last_access, :passphrase_hash);
+ '''
+ db.engine.execute(text(sql), **params)
diff --git a/securedrop/tests/test_alembic.py b/securedrop/tests/test_alembic.py
--- a/securedrop/tests/test_alembic.py
+++ b/securedrop/tests/test_alembic.py
@@ -2,6 +2,7 @@
import os
import pytest
+import re
import subprocess
from alembic.config import Config as AlembicConfig
@@ -20,6 +21,8 @@
for x in os.listdir(MIGRATION_PATH)
if x.endswith('.py')]
+WHITESPACE_REGEX = re.compile('\s*')
+
def list_migrations(cfg_path, head):
cfg = AlembicConfig(cfg_path)
@@ -83,12 +86,12 @@ def ddl_equal(left, right):
if left is None and right is None:
return True
- left = [x for x in left.split('\n') if x]
- right = [x for x in right.split('\n') if x]
+ left = [x for x in WHITESPACE_REGEX.split(left) if x]
+ right = [x for x in WHITESPACE_REGEX.split(right) if x]
- # Strip commas, whitespace, quotes
- left = [x.replace("\"", "").replace(",", "").strip() for x in left]
- right = [x.replace("\"", "").replace(",", "").strip() for x in right]
+ # Strip commas and quotes
+ left = [x.replace("\"", "").replace(",", "") for x in left]
+ right = [x.replace("\"", "").replace(",", "") for x in right]
return sorted(left) == sorted(right)
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -7,6 +7,7 @@
import zipfile
import base64
+from base64 import b64decode
from cStringIO import StringIO
from io import BytesIO
from flask import url_for, escape, session, current_app, g
@@ -1299,6 +1300,50 @@ def test_incorrect_current_password_change(journalist_app, test_journo):
assert 'Incorrect password or two-factor code' in text
+# need a journalist app for the app context
+def test_passphrase_migration_on_verification(journalist_app):
+ salt = b64decode('+mGOQmD5Nnb+mH9gwBoxKRhKZmmJ6BzpmD5YArPHZsY=')
+ journalist = Journalist('test', VALID_PASSWORD)
+
+ # manually set the params
+ hash = journalist._scrypt_hash(VALID_PASSWORD, salt)
+ journalist.passphrase_hash = None
+ journalist.pw_salt = salt
+ journalist.pw_hash = hash
+
+ assert journalist.valid_password(VALID_PASSWORD)
+
+ # check that the migration happened
+ assert journalist.passphrase_hash is not None
+ assert journalist.pw_salt is None
+ assert journalist.pw_hash is None
+
+ # check that that a verification post-migration works
+ assert journalist.valid_password(VALID_PASSWORD)
+
+
+# need a journalist app for the app context
+def test_passphrase_migration_on_reset(journalist_app):
+ salt = b64decode('+mGOQmD5Nnb+mH9gwBoxKRhKZmmJ6BzpmD5YArPHZsY=')
+ journalist = Journalist('test', VALID_PASSWORD)
+
+ # manually set the params
+ hash = journalist._scrypt_hash(VALID_PASSWORD, salt)
+ journalist.passphrase_hash = None
+ journalist.pw_salt = salt
+ journalist.pw_hash = hash
+
+ journalist.set_password(VALID_PASSWORD)
+
+ # check that the migration happened
+ assert journalist.passphrase_hash is not None
+ assert journalist.pw_salt is None
+ assert journalist.pw_hash is None
+
+ # check that that a verification post-migration works
+ assert journalist.valid_password(VALID_PASSWORD)
+
+
class TestJournalistApp(TestCase):
# A method required by flask_testing.TestCase
| Use passlib for password hashing
# Feature request
## Description
Using [`passlib`](https://pythonhosted.org/passlib/) for password hashing will give us future-proof and easily migratable password management should we change password hashing algorithms/parameters.
Possibly conflicts with #1609 depending on what the underlying cryptolib is.
## User Stories
As a dev, I don't want to have to write boilerplate migration code if we bump the number of scrypt rounds.
| The [migration functionality](https://passlib.readthedocs.io/en/stable/narr/context-tutorial.html#deprecation-hash-migration) is very handy, also `passlib` doesn't use `PyCrypto` so it shouldn't cause any issues with respect to #1609.
While it isn't incredibly pressing _yet_, our scrypt parameters for password hashing are starting to show some age, specifically N which we have currently set to `2**14` - [Golang updated](https://go-review.googlesource.com/c/crypto/+/67070) their recommendations to `N=2**15` in 2017. If we were to increase N to `2**15`, we'd also be doubling the memory and CPU resources consumed while hashing, so we could use the migration opportunity to move to a slightly less memory hard password hashing algorithm.
May I work on this?
@Aniq55 absolutely yes :-)
@Aniq55 Any update on this?
Note that this is blocked by https://github.com/freedomofpress/securedrop/issues/1419
@kushaldas I'm on a break currently. You may unassigned me for now. :/
@Aniq55 consider yourself unassigned, thanks for the update.
@dachary can i take this up..
@pwnchief As mentioned above, this is blocked by #1419 (which I'm getting to), but that ticket itself is blocked by at least #3033 and #2948. It will probably be a long while before we can merge anything you work on. I don't want to discourage you, but it can be disheartening to work on something and have the PR sit open for months.
Ohhhhh okay thanks for the recommendation. I will wait for the labels to clear up... | 2018-06-08T16:51:58Z | [] | [] |
freedomofpress/securedrop | 3,540 | freedomofpress__securedrop-3540 | [
"3533"
] | f0aab499c49f54330faef9ea7e3ded56fe85dfd6 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -537,6 +537,7 @@ def setup_logger(verbose=False):
def sdconfig(args):
"""Configure SD site settings"""
SiteConfig(args).load_and_update_config()
+ return 0
def install_securedrop(args):
@@ -807,8 +808,10 @@ def main(argv):
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
- else:
- sys.exit(EXIT_SUCCESS)
+ if return_code == 0:
+ sys.exit(EXIT_SUCCESS)
+ else:
+ sys.exit(EXIT_SUBPROCESS_ERROR)
if __name__ == "__main__":
| diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py
--- a/admin/tests/test_integration.py
+++ b/admin/tests/test_integration.py
@@ -475,6 +475,19 @@ def securedrop_git_repo(tmpdir):
test_name)])
+def set_reliable_keyserver(gpgdir):
+ # If gpg.conf doesn't exist, create it and set a reliable default
+ # keyserver for the tests.
+ gpgconf_path = os.path.join(gpgdir, 'gpg.conf')
+ if not os.path.exists(gpgconf_path):
+ os.mkdir(gpgdir)
+ with open(gpgconf_path, 'a') as f:
+ f.write('keyserver hkp://ipv4.pool.sks-keyservers.net')
+
+ # Ensure correct permissions on .gnupg home directory.
+ os.chmod(gpgdir, 0700)
+
+
# This class is to test all the git related operations.
class TestGitOperations:
def test_check_for_update_when_updates_needed(self, securedrop_git_repo):
@@ -517,17 +530,7 @@ def test_check_for_update_when_updates_not_needed(self,
def test_update(self, securedrop_git_repo):
gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg')
-
- # If gpg.conf doesn't exist, create it and set a reliable default
- # keyserver for the tests.
- gpgconf_path = os.path.join(gpgdir, 'gpg.conf')
- if not os.path.exists(gpgconf_path):
- os.mkdir(gpgdir)
- with open(gpgconf_path, 'a') as f:
- f.write('keyserver hkp://ipv4.pool.sks-keyservers.net')
-
- # Ensure correct permissions on .gnupg home directory.
- os.chmod(gpgdir, 0700)
+ set_reliable_keyserver(gpgdir)
cmd = os.path.join(os.path.dirname(CURRENT_DIR),
'securedrop_admin/__init__.py')
@@ -541,3 +544,33 @@ def test_update(self, securedrop_git_repo):
child.close()
assert child.exitstatus == 0
assert child.signalstatus is None
+
+ def test_update_fails_when_no_signature_present(self, securedrop_git_repo):
+ gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg')
+ set_reliable_keyserver(gpgdir)
+
+ # First we make a very high version tag of SecureDrop so that the
+ # updater will try to update to it. Since the tag is unsigned, it
+ # should fail.
+ subprocess.check_call('git checkout develop'.split())
+ subprocess.check_call('git tag 9999999.0.0'.split())
+
+ # Switch back to an older branch for the test
+ subprocess.check_call('git checkout 0.6'.split())
+
+ cmd = os.path.join(os.path.dirname(CURRENT_DIR),
+ 'securedrop_admin/__init__.py')
+ ansible_base = os.path.join(str(securedrop_git_repo),
+ 'securedrop/install_files/ansible-base')
+ child = pexpect.spawn('coverage run {0} --root {1} update'.format(
+ cmd, ansible_base))
+ output = child.read()
+ assert 'Updated to SecureDrop' not in output
+ assert 'Signature verification failed' in output
+
+ child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
+ child.close()
+
+ # Failures should eventually exit non-zero.
+ assert child.exitstatus != 0
+ assert child.signalstatus != 0
| admin CLI: return 1 should eventually exit with a non-zero return code
# Description
On develop and in the 0.8.0 release branch a `return 1` does not cause the securedrop-admin CLI to eventually exit with a non-zero return code.
# Steps to reproduce
1. It's a little difficult to trigger a `return 1` but one method is to just modify the happy path of `securedrop-admin update` to `return 1` and commit this change in a test branch
2. `securedrop-admin update`
3. `echo $?`
# Expected Behavior
non-zero exit code :)
# Actual Behavior
zero exit code :(
# Comments
This is important to fix during release QA as in 0.7.0 we released the SecureDrop workstation updater, which behind the scenes is calling out to the `securedrop-admin` CLI to do updates, and the logic relies on sensible return codes being returned by the CLI. On the 0.7.0 tag a `return 1` does make `securedrop-admin` eventually exit non-zero.
| Also now that we have integration tests, we should have a regression test that covers this case for the future
@redshiftzero this should fix this.
```
diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
index 597114bd..017b0a3a 100755
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -807,9 +807,10 @@ def main(argv):
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
- else:
- sys.exit(EXIT_SUCCESS)
-
+ if return_code == 0:
+ sys.exit(EXIT_SUCCESS)
+ else:
+ sys.exit(EXIT_SUBPROCESS_ERROR)
if __name__ == "__main__":
main(sys.argv[1:])
```
To test, add `return 1` to the update function in the first line. And then run `./securedrop-admin update` and that should have 1 as exit code.
Sweet, will test today, thanks @kushaldas. We also really should cover this with automated testing (as it would have prevented the introduction of this regression to begin with), another test along the lines of #3481 should do the trick | 2018-06-19T18:50:03Z | [] | [] |
freedomofpress/securedrop | 3,549 | freedomofpress__securedrop-3549 | [
"3460"
] | 902d0baa3a96dd99aeb0c039805382a0384d2abd | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -537,6 +537,7 @@ def setup_logger(verbose=False):
def sdconfig(args):
"""Configure SD site settings"""
SiteConfig(args).load_and_update_config()
+ return 0
def install_securedrop(args):
@@ -807,8 +808,10 @@ def main(argv):
except Exception as e:
raise SystemExit(
'ERROR (run with -v for more): {msg}'.format(msg=e))
- else:
- sys.exit(EXIT_SUCCESS)
+ if return_code == 0:
+ sys.exit(EXIT_SUCCESS)
+ else:
+ sys.exit(EXIT_SUBPROCESS_ERROR)
if __name__ == "__main__":
| diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py
--- a/admin/tests/test_integration.py
+++ b/admin/tests/test_integration.py
@@ -1,6 +1,9 @@
import os
+import io
import pexpect
+import pytest
import re
+import requests
import subprocess
import tempfile
@@ -430,3 +433,144 @@ def test_sdconfig_enable_https_on_source_interface():
with open(os.path.join(SD_DIR, 'install_files/ansible-base/group_vars/all/site-specific')) as fobj: # noqa: E501
data = fobj.read()
assert HTTPS_OUTPUT == data
+
+
+# The following is the minimal git configuration which can be used to fetch
+# from the SecureDrop Github repository. We want to use this because the
+# developers may have the git setup to fetch from [email protected]: instead
+# of the https, and that requires authentication information.
+GIT_CONFIG = u'''[core]
+ repositoryformatversion = 0
+ filemode = true
+ bare = false
+ logallrefupdates = true
+[remote "origin"]
+ url = https://github.com/freedomofpress/securedrop.git
+ fetch = +refs/heads/*:refs/remotes/origin/*
+'''
+
+
[email protected]
+def securedrop_git_repo(tmpdir):
+ os.chdir(str(tmpdir))
+ # Clone the SecureDrop repository into the temp directory.
+ cmd = ['git', 'clone',
+ 'https://github.com/freedomofpress/securedrop.git']
+ subprocess.check_call(cmd)
+ os.chdir(os.path.join(str(tmpdir), 'securedrop/admin'))
+ subprocess.check_call('git reset --hard'.split())
+ # Now we will put in our own git configuration
+ with io.open('../.git/config', 'w') as fobj:
+ fobj.write(GIT_CONFIG)
+ # Let us move to an older tag
+ subprocess.check_call('git checkout 0.6'.split())
+ yield tmpdir
+
+ # Save coverage information in same directory as unit test coverage
+ test_name = str(tmpdir).split('/')[-1]
+ subprocess.check_call(['cp',
+ '{}/securedrop/admin/.coverage'.format(
+ str(tmpdir)),
+ '{}/../.coverage.{}'.format(CURRENT_DIR,
+ test_name)])
+
+
+def set_reliable_keyserver(gpgdir):
+ # If gpg.conf doesn't exist, create it and set a reliable default
+ # keyserver for the tests.
+ gpgconf_path = os.path.join(gpgdir, 'gpg.conf')
+ if not os.path.exists(gpgconf_path):
+ os.mkdir(gpgdir)
+ with open(gpgconf_path, 'a') as f:
+ f.write('keyserver hkp://ipv4.pool.sks-keyservers.net')
+
+ # Ensure correct permissions on .gnupg home directory.
+ os.chmod(gpgdir, 0700)
+
+
+# This class is to test all the git related operations.
+class TestGitOperations:
+ def test_check_for_update_when_updates_needed(self, securedrop_git_repo):
+ cmd = os.path.join(os.path.dirname(CURRENT_DIR),
+ 'securedrop_admin/__init__.py')
+ ansible_base = os.path.join(str(securedrop_git_repo),
+ 'securedrop/install_files/ansible-base')
+ fullcmd = 'coverage run {0} --root {1} check_for_updates'.format(
+ cmd, ansible_base)
+ child = pexpect.spawn(fullcmd)
+ child.expect('Update needed', timeout=20)
+
+ child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
+ child.close()
+ assert child.exitstatus == 0
+ assert child.signalstatus is None
+
+ def test_check_for_update_when_updates_not_needed(self,
+ securedrop_git_repo):
+ # Determine latest production tag using GitHub release object
+ github_url = 'https://api.github.com/repos/freedomofpress/securedrop/releases/latest' # noqa: E501
+ latest_release = requests.get(github_url).json()
+ latest_tag = str(latest_release["tag_name"])
+
+ subprocess.check_call(["git", "checkout", latest_tag])
+
+ cmd = os.path.join(os.path.dirname(CURRENT_DIR),
+ 'securedrop_admin/__init__.py')
+ ansible_base = os.path.join(str(securedrop_git_repo),
+ 'securedrop/install_files/ansible-base')
+ fullcmd = 'coverage run {0} --root {1} check_for_updates'.format(
+ cmd, ansible_base)
+ child = pexpect.spawn(fullcmd)
+ child.expect('All updates applied', timeout=20)
+
+ child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
+ child.close()
+ assert child.exitstatus == 0
+ assert child.signalstatus is None
+
+ def test_update(self, securedrop_git_repo):
+ gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg')
+ set_reliable_keyserver(gpgdir)
+
+ cmd = os.path.join(os.path.dirname(CURRENT_DIR),
+ 'securedrop_admin/__init__.py')
+ ansible_base = os.path.join(str(securedrop_git_repo),
+ 'securedrop/install_files/ansible-base')
+ child = pexpect.spawn('coverage run {0} --root {1} update'.format(
+ cmd, ansible_base))
+ child.expect('Updated to SecureDrop', timeout=100)
+
+ child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
+ child.close()
+ assert child.exitstatus == 0
+ assert child.signalstatus is None
+
+ def test_update_fails_when_no_signature_present(self, securedrop_git_repo):
+ gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg')
+ set_reliable_keyserver(gpgdir)
+
+ # First we make a very high version tag of SecureDrop so that the
+ # updater will try to update to it. Since the tag is unsigned, it
+ # should fail.
+ subprocess.check_call('git checkout develop'.split())
+ subprocess.check_call('git tag 9999999.0.0'.split())
+
+ # Switch back to an older branch for the test
+ subprocess.check_call('git checkout 0.6'.split())
+
+ cmd = os.path.join(os.path.dirname(CURRENT_DIR),
+ 'securedrop_admin/__init__.py')
+ ansible_base = os.path.join(str(securedrop_git_repo),
+ 'securedrop/install_files/ansible-base')
+ child = pexpect.spawn('coverage run {0} --root {1} update'.format(
+ cmd, ansible_base))
+ output = child.read()
+ assert 'Updated to SecureDrop' not in output
+ assert 'Signature verification failed' in output
+
+ child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
+ child.close()
+
+ # Failures should eventually exit non-zero.
+ assert child.exitstatus != 0
+ assert child.signalstatus != 0
| [admin CLI integration testing] securedrop-admin update
## Description
We should have integration tests for the `securedrop-admin update` command. This command has a lot of unit tests, but the tests do a lot of mocking out of subprocess calls, I recommend examining those first.
Parent ticket: #3341
| While working on the integration test, I can see that `gpg` is sometimes failing to reach any keyserver, and throwing some cryptic error messages. Retrying the same a few times solved the issue.
```
INFO: Verifying signature on latest update...
gpg: directory '/tmp/.gnupg' created
gpg: keybox '/tmp/.gnupg/pubring.kbx' created
gpg: keyserver receive failed: Cannot assign requested address
gpg: keyserver receive failed: Cannot assign requested address
ERROR (run with -v for more): Command '['timeout', '45', 'gpg', '--recv-key', '--keyserver', 'hkps://hkps.pool.sks-keyservers.net', '22245C81E3BAEB4138B36061310F561200F4AD77']' returned non-zero exit status 2
```
While searching the error term, I found many other folks saw the same error message, one such [example](https://github.com/nodejs/docker-node/issues/340#issuecomment-321669029).
@redshiftzero @emkll please let me know your suggestions.
I have code ready for PR, but, the above mentioned error is showing up randomly. | 2018-06-21T21:40:45Z | [] | [] |
freedomofpress/securedrop | 3,564 | freedomofpress__securedrop-3564 | [
"3563",
"3563"
] | af5df3009bf70958db8d281a05b3e8674e8adb80 | diff --git a/securedrop/models.py b/securedrop/models.py
--- a/securedrop/models.py
+++ b/securedrop/models.py
@@ -407,6 +407,7 @@ def throttle_login(cls, user):
login_attempt_period = datetime.datetime.utcnow() - \
datetime.timedelta(seconds=cls._LOGIN_ATTEMPT_PERIOD)
attempts_within_period = JournalistLoginAttempt.query.filter(
+ JournalistLoginAttempt.journalist_id == user.id).filter(
JournalistLoginAttempt.timestamp > login_attempt_period).all()
if len(attempts_within_period) > cls._MAX_LOGIN_ATTEMPTS_PER_PERIOD:
raise LoginThrottledException(
| diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -188,6 +188,49 @@ def test_login_throttle(journalist_app, test_journo):
models.LOGIN_HARDENING = False
+def test_login_throttle_is_not_global(journalist_app, test_journo, test_admin):
+ """The login throttling should be per-user, not global. Global login
+ throttling can prevent all users logging into the application."""
+
+ # Overwrite the default value used during testing
+ # Note that this may break other tests if doing parallel testing
+ models.LOGIN_HARDENING = True
+ try:
+ with journalist_app.test_client() as app:
+ for _ in range(Journalist._MAX_LOGIN_ATTEMPTS_PER_PERIOD):
+ resp = app.post(
+ url_for('main.login'),
+ data=dict(username=test_journo['username'],
+ password='invalid',
+ token='invalid'))
+ assert resp.status_code == 200
+ text = resp.data.decode('utf-8')
+ assert "Login failed" in text
+
+ resp = app.post(
+ url_for('main.login'),
+ data=dict(username=test_journo['username'],
+ password='invalid',
+ token='invalid'))
+ assert resp.status_code == 200
+ text = resp.data.decode('utf-8')
+ assert ("Please wait at least {} seconds".format(
+ Journalist._LOGIN_ATTEMPT_PERIOD) in text)
+
+ # A different user should be able to login
+ resp = app.post(
+ url_for('main.login'),
+ data=dict(username=test_admin['username'],
+ password=test_admin['password'],
+ token=TOTP(test_admin['otp_secret']).now()),
+ follow_redirects=True)
+ assert resp.status_code == 200
+ text = resp.data.decode('utf-8')
+ assert "Sources" in text
+ finally:
+ models.LOGIN_HARDENING = False
+
+
def test_login_invalid_credentials(journalist_app, test_journo):
with journalist_app.test_client() as app:
resp = app.post(url_for('main.login'),
| Login throttling should be per-user
# Description
This is a denial of service bug that is fortunately behind the journalist interface ATHS. Login throttling is currently not per-user, it's global, i.e. if a single user is blocked from logging in due to login throttling, then all users are. We should fix this in 0.8.0.
# Steps to reproduce
1. Provision dev env
2. Try to login 6 times with incorrect credentials as journalist user. You will start seeing the following in the log output:
```
--------------------------------------------------------------------------------
ERROR in utils [/Users/redshiftzero/Documents/Github/securedrop/securedrop/journalist_app/utils.py:83]:
Login for 'journalist' failed: throttled (6 attempts in last 60 seconds)
--------------------------------------------------------------------------------
```
3. Try to login for the first time as dellsberg
# Expected behavior
Dellsberg can login and is not impacted by the tricksy behavior of the journalist user
# Actual behavior
Dellsberg is denied access to the journalist interface:
```
172.17.0.1 - - [23/May/2018 23:00:13] "POST /login HTTP/1.1" 200 -
172.17.0.1 - - [23/May/2018 23:00:13] "GET /org-logo HTTP/1.1" 302 -
--------------------------------------------------------------------------------
ERROR in utils [/Users/redshiftzero/Documents/Github/securedrop/securedrop/journalist_app/utils.py:83]:
Login for 'dellsberg' failed: throttled (7 attempts in last 60 seconds)
ββββββββββββββββββββββββββββββββββββββββ
```
# Resolution
The fix here is the addition of an additional filter in the ORM query we use to filter by user when computing `attempts_within_period` in `Journalist.throttle_login`. A fix _must_ include a regression test.
Login throttling should be per-user
# Description
This is a denial of service bug that is fortunately behind the journalist interface ATHS. Login throttling is currently not per-user, it's global, i.e. if a single user is blocked from logging in due to login throttling, then all users are. We should fix this in 0.8.0.
# Steps to reproduce
1. Provision dev env
2. Try to login 6 times with incorrect credentials as journalist user. You will start seeing the following in the log output:
```
--------------------------------------------------------------------------------
ERROR in utils [/Users/redshiftzero/Documents/Github/securedrop/securedrop/journalist_app/utils.py:83]:
Login for 'journalist' failed: throttled (6 attempts in last 60 seconds)
--------------------------------------------------------------------------------
```
3. Try to login for the first time as dellsberg
# Expected behavior
Dellsberg can login and is not impacted by the tricksy behavior of the journalist user
# Actual behavior
Dellsberg is denied access to the journalist interface:
```
172.17.0.1 - - [23/May/2018 23:00:13] "POST /login HTTP/1.1" 200 -
172.17.0.1 - - [23/May/2018 23:00:13] "GET /org-logo HTTP/1.1" 302 -
--------------------------------------------------------------------------------
ERROR in utils [/Users/redshiftzero/Documents/Github/securedrop/securedrop/journalist_app/utils.py:83]:
Login for 'dellsberg' failed: throttled (7 attempts in last 60 seconds)
ββββββββββββββββββββββββββββββββββββββββ
```
# Resolution
The fix here is the addition of an additional filter in the ORM query we use to filter by user when computing `attempts_within_period` in `Journalist.throttle_login`. A fix _must_ include a regression test.
| 2018-06-25T18:00:39Z | [] | [] |
|
freedomofpress/securedrop | 3,568 | freedomofpress__securedrop-3568 | [
"3567"
] | 0a901362b84a5378fba80e9cd0ffe4542bdcd598 | diff --git a/admin/securedrop_admin/__init__.py b/admin/securedrop_admin/__init__.py
--- a/admin/securedrop_admin/__init__.py
+++ b/admin/securedrop_admin/__init__.py
@@ -686,7 +686,23 @@ def update(args):
if RELEASE_KEY in gpg_lines[1] and \
sig_result.count(good_sig_text) == 1 and \
bad_sig_text not in sig_result:
- sdlog.info("Signature verification successful.")
+ # Finally, we check that there is no branch of the same name
+ # prior to reporting success.
+ cmd = ['git', 'show-ref', '--heads', '--verify',
+ 'refs/heads/{}'.format(latest_tag)]
+ try:
+ # We expect this to produce a non-zero exit code, which
+ # will produce a subprocess.CalledProcessError
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ sdlog.info("Signature verification failed.")
+ return 1
+ except subprocess.CalledProcessError, e:
+ if 'not a valid ref' in e.output:
+ # Then there is no duplicate branch.
+ sdlog.info("Signature verification successful.")
+ else: # If any other exception occurs, we bail.
+ sdlog.info("Signature verification failed.")
+ return 1
else: # If anything else happens, fail and exit 1
sdlog.info("Signature verification failed.")
return 1
| diff --git a/admin/tests/test_integration.py b/admin/tests/test_integration.py
--- a/admin/tests/test_integration.py
+++ b/admin/tests/test_integration.py
@@ -538,7 +538,10 @@ def test_update(self, securedrop_git_repo):
'securedrop/install_files/ansible-base')
child = pexpect.spawn('coverage run {0} --root {1} update'.format(
cmd, ansible_base))
- child.expect('Updated to SecureDrop', timeout=100)
+
+ output = child.read()
+ assert 'Updated to SecureDrop' in output
+ assert 'Signature verification successful' in output
child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
child.close()
@@ -574,3 +577,35 @@ def test_update_fails_when_no_signature_present(self, securedrop_git_repo):
# Failures should eventually exit non-zero.
assert child.exitstatus != 0
assert child.signalstatus != 0
+
+ def test_update_with_duplicate_branch_and_tag(self,
+ securedrop_git_repo):
+ gpgdir = os.path.join(os.path.expanduser('~'), '.gnupg')
+ set_reliable_keyserver(gpgdir)
+
+ github_url = 'https://api.github.com/repos/freedomofpress/securedrop/releases/latest' # noqa: E501
+ latest_release = requests.get(github_url).json()
+ latest_tag = str(latest_release["tag_name"])
+
+ # Create a branch with the same name as a tag.
+ subprocess.check_call(['git', 'checkout', '-b', latest_tag])
+ # Checkout the older tag again in preparation for the update.
+ subprocess.check_call('git checkout 0.6'.split())
+
+ cmd = os.path.join(os.path.dirname(CURRENT_DIR),
+ 'securedrop_admin/__init__.py')
+ ansible_base = os.path.join(str(securedrop_git_repo),
+ 'securedrop/install_files/ansible-base')
+
+ child = pexpect.spawn('coverage run {0} --root {1} update'.format(
+ cmd, ansible_base))
+ output = child.read()
+ # Verify that we do not falsely check out a branch instead of a tag.
+ assert 'Switched to branch' not in output
+ assert 'Updated to SecureDrop' not in output
+ assert 'Signature verification failed' in output
+
+ child.expect(pexpect.EOF, timeout=10) # Wait for CLI to exit
+ child.close()
+ assert child.exitstatus != 0
+ assert child.signalstatus != 0
diff --git a/admin/tests/test_securedrop-admin.py b/admin/tests/test_securedrop-admin.py
--- a/admin/tests/test_securedrop-admin.py
+++ b/admin/tests/test_securedrop-admin.py
@@ -140,7 +140,10 @@ def test_update_gpg_recv_primary_key_failure(self, tmpdir, caplog):
return_value=(True, "0.6.1")),
mock.patch('subprocess.check_call'),
mock.patch('subprocess.check_output',
- return_value=git_output),
+ side_effect=[
+ git_output,
+ subprocess.CalledProcessError(1, 'cmd',
+ 'not a valid ref')]),
mock.patch('securedrop_admin.get_release_key_from_keyserver',
side_effect=[
subprocess.CalledProcessError(1, 'cmd', 'BANG'),
@@ -181,16 +184,63 @@ def test_update_signature_verifies(self, tmpdir, caplog):
'gpg: Good signature from "SecureDrop Release '
'Signing Key" [unknown]\n')
- with mock.patch('securedrop_admin.check_for_updates',
- return_value=(True, "0.6.1")):
- with mock.patch('subprocess.check_call'):
- with mock.patch('subprocess.check_output',
- return_value=git_output):
- ret_code = securedrop_admin.update(args)
- assert "Applying SecureDrop updates..." in caplog.text
- assert "Signature verification successful." in caplog.text
- assert "Updated to SecureDrop" in caplog.text
- assert ret_code == 0
+ patchers = [
+ mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")),
+ mock.patch('subprocess.check_call'),
+ mock.patch('subprocess.check_output',
+ side_effect=[
+ git_output,
+ subprocess.CalledProcessError(1, 'cmd',
+ 'not a valid ref')]),
+ ]
+
+ for patcher in patchers:
+ patcher.start()
+
+ try:
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification successful." in caplog.text
+ assert "Updated to SecureDrop" in caplog.text
+ assert ret_code == 0
+ finally:
+ for patcher in patchers:
+ patcher.stop()
+
+ def test_update_unexpected_exception_git_refs(self, tmpdir, caplog):
+ git_repo_path = str(tmpdir)
+ args = argparse.Namespace(root=git_repo_path)
+
+ git_output = ('gpg: Signature made Tue 13 Mar 2018 01:14:11 AM UTC\n'
+ 'gpg: using RSA key '
+ '22245C81E3BAEB4138B36061310F561200F4AD77\n'
+ 'gpg: Good signature from "SecureDrop Release '
+ 'Signing Key" [unknown]\n')
+
+ patchers = [
+ mock.patch('securedrop_admin.check_for_updates',
+ return_value=(True, "0.6.1")),
+ mock.patch('subprocess.check_call'),
+ mock.patch('subprocess.check_output',
+ side_effect=[
+ git_output,
+ subprocess.CalledProcessError(1, 'cmd',
+ 'a random error')]),
+ ]
+
+ for patcher in patchers:
+ patcher.start()
+
+ try:
+ ret_code = securedrop_admin.update(args)
+ assert "Applying SecureDrop updates..." in caplog.text
+ assert "Signature verification successful." not in caplog.text
+ assert "Updated to SecureDrop" not in caplog.text
+ assert ret_code == 1
+ finally:
+ for patcher in patchers:
+ patcher.stop()
def test_update_signature_does_not_verify(self, tmpdir, caplog):
git_repo_path = str(tmpdir)
| Implement logic to guard against checkout of branch in the case of a duplicate branch and tag
## Description
We distribute code to workstations via checkout of a signed git tag. If there is a branch with the same as the tag, then the tag will verify, but when we actually `git checkout <tag name>` then the branch, not the tag, will be checked out.
### Short term
As a guard, implement logic to verify there is no duplicate tag and branch prior to checkout.
### Long term
We should stop distributing code in this way and implement https://github.com/freedomofpress/securedrop/issues/3502
| @eloquence had a very good suggestion here (idea shared privately):
> I'm curious if `git show-ref --heads --verify 0.x.0` may be an alternative to parsing stdout. It returns error code 0 if a branch with this name exists, and non-zero otherwise.
This is a good solution | 2018-06-25T20:33:12Z | [] | [] |
freedomofpress/securedrop | 3,615 | freedomofpress__securedrop-3615 | [
"724"
] | d3554e72d320888e4b9fd3ae3373d3ea71e0dc9e | diff --git a/securedrop/crypto_util.py b/securedrop/crypto_util.py
--- a/securedrop/crypto_util.py
+++ b/securedrop/crypto_util.py
@@ -81,7 +81,6 @@ def __init__(self,
self.adjectives = f.read().splitlines()
# Make sure these pass before the app can run
- # TODO: Add more tests
def do_runtime_tests(self):
if self.scrypt_id_pepper == self.scrypt_gpg_pepper:
raise AssertionError('scrypt_id_pepper == scrypt_gpg_pepper')
@@ -184,7 +183,6 @@ def delete_reply_keypair(self, source_filesystem_id):
# deleted. http://pythonhosted.org/python-gnupg/#deleting-keys
self.gpg.delete_keys(key, True) # private key
self.gpg.delete_keys(key) # public key
- # TODO: srm?
def getkey(self, name):
for key in self.gpg.list_keys():
diff --git a/securedrop/journalist_app/decorators.py b/securedrop/journalist_app/decorators.py
--- a/securedrop/journalist_app/decorators.py
+++ b/securedrop/journalist_app/decorators.py
@@ -12,7 +12,6 @@ def admin_required(func):
def wrapper(*args, **kwargs):
if logged_in() and g.user.is_admin:
return func(*args, **kwargs)
- # TODO: sometimes this gets flashed 2x (Chrome only?)
flash(gettext("Only administrators can access this page."),
"notification")
return redirect(url_for('main.index'))
diff --git a/securedrop/manage.py b/securedrop/manage.py
--- a/securedrop/manage.py
+++ b/securedrop/manage.py
@@ -42,7 +42,7 @@ def reset(args):
"""
# Erase the development db file
if not hasattr(config, 'DATABASE_FILE'):
- raise Exception("TODO: ./manage.py doesn't know how to clear the db "
+ raise Exception("./manage.py doesn't know how to clear the db "
'if the backend is not sqlite')
# we need to save some data about the old DB file so we can recreate it
diff --git a/securedrop/management/run.py b/securedrop/management/run.py
--- a/securedrop/management/run.py
+++ b/securedrop/management/run.py
@@ -88,16 +88,6 @@ def __init__(self, proc_funcs):
def monitor(self):
while True:
- # TODO: we currently don't handle input, which makes using an
- # interactive debugger like pdb impossible. Since Flask provides
- # a featureful in-browser debugger, I'll accept that pdb is
- # broken for now. If someone really wants it, they should be
- # able to change this function to make it work (although I'm not
- # sure how hard that would be).
- #
- # If you really want to use pdb, you can just run the
- # application scripts individually (`python source.py` or
- # `python journalist.py`).
rprocs, _, _ = select.select(self.procs, [], [])
for proc in rprocs:
diff --git a/securedrop/models.py b/securedrop/models.py
--- a/securedrop/models.py
+++ b/securedrop/models.py
@@ -26,9 +26,6 @@
LOGIN_HARDENING = True
-# Unfortunately, the login hardening measures mess with the tests in
-# non-deterministic ways. TODO rewrite the tests so we can more
-# precisely control which code paths are exercised.
if os.environ.get('SECUREDROP_ENV') == 'test':
LOGIN_HARDENING = False
| diff --git a/securedrop/tests/conftest.py b/securedrop/tests/conftest.py
--- a/securedrop/tests/conftest.py
+++ b/securedrop/tests/conftest.py
@@ -22,7 +22,7 @@
from source_app import create_app as create_source_app
import utils
-# TODO: the PID file for the redis worker is hard-coded below.
+# The PID file for the redis worker is hard-coded below.
# Ideally this constant would be provided by a test harness.
# It has been intentionally omitted from `config.py.example`
# in order to isolate the test vars from prod vars.
diff --git a/securedrop/tests/functional/journalist_navigation_steps.py b/securedrop/tests/functional/journalist_navigation_steps.py
--- a/securedrop/tests/functional/journalist_navigation_steps.py
+++ b/securedrop/tests/functional/journalist_navigation_steps.py
@@ -234,8 +234,7 @@ def _add_user(self, username, is_admin=False, hotp=None):
hotp_secret.send_keys(hotp)
if is_admin:
- # TODO implement (checkbox is unchecked by default)
- pass
+ raise NotImplementedError("Admin's can't be added yet.")
submit_button = self.driver.find_element_by_css_selector(
'button[type=submit]')
diff --git a/securedrop/tests/test_journalist.py b/securedrop/tests/test_journalist.py
--- a/securedrop/tests/test_journalist.py
+++ b/securedrop/tests/test_journalist.py
@@ -161,7 +161,6 @@ def test_unauthorized_access_redirects_to_login(journalist_app):
def test_login_throttle(journalist_app, test_journo):
# Overwrite the default value used during testing
- # TODO this may break other tests during parallel testing
models.LOGIN_HARDENING = True
try:
with journalist_app.test_client() as app:
@@ -1130,9 +1129,6 @@ def tearDown(self):
# making a point of this, we hope to avoid the introduction of new tests,
# that do not truly prove their result because of this disconnect between
# request context in Flask Testing and production.
- #
- # TODO: either ditch Flask Testing or subclass it as discussed in the
- # aforementioned issue to fix the described problem.
def _login_admin(self):
self._ctx.g.user = self.admin
diff --git a/securedrop/tests/utils/env.py b/securedrop/tests/utils/env.py
--- a/securedrop/tests/utils/env.py
+++ b/securedrop/tests/utils/env.py
@@ -16,7 +16,7 @@
FILES_DIR = abspath(join(dirname(realpath(__file__)), '..', 'files'))
-# TODO: the PID file for the redis worker is hard-coded below. Ideally this
+# The PID file for the redis worker is hard-coded below. Ideally this
# constant would be provided by a test harness. It has been intentionally
# omitted from `config.py.example` in order to isolate the test vars from prod
# vars. When refactoring the test suite, the test_worker_pidfile
diff --git a/testinfra/app/test_tor_hidden_services.py b/testinfra/app/test_tor_hidden_services.py
--- a/testinfra/app/test_tor_hidden_services.py
+++ b/testinfra/app/test_tor_hidden_services.py
@@ -13,7 +13,6 @@ def test_tor_service_directories(File, Sudo, tor_service):
with Sudo():
f = File("/var/lib/tor/services/{}".format(tor_service['name']))
assert f.is_directory
- # TODO: tor might mark these dirs as setgid
assert oct(f.mode) == "0700"
assert f.user == "debian-tor"
assert f.group == "debian-tor"
| Fix TODO commented items
```
> grep -r 'TODO' ./ | wc -l
43
```
lots of these don't seem to have open tickets
| :+1: Good call!
```
$ grep -ri todo | wc -l
150
```
```
$ grep -r 'TODO' ./ | wc -l
216
```
Hmm, I'm not liking that growth...
Not so bad ;-)
<pre>
$ git --no-pager grep -nH -e 'TODO' | wc -l
31
</pre>
| 2018-06-29T14:33:10Z | [] | [] |